[ 496.699375] env[62070]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'linux_bridge' {{(pid=62070) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 496.699837] env[62070]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'noop' {{(pid=62070) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 496.699837] env[62070]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'ovs' {{(pid=62070) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 496.700147] env[62070]: INFO os_vif [-] Loaded VIF plugins: linux_bridge, noop, ovs
[ 496.799248] env[62070]: DEBUG oslo_concurrency.processutils [-] Running cmd (subprocess): grep -F node.session.scan /sbin/iscsiadm {{(pid=62070) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:390}}
[ 496.809532] env[62070]: DEBUG oslo_concurrency.processutils [-] CMD "grep -F node.session.scan /sbin/iscsiadm" returned: 0 in 0.010s {{(pid=62070) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:428}}
[ 497.418124] env[62070]: INFO nova.virt.driver [None req-3de9694d-2b42-499a-baa2-a7c5eb55da07 None None] Loading compute driver 'vmwareapi.VMwareVCDriver'
[ 497.492851] env[62070]: DEBUG oslo_concurrency.lockutils [-] Acquiring lock "oslo_vmware_api_lock" by "oslo_vmware.api.VMwareAPISession._create_session" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 497.493280] env[62070]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" acquired by "oslo_vmware.api.VMwareAPISession._create_session" :: waited 0.001s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 497.493528] env[62070]: DEBUG oslo_vmware.service [-] Creating suds client with soap_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk' and wsdl_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk/vimService.wsdl' {{(pid=62070) __init__ /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:242}}
[ 500.727620] env[62070]: DEBUG oslo_vmware.service [-] Invoking ServiceInstance.RetrieveServiceContent with opID=oslo.vmware-6abc6136-8bf1-4df8-a3f6-5257df41be8e {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 500.743071] env[62070]: DEBUG oslo_vmware.api [-] Logging into host: vc1.osci.c.eu-de-1.cloud.sap. {{(pid=62070) _create_session /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:242}}
[ 500.743257] env[62070]: DEBUG oslo_vmware.service [-] Invoking SessionManager.Login with opID=oslo.vmware-3cde4bc5-b6f7-45d9-b38f-63a40eec5e2c {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 500.771793] env[62070]: INFO oslo_vmware.api [-] Successfully established new session; session ID is fe60f.
[ 500.771986] env[62070]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" "released" by "oslo_vmware.api.VMwareAPISession._create_session" :: held 3.279s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 500.772484] env[62070]: INFO nova.virt.vmwareapi.driver [None req-3de9694d-2b42-499a-baa2-a7c5eb55da07 None None] VMware vCenter version: 7.0.3
[ 500.776165] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2fbbd26-0455-495a-943a-c54f731b0fbd {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 500.794691] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0606b709-6410-43f8-81ec-522527ee3370 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 500.800653] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2234845b-6f29-4f5c-a08a-21642bc753c6 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 500.807190] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d69ebd7c-96bc-463e-966a-73ed1be8873f {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 500.820096] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc6df960-30d2-4fad-b899-7902520b0488 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 500.825942] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bddc9e0e-720b-46b7-9353-f6238b6162af {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 500.855881] env[62070]: DEBUG oslo_vmware.service [-] Invoking ExtensionManager.FindExtension with opID=oslo.vmware-1433c239-3c5e-4d16-854c-a06ed9f18f0f {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 500.861514] env[62070]: DEBUG nova.virt.vmwareapi.driver [None req-3de9694d-2b42-499a-baa2-a7c5eb55da07 None None] Extension org.openstack.compute already exists. {{(pid=62070) _register_openstack_extension /opt/stack/nova/nova/virt/vmwareapi/driver.py:225}}
[ 500.864245] env[62070]: INFO nova.compute.provider_config [None req-3de9694d-2b42-499a-baa2-a7c5eb55da07 None None] No provider configs found in /etc/nova/provider_config/. If files are present, ensure the Nova process has access.
[ 501.368036] env[62070]: DEBUG nova.context [None req-3de9694d-2b42-499a-baa2-a7c5eb55da07 None None] Found 2 cells: 00000000-0000-0000-0000-000000000000(cell0),aa0d6816-368c-40ad-8806-9499128e19ec(cell1) {{(pid=62070) load_cells /opt/stack/nova/nova/context.py:464}}
[ 501.369879] env[62070]: DEBUG oslo_concurrency.lockutils [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] Acquiring lock "00000000-0000-0000-0000-000000000000" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 501.369879] env[62070]: DEBUG oslo_concurrency.lockutils [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] Lock "00000000-0000-0000-0000-000000000000" acquired by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 501.370383] env[62070]: DEBUG oslo_concurrency.lockutils [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] Lock "00000000-0000-0000-0000-000000000000" "released" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 501.370821] env[62070]: DEBUG oslo_concurrency.lockutils [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] Acquiring lock "aa0d6816-368c-40ad-8806-9499128e19ec" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 501.371013] env[62070]: DEBUG oslo_concurrency.lockutils [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] Lock "aa0d6816-368c-40ad-8806-9499128e19ec" acquired by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 501.372086] env[62070]: DEBUG oslo_concurrency.lockutils [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] Lock "aa0d6816-368c-40ad-8806-9499128e19ec" "released" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 501.392458] env[62070]: INFO dbcounter [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] Registered counter for database nova_cell0
[ 501.400847] env[62070]: INFO dbcounter [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] Registered counter for database nova_cell1
[ 501.404338] env[62070]: DEBUG oslo_db.sqlalchemy.engines [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=62070) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:342}}
[ 501.405133] env[62070]: DEBUG oslo_db.sqlalchemy.engines [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=62070) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:342}}
[ 501.409642] env[62070]: ERROR nova.db.main.api [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main
[ 501.409642] env[62070]: result = function(*args, **kwargs)
[ 501.409642] env[62070]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper
[ 501.409642] env[62070]: return func(*args, **kwargs)
[ 501.409642] env[62070]: File "/opt/stack/nova/nova/context.py", line 422, in gather_result
[ 501.409642] env[62070]: result = fn(*args, **kwargs)
[ 501.409642] env[62070]: File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper
[ 501.409642] env[62070]: return f(*args, **kwargs)
[ 501.409642] env[62070]: File "/opt/stack/nova/nova/objects/service.py", line 554, in _db_service_get_minimum_version
[ 501.409642] env[62070]: return db.service_get_minimum_version(context, binaries)
[ 501.409642] env[62070]: File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper
[ 501.409642] env[62070]: _check_db_access()
[ 501.409642] env[62070]: File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access
[ 501.409642] env[62070]: stacktrace = ''.join(traceback.format_stack())
[ 501.409642] env[62070]:
[ 501.410621] env[62070]: ERROR nova.db.main.api [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main
[ 501.410621] env[62070]: result = function(*args, **kwargs)
[ 501.410621] env[62070]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper
[ 501.410621] env[62070]: return func(*args, **kwargs)
[ 501.410621] env[62070]: File "/opt/stack/nova/nova/context.py", line 422, in gather_result
[ 501.410621] env[62070]: result = fn(*args, **kwargs)
[ 501.410621] env[62070]: File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper
[ 501.410621] env[62070]: return f(*args, **kwargs)
[ 501.410621] env[62070]: File "/opt/stack/nova/nova/objects/service.py", line 554, in _db_service_get_minimum_version
[ 501.410621] env[62070]: return db.service_get_minimum_version(context, binaries)
[ 501.410621] env[62070]: File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper
[ 501.410621] env[62070]: _check_db_access()
[ 501.410621] env[62070]: File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access
[ 501.410621] env[62070]: stacktrace = ''.join(traceback.format_stack())
[ 501.410621] env[62070]:
[ 501.411031] env[62070]: WARNING nova.objects.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] Failed to get minimum service version for cell 00000000-0000-0000-0000-000000000000
[ 501.411174] env[62070]: WARNING nova.objects.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] Failed to get minimum service version for cell aa0d6816-368c-40ad-8806-9499128e19ec
[ 501.411597] env[62070]: DEBUG oslo_concurrency.lockutils [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] Acquiring lock "singleton_lock" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 501.411756] env[62070]: DEBUG oslo_concurrency.lockutils [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] Acquired lock "singleton_lock" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 501.411994] env[62070]: DEBUG oslo_concurrency.lockutils [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] Releasing lock "singleton_lock" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 501.412322] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] Full set of CONF: {{(pid=62070) _wait_for_exit_or_signal /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/service.py:363}}
[ 501.412465] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] ******************************************************************************** {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2806}}
[ 501.412591] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] Configuration options gathered from: {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2807}}
[ 501.412725] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] command line args: ['--config-file', '/etc/nova/nova.conf', '--config-file', '/etc/nova/nova-cpu-common.conf', '--config-file', '/etc/nova/nova-cpu-1.conf'] {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2808}}
[ 501.412918] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] config files: ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2809}}
[ 501.413067] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] ================================================================================ {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2811}}
[ 501.413345] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] allow_resize_to_same_host = True {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 501.413479] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] arq_binding_timeout = 300 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 501.413611] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] backdoor_port = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 501.413736] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] backdoor_socket = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 501.413896] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] block_device_allocate_retries = 60 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 501.414070] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] block_device_allocate_retries_interval = 3 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 501.414270] env[62070]: DEBUG
oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] cert = self.pem {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 501.414448] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] compute_driver = vmwareapi.VMwareVCDriver {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 501.414618] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] compute_monitors = [] {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 501.414968] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] config_dir = [] {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 501.415161] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] config_drive_format = iso9660 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 501.415302] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] config_file = ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 501.415469] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] config_source = [] {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 501.415634] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] console_host = devstack {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 501.415794] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] control_exchange = nova {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 501.415950] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] cpu_allocation_ratio = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 501.416118] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] daemon = False {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 501.416285] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] debug = True {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 501.416440] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] default_access_ip_network_name = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 501.416600] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] default_availability_zone = nova {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 501.416751] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] default_ephemeral_format = 
None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 501.416908] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] default_green_pool_size = 1000 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 501.417154] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] default_log_levels = ['amqp=WARN', 'amqplib=WARN', 'boto=WARN', 'qpid=WARN', 'sqlalchemy=WARN', 'suds=INFO', 'oslo.messaging=INFO', 'oslo_messaging=INFO', 'iso8601=WARN', 'requests.packages.urllib3.connectionpool=WARN', 'urllib3.connectionpool=WARN', 'websocket=WARN', 'requests.packages.urllib3.util.retry=WARN', 'urllib3.util.retry=WARN', 'keystonemiddleware=WARN', 'routes.middleware=WARN', 'stevedore=WARN', 'taskflow=WARN', 'keystoneauth=WARN', 'oslo.cache=INFO', 'oslo_policy=INFO', 'dogpile.core.dogpile=INFO', 'glanceclient=WARN', 'oslo.privsep.daemon=INFO'] {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 501.417322] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] default_schedule_zone = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 501.417478] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] disk_allocation_ratio = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 501.417635] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] enable_new_services = True {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 501.417810] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] enabled_apis = ['osapi_compute'] {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 501.417974] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] enabled_ssl_apis = [] {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 501.418159] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] flat_injected = False {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 501.418321] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] force_config_drive = False {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 501.418476] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] force_raw_images = True {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 501.418643] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] graceful_shutdown_timeout = 5 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 501.418802] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] heal_instance_info_cache_interval = 60 {{(pid=62070) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 501.419023] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] host = cpu-1 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 501.419202] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] initial_cpu_allocation_ratio = 4.0 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 501.419368] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] initial_disk_allocation_ratio = 1.0 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 501.419529] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] initial_ram_allocation_ratio = 1.0 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 501.419736] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] injected_network_template = /opt/stack/nova/nova/virt/interfaces.template {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 501.419900] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] instance_build_timeout = 0 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 501.420071] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] instance_delete_interval = 300 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 501.420240] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] instance_format = [instance: %(uuid)s] {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 501.420402] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] instance_name_template = instance-%08x {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 501.420558] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] instance_usage_audit = False {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 501.420721] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] instance_usage_audit_period = month {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 501.420878] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] instance_uuid_format = [instance: %(uuid)s] {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 501.421045] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] instances_path = /opt/stack/data/nova/instances {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 501.421214] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] internal_service_availability_zone = internal {{(pid=62070) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 501.421370] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] key = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 501.421522] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] live_migration_retry_count = 30 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 501.421681] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] log_color = False {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 501.421839] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] log_config_append = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 501.421998] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] log_date_format = %Y-%m-%d %H:%M:%S {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 501.422181] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] log_dir = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 501.422360] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] log_file = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 501.422491] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] log_options = True {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 501.422650] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] log_rotate_interval = 1 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 501.422813] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] log_rotate_interval_type = days {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 501.422972] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] log_rotation_type = none {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 501.423114] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] logging_context_format_string = %(color)s%(levelname)s %(name)s [%(global_request_id)s %(request_id)s %(project_name)s %(user_name)s%(color)s] %(instance)s%(color)s%(message)s {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 501.423259] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] logging_debug_format_suffix = {{(pid=%(process)d) %(funcName)s %(pathname)s:%(lineno)d}} {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 501.423444] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] 
logging_default_format_string = %(color)s%(levelname)s %(name)s [-%(color)s] %(instance)s%(color)s%(message)s {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 501.423584] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] logging_exception_prefix = ERROR %(name)s %(instance)s {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 501.423707] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] logging_user_identity_format = %(user)s %(project)s %(domain)s %(system_scope)s %(user_domain)s %(project_domain)s {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 501.423869] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] long_rpc_timeout = 1800 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 501.424035] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] max_concurrent_builds = 10 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 501.424198] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] max_concurrent_live_migrations = 1 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 501.424354] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] max_concurrent_snapshots = 5 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 501.424506] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] max_local_block_devices = 3 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 501.424661] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] max_logfile_count = 30 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 501.424969] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] max_logfile_size_mb = 200 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 501.425167] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] maximum_instance_delete_attempts = 5 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 501.425343] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] metadata_listen = 0.0.0.0 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 501.425513] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] metadata_listen_port = 8775 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 501.425683] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] metadata_workers = 2 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 501.425841] env[62070]: DEBUG oslo_service.service 
[None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] migrate_max_retries = -1 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 501.426047] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] mkisofs_cmd = genisoimage {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 501.426263] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] my_block_storage_ip = 10.180.1.21 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 501.426399] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] my_ip = 10.180.1.21 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 501.426559] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] network_allocate_retries = 0 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 501.426732] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] non_inheritable_image_properties = ['cache_in_nova', 'bittorrent'] {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 501.426896] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] osapi_compute_listen = 0.0.0.0 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 501.427079] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] osapi_compute_listen_port = 8774 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 501.427252] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] osapi_compute_unique_server_name_scope = {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 501.427417] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] osapi_compute_workers = 2 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 501.427576] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] password_length = 12 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 501.427730] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] periodic_enable = True {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 501.427885] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] periodic_fuzzy_delay = 60 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 501.428064] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] pointer_model = usbtablet {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 501.428235] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] preallocate_images = none {{(pid=62070) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 501.428399] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] publish_errors = False {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 501.428529] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] pybasedir = /opt/stack/nova {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 501.428685] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] ram_allocation_ratio = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 501.428844] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] rate_limit_burst = 0 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 501.429015] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] rate_limit_except_level = CRITICAL {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 501.429179] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] rate_limit_interval = 0 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 501.429340] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] reboot_timeout = 0 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 501.429497] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] reclaim_instance_interval = 0 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 501.429649] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] record = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 501.429813] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] reimage_timeout_per_gb = 60 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 501.429975] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] report_interval = 120 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 501.430146] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] rescue_timeout = 0 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 501.430309] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] reserved_host_cpus = 0 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 501.430464] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] reserved_host_disk_mb = 0 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 501.430619] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 
None None] reserved_host_memory_mb = 512 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 501.430775] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] reserved_huge_pages = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 501.430933] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] resize_confirm_window = 0 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 501.431102] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] resize_fs_using_block_device = False {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 501.431264] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] resume_guests_state_on_host_boot = False {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 501.431430] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] rootwrap_config = /etc/nova/rootwrap.conf {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 501.431591] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] rpc_response_timeout = 60 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 501.431748] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] run_external_periodic_tasks = True {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 501.431918] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] running_deleted_instance_action = reap {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 501.432105] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] running_deleted_instance_poll_interval = 1800 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 501.432271] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] running_deleted_instance_timeout = 0 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 501.432429] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] scheduler_instance_sync_interval = 120 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 501.432593] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] service_down_time = 720 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 501.432756] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] servicegroup_driver = db {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 501.432907] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] shell_completion = None {{(pid=62070) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 501.433077] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] shelved_offload_time = 0 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 501.433262] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] shelved_poll_interval = 3600 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 501.433429] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] shutdown_timeout = 0 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 501.433590] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] source_is_ipv6 = False {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 501.433745] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] ssl_only = False {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 501.433992] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] state_path = /opt/stack/data/n-cpu-1 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 501.434190] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] sync_power_state_interval = 600 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 501.434350] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] sync_power_state_pool_size = 1000 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 501.434517] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] syslog_log_facility = LOG_USER {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 501.434672] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] tempdir = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 501.434827] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] timeout_nbd = 10 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 501.435172] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] transport_url = **** {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 501.435348] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] update_resources_interval = 0 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 501.435508] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] use_cow_images = True {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 501.435664] env[62070]: DEBUG oslo_service.service [None 
req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] use_eventlog = False {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 501.435821] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] use_journal = False {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 501.435976] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] use_json = False {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 501.436147] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] use_rootwrap_daemon = False {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 501.436308] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] use_stderr = False {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 501.436464] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] use_syslog = False {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 501.436618] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] vcpu_pin_set = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 501.436783] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] vif_plugging_is_fatal = True {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 501.436950] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] vif_plugging_timeout = 300 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 501.437141] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] virt_mkfs = [] {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 501.437309] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] volume_usage_poll_interval = 0 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 501.437471] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] watch_log_file = False {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 501.437636] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] web = /usr/share/spice-html5 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 501.437819] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] os_brick.lock_path = /opt/stack/data/n-cpu-1 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.437986] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] os_brick.wait_mpath_device_attempts = 4 {{(pid=62070) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.438163] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] os_brick.wait_mpath_device_interval = 1 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.438337] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] oslo_concurrency.disable_process_locking = False {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.438918] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] oslo_concurrency.lock_path = /opt/stack/data/n-cpu-1 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.439124] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] oslo_messaging_metrics.metrics_buffer_size = 1000 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.439297] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] oslo_messaging_metrics.metrics_enabled = False {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.439471] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] oslo_messaging_metrics.metrics_process_name = {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.439642] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] oslo_messaging_metrics.metrics_socket_file = /var/tmp/metrics_collector.sock {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.439806] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] oslo_messaging_metrics.metrics_thread_stop_timeout = 10 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.439987] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] api.auth_strategy = keystone {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.440170] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] api.compute_link_prefix = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.440347] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] api.config_drive_skip_versions = 1.0 2007-01-19 2007-03-01 2007-08-29 2007-10-10 2007-12-15 2008-02-01 2008-09-01 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.440518] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] api.dhcp_domain = novalocal {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.440685] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] api.enable_instance_password = True {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.440849] 
env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] api.glance_link_prefix = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.441016] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] api.instance_list_cells_batch_fixed_size = 100 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.441219] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] api.instance_list_cells_batch_strategy = distributed {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.441389] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] api.instance_list_per_project_cells = False {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.441552] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] api.list_records_by_skipping_down_cells = True {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.441714] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] api.local_metadata_per_cell = False {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.441880] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] api.max_limit = 1000 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.442068] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] api.metadata_cache_expiration = 15 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.442253] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] api.neutron_default_tenant_id = default {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.442428] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] api.response_validation = warn {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.442599] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] api.use_neutron_default_nets = False {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.442767] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] api.vendordata_dynamic_connect_timeout = 5 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.442930] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] api.vendordata_dynamic_failure_fatal = False {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.443110] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] api.vendordata_dynamic_read_timeout = 5 {{(pid=62070) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.443311] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] api.vendordata_dynamic_ssl_certfile = {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.443490] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] api.vendordata_dynamic_targets = [] {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.443653] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] api.vendordata_jsonfile_path = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.443830] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] api.vendordata_providers = ['StaticJSON'] {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.444034] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] cache.backend = dogpile.cache.memcached {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.444205] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] cache.backend_argument = **** {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.444376] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] cache.config_prefix = cache.oslo {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.444551] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] cache.dead_timeout = 60.0 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.444709] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] cache.debug_cache_backend = False {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.444867] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] cache.enable_retry_client = False {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.445182] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] cache.enable_socket_keepalive = False {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.445388] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] cache.enabled = True {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.445559] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] cache.enforce_fips_mode = False {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.445723] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] cache.expiration_time = 600 {{(pid=62070) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.445888] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] cache.hashclient_retry_attempts = 2 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.446072] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] cache.hashclient_retry_delay = 1.0 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.446241] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] cache.memcache_dead_retry = 300 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.446403] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] cache.memcache_password = **** {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.446567] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] cache.memcache_pool_connection_get_timeout = 10 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.446728] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] cache.memcache_pool_flush_on_reconnect = False {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.446890] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] cache.memcache_pool_maxsize = 10 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.447075] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] cache.memcache_pool_unused_timeout = 60 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.447249] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] cache.memcache_sasl_enabled = False {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.447430] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] cache.memcache_servers = ['localhost:11211'] {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.447598] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] cache.memcache_socket_timeout = 1.0 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.447754] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] cache.memcache_username = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.447925] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] cache.proxies = [] {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.448120] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] cache.redis_db = 0 {{(pid=62070) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.448407] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] cache.redis_password = **** {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.448589] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] cache.redis_sentinel_service_name = mymaster {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.448770] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] cache.redis_sentinels = ['localhost:26379'] {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.448997] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] cache.redis_server = localhost:6379 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.449215] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] cache.redis_socket_timeout = 1.0 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.449382] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] cache.redis_username = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.449547] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] cache.retry_attempts = 2 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.449713] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] cache.retry_delay = 0.0 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.449880] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] cache.socket_keepalive_count = 1 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.450054] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] cache.socket_keepalive_idle = 1 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.450220] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] cache.socket_keepalive_interval = 1 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.450380] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] cache.tls_allowed_ciphers = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.450540] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] cache.tls_cafile = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.450696] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] cache.tls_certfile = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 
501.450855] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] cache.tls_enabled = False {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.451018] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] cache.tls_keyfile = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.451280] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] cinder.auth_section = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.451476] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] cinder.auth_type = password {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.451641] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] cinder.cafile = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.451819] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] cinder.catalog_info = volumev3::publicURL {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.451981] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] cinder.certfile = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.452164] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] cinder.collect_timing = False {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.452330] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] cinder.cross_az_attach = True {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.452492] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] cinder.debug = False {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.452653] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] cinder.endpoint_template = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.452815] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] cinder.http_retries = 3 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.452976] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] cinder.insecure = False {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.453161] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] cinder.keyfile = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.453343] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] cinder.os_region_name = RegionOne 
{{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.453514] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] cinder.split_loggers = False {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.453674] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] cinder.timeout = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.453846] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] compute.consecutive_build_service_disable_threshold = 10 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.454016] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] compute.cpu_dedicated_set = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.454214] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] compute.cpu_shared_set = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.454370] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] compute.image_type_exclude_list = [] {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.454534] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] compute.live_migration_wait_for_vif_plug = True {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.454698] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] compute.max_concurrent_disk_ops = 0 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.454858] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] compute.max_disk_devices_to_attach = -1 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.455038] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] compute.packing_host_numa_cells_allocation_strategy = False {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.455363] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] compute.provider_config_location = /etc/nova/provider_config/ {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.455565] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] compute.resource_provider_association_refresh = 300 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.455735] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] compute.sharing_providers_max_uuids_per_request = 200 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.455899] env[62070]: DEBUG oslo_service.service [None 
req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] compute.shutdown_retry_interval = 10 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.456097] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] compute.vmdk_allowed_types = ['streamOptimized', 'monolithicSparse'] {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.456283] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] conductor.workers = 2 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.456468] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] console.allowed_origins = [] {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.456634] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] console.ssl_ciphers = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.456804] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] console.ssl_minimum_version = default {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.456973] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] consoleauth.enforce_session_timeout = False {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.457157] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] consoleauth.token_ttl = 600 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.457328] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] cyborg.cafile = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.457516] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] cyborg.certfile = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.457724] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] cyborg.collect_timing = False {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.457897] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] cyborg.connect_retries = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.458070] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] cyborg.connect_retry_delay = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.458233] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] cyborg.endpoint_override = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.458399] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] 
cyborg.insecure = False {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.458556] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] cyborg.keyfile = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.458716] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] cyborg.max_version = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.458871] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] cyborg.min_version = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.459035] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] cyborg.region_name = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.459200] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] cyborg.retriable_status_codes = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.459358] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] cyborg.service_name = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.459525] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] cyborg.service_type = accelerator {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.459686] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] cyborg.split_loggers = False {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.459844] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] cyborg.status_code_retries = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.460009] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] cyborg.status_code_retry_delay = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.460188] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] cyborg.timeout = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.460375] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] cyborg.valid_interfaces = ['internal', 'public'] {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.460595] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] cyborg.version = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.460792] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] database.backend = sqlalchemy {{(pid=62070) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.460965] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] database.connection = **** {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.461150] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] database.connection_debug = 0 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.461323] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] database.connection_parameters = {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.461489] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] database.connection_recycle_time = 3600 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.461654] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] database.connection_trace = False {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.461815] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] database.db_inc_retry_interval = True {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.461978] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] database.db_max_retries = 20 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.462155] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] database.db_max_retry_interval = 10 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.462321] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] database.db_retry_interval = 1 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.462484] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] database.max_overflow = 50 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.462645] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] database.max_pool_size = 5 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.462804] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] database.max_retries = 10 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.462972] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] database.mysql_sql_mode = TRADITIONAL {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.463161] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] database.mysql_wsrep_sync_wait = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} 
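The repeated "log_opt_values ... cfg.py:2826" entries in this dump come from oslo.config logging every registered option at DEBUG level when the nova-compute service starts; options registered with secret=True (for example database.connection or cache.memcache_password) are printed as ****. The following is a minimal, self-contained sketch of that mechanism only, not Nova's actual startup code, and the two option definitions are simplified stand-ins chosen for illustration:

# Minimal sketch (assumption: illustrative stand-in options, not Nova's real definitions)
# of how oslo.config produces the "group.option = value" DEBUG lines seen in this dump.
import logging

from oslo_config import cfg

LOG = logging.getLogger(__name__)

CONF = cfg.ConfigOpts()
CONF.register_opts(
    [
        cfg.IntOpt('max_pool_size', default=5,
                   help='Maximum number of SQL connections to keep open in a pool.'),
        # secret=True is why the dump above shows "database.connection = ****".
        cfg.StrOpt('connection', secret=True,
                   help='SQLAlchemy connection string for the main database.'),
    ],
    group='database')

if __name__ == '__main__':
    logging.basicConfig(level=logging.DEBUG)
    CONF(project='nova')  # parses command-line args, e.g. --config-file nova.conf
    # Emits one DEBUG line per registered option, masking secret values.
    CONF.log_opt_values(LOG, logging.DEBUG)

Run with "--config-file nova.conf", this would log lines in the same "database.<option> = <value>" shape as the surrounding dump (the env[62070]/pid decoration is added by the service's own log formatter).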
[ 501.463370] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] database.pool_timeout = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.463557] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] database.retry_interval = 10 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.463719] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] database.slave_connection = **** {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.463880] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] database.sqlite_synchronous = True {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.464061] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] database.use_db_reconnect = False {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.464280] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] api_database.backend = sqlalchemy {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.464464] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] api_database.connection = **** {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.464632] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] api_database.connection_debug = 0 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.464801] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] api_database.connection_parameters = {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.464966] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] api_database.connection_recycle_time = 3600 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.465147] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] api_database.connection_trace = False {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.465455] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] api_database.db_inc_retry_interval = True {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.465658] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] api_database.db_max_retries = 20 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.465826] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] api_database.db_max_retry_interval = 10 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.465992] env[62070]: DEBUG 
oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] api_database.db_retry_interval = 1 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.466171] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] api_database.max_overflow = 50 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.466338] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] api_database.max_pool_size = 5 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.466503] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] api_database.max_retries = 10 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.466673] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] api_database.mysql_sql_mode = TRADITIONAL {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.466832] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] api_database.mysql_wsrep_sync_wait = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.466990] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] api_database.pool_timeout = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.467169] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] api_database.retry_interval = 10 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.467331] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] api_database.slave_connection = **** {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.467491] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] api_database.sqlite_synchronous = True {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.467670] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] devices.enabled_mdev_types = [] {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.467848] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] ephemeral_storage_encryption.cipher = aes-xts-plain64 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.468035] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] ephemeral_storage_encryption.default_format = luks {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.468211] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] ephemeral_storage_encryption.enabled = False {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.468381] env[62070]: 
DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] ephemeral_storage_encryption.key_size = 512 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.468552] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] glance.api_servers = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.468717] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] glance.cafile = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.468880] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] glance.certfile = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.469052] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] glance.collect_timing = False {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.469217] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] glance.connect_retries = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.469378] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] glance.connect_retry_delay = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.469539] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] glance.debug = False {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.469705] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] glance.default_trusted_certificate_ids = [] {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.469867] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] glance.enable_certificate_validation = False {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.470037] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] glance.enable_rbd_download = False {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.470200] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] glance.endpoint_override = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.470367] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] glance.insecure = False {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.470529] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] glance.keyfile = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.470686] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] 
glance.max_version = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.470843] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] glance.min_version = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.471015] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] glance.num_retries = 3 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.471182] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] glance.rbd_ceph_conf = {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.471346] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] glance.rbd_connect_timeout = 5 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.471508] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] glance.rbd_pool = {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.471674] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] glance.rbd_user = {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.471829] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] glance.region_name = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.471988] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] glance.retriable_status_codes = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.472158] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] glance.service_name = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.472360] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] glance.service_type = image {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.472528] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] glance.split_loggers = False {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.472691] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] glance.status_code_retries = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.472848] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] glance.status_code_retry_delay = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.473011] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] glance.timeout = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.473242] 
env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] glance.valid_interfaces = ['internal', 'public'] {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.473423] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] glance.verify_glance_signatures = False {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.473585] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] glance.version = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.473751] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] guestfs.debug = False {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.473965] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] mks.enabled = False {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.474325] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] mks.mksproxy_base_url = http://127.0.0.1:6090/ {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.474528] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] image_cache.manager_interval = 2400 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.474701] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] image_cache.precache_concurrency = 1 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.474871] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] image_cache.remove_unused_base_images = True {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.475053] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] image_cache.remove_unused_original_minimum_age_seconds = 86400 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.475226] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] image_cache.remove_unused_resized_minimum_age_seconds = 3600 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.475487] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] image_cache.subdirectory_name = _base {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.475752] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] ironic.api_max_retries = 60 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.475925] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] ironic.api_retry_interval = 2 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 
501.476102] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] ironic.auth_section = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.476273] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] ironic.auth_type = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.476438] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] ironic.cafile = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.476595] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] ironic.certfile = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.476758] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] ironic.collect_timing = False {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.476920] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] ironic.conductor_group = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.477088] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] ironic.connect_retries = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.477251] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] ironic.connect_retry_delay = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.477406] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] ironic.endpoint_override = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.477566] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] ironic.insecure = False {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.477722] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] ironic.keyfile = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.477876] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] ironic.max_version = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.478048] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] ironic.min_version = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.478226] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] ironic.peer_list = [] {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.478386] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] ironic.region_name = None {{(pid=62070) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.478545] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] ironic.retriable_status_codes = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.478709] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] ironic.serial_console_state_timeout = 10 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.478865] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] ironic.service_name = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.479046] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] ironic.service_type = baremetal {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.479212] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] ironic.shard = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.479377] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] ironic.split_loggers = False {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.479539] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] ironic.status_code_retries = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.479697] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] ironic.status_code_retry_delay = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.479853] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] ironic.timeout = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.480043] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] ironic.valid_interfaces = ['internal', 'public'] {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.480209] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] ironic.version = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.480394] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] key_manager.backend = nova.keymgr.conf_key_mgr.ConfKeyManager {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.480564] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] key_manager.fixed_key = **** {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.480741] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] barbican.auth_endpoint = http://localhost/identity/v3 {{(pid=62070) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.480901] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] barbican.barbican_api_version = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.481069] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] barbican.barbican_endpoint = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.481244] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] barbican.barbican_endpoint_type = public {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.481407] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] barbican.barbican_region_name = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.481564] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] barbican.cafile = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.481723] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] barbican.certfile = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.481885] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] barbican.collect_timing = False {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.482053] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] barbican.insecure = False {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.482214] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] barbican.keyfile = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.482380] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] barbican.number_of_retries = 60 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.482539] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] barbican.retry_delay = 1 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.482699] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] barbican.send_service_user_token = False {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.482858] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] barbican.split_loggers = False {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.483032] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] barbican.timeout = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.483231] 
env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] barbican.verify_ssl = True {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.483402] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] barbican.verify_ssl_path = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.483570] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] barbican_service_user.auth_section = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.483732] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] barbican_service_user.auth_type = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.483887] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] barbican_service_user.cafile = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.484075] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] barbican_service_user.certfile = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.484278] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] barbican_service_user.collect_timing = False {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.484505] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] barbican_service_user.insecure = False {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.484680] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] barbican_service_user.keyfile = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.484845] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] barbican_service_user.split_loggers = False {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.485009] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] barbican_service_user.timeout = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.485186] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] vault.approle_role_id = **** {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.485348] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] vault.approle_secret_id = **** {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.485678] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] vault.kv_mountpoint = secret {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.485900] env[62070]: DEBUG 
oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] vault.kv_path = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.486089] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] vault.kv_version = 2 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.486257] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] vault.namespace = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.486421] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] vault.root_token_id = **** {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.486578] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] vault.ssl_ca_crt_file = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.486746] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] vault.timeout = 60.0 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.486911] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] vault.use_ssl = False {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.487090] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] vault.vault_url = http://127.0.0.1:8200 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.487271] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] keystone.auth_section = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.487435] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] keystone.auth_type = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.487594] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] keystone.cafile = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.487750] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] keystone.certfile = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.487911] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] keystone.collect_timing = False {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.488093] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] keystone.connect_retries = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.488261] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] keystone.connect_retry_delay = None {{(pid=62070) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.488420] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] keystone.endpoint_override = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.488581] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] keystone.insecure = False {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.488735] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] keystone.keyfile = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.488890] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] keystone.max_version = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.489056] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] keystone.min_version = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.489217] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] keystone.region_name = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.489376] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] keystone.retriable_status_codes = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.489530] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] keystone.service_name = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.489696] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] keystone.service_type = identity {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.489857] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] keystone.split_loggers = False {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.490019] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] keystone.status_code_retries = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.490179] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] keystone.status_code_retry_delay = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.490343] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] keystone.timeout = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.490519] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] keystone.valid_interfaces = ['internal', 'public'] {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 
501.490677] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] keystone.version = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.490873] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] libvirt.connection_uri = {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.491044] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] libvirt.cpu_mode = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.491216] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] libvirt.cpu_model_extra_flags = [] {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.491386] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] libvirt.cpu_models = [] {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.491555] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] libvirt.cpu_power_governor_high = performance {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.491720] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] libvirt.cpu_power_governor_low = powersave {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.491881] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] libvirt.cpu_power_management = False {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.492057] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] libvirt.cpu_power_management_strategy = cpu_state {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.492227] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] libvirt.device_detach_attempts = 8 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.492390] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] libvirt.device_detach_timeout = 20 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.492558] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] libvirt.disk_cachemodes = [] {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.492715] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] libvirt.disk_prefix = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.492879] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] libvirt.enabled_perf_events = [] {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.493062] env[62070]: DEBUG oslo_service.service [None 
req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] libvirt.file_backed_memory = 0 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.493269] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] libvirt.gid_maps = [] {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.493443] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] libvirt.hw_disk_discard = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.493605] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] libvirt.hw_machine_type = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.493782] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] libvirt.images_rbd_ceph_conf = {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.493953] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] libvirt.images_rbd_glance_copy_poll_interval = 15 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.494174] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] libvirt.images_rbd_glance_copy_timeout = 600 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.494354] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] libvirt.images_rbd_glance_store_name = {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.494549] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] libvirt.images_rbd_pool = rbd {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.494718] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] libvirt.images_type = default {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.494874] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] libvirt.images_volume_group = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.495045] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] libvirt.inject_key = False {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.495212] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] libvirt.inject_partition = -2 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.495375] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] libvirt.inject_password = False {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.495537] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] 
libvirt.iscsi_iface = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.495891] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] libvirt.iser_use_multipath = False {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.496115] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] libvirt.live_migration_bandwidth = 0 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.496367] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] libvirt.live_migration_completion_timeout = 800 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.496556] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] libvirt.live_migration_downtime = 500 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.496725] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] libvirt.live_migration_downtime_delay = 75 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.496889] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] libvirt.live_migration_downtime_steps = 10 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.497060] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] libvirt.live_migration_inbound_addr = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.497227] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] libvirt.live_migration_permit_auto_converge = False {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.497446] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] libvirt.live_migration_permit_post_copy = False {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.497633] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] libvirt.live_migration_scheme = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.497812] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] libvirt.live_migration_timeout_action = abort {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.497983] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] libvirt.live_migration_tunnelled = False {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.498174] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] libvirt.live_migration_uri = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.498347] env[62070]: DEBUG oslo_service.service [None 
req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] libvirt.live_migration_with_native_tls = False {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.498508] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] libvirt.max_queues = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.498674] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] libvirt.mem_stats_period_seconds = 10 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.498910] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] libvirt.migration_inbound_addr = 10.180.1.21 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.499087] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] libvirt.nfs_mount_options = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.499391] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] libvirt.nfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.499568] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] libvirt.num_aoe_discover_tries = 3 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.499736] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] libvirt.num_iser_scan_tries = 5 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.499899] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] libvirt.num_memory_encrypted_guests = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.500075] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] libvirt.num_nvme_discover_tries = 5 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.500241] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] libvirt.num_pcie_ports = 0 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.500408] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] libvirt.num_volume_scan_tries = 5 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.500575] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] libvirt.pmem_namespaces = [] {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.500732] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] libvirt.quobyte_client_cfg = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.501025] env[62070]: DEBUG oslo_service.service [None 
req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] libvirt.quobyte_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.501204] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] libvirt.rbd_connect_timeout = 5 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.501373] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] libvirt.rbd_destroy_volume_retries = 12 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.501536] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] libvirt.rbd_destroy_volume_retry_interval = 5 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.501695] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] libvirt.rbd_secret_uuid = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.501852] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] libvirt.rbd_user = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.502023] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] libvirt.realtime_scheduler_priority = 1 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.502197] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] libvirt.remote_filesystem_transport = ssh {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.502356] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] libvirt.rescue_image_id = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.502511] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] libvirt.rescue_kernel_id = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.502666] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] libvirt.rescue_ramdisk_id = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.502830] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] libvirt.rng_dev_path = /dev/urandom {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.502984] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] libvirt.rx_queue_size = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.503192] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] libvirt.smbfs_mount_options = {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.503483] env[62070]: DEBUG oslo_service.service [None 
req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] libvirt.smbfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.503657] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] libvirt.snapshot_compression = False {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.503819] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] libvirt.snapshot_image_format = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.504050] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] libvirt.snapshots_directory = /opt/stack/data/nova/instances/snapshots {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.504278] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] libvirt.sparse_logical_volumes = False {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.504435] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] libvirt.swtpm_enabled = False {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.504613] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] libvirt.swtpm_group = tss {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.504781] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] libvirt.swtpm_user = tss {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.504949] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] libvirt.sysinfo_serial = unique {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.505121] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] libvirt.tb_cache_size = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.505280] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] libvirt.tx_queue_size = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.505450] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] libvirt.uid_maps = [] {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.505610] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] libvirt.use_virtio_for_bridges = True {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.505869] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] libvirt.virt_type = kvm {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.506077] env[62070]: DEBUG oslo_service.service [None 
req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] libvirt.volume_clear = zero {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.506252] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] libvirt.volume_clear_size = 0 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.506424] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] libvirt.volume_use_multipath = False {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.506586] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] libvirt.vzstorage_cache_path = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.506757] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] libvirt.vzstorage_log_path = /var/log/vstorage/%(cluster_name)s/nova.log.gz {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.506927] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] libvirt.vzstorage_mount_group = qemu {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.507103] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] libvirt.vzstorage_mount_opts = [] {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.507276] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] libvirt.vzstorage_mount_perms = 0770 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.507552] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] libvirt.vzstorage_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.507728] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] libvirt.vzstorage_mount_user = stack {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.507934] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] libvirt.wait_soft_reboot_seconds = 120 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.508178] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] neutron.auth_section = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.508367] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] neutron.auth_type = password {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.508531] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] neutron.cafile = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.508690] env[62070]: DEBUG oslo_service.service 
[None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] neutron.certfile = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.508854] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] neutron.collect_timing = False {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.509029] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] neutron.connect_retries = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.509289] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] neutron.connect_retry_delay = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.509361] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] neutron.default_floating_pool = public {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.509519] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] neutron.endpoint_override = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.509682] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] neutron.extension_sync_interval = 600 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.509842] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] neutron.http_retries = 3 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.509999] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] neutron.insecure = False {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.510173] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] neutron.keyfile = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.510334] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] neutron.max_version = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.510500] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] neutron.metadata_proxy_shared_secret = **** {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.510655] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] neutron.min_version = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.510821] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] neutron.ovs_bridge = br-int {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.510983] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] neutron.physnets = [] {{(pid=62070) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.511164] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] neutron.region_name = RegionOne {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.511327] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] neutron.retriable_status_codes = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.511494] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] neutron.service_metadata_proxy = True {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.511653] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] neutron.service_name = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.511820] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] neutron.service_type = network {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.511982] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] neutron.split_loggers = False {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.512152] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] neutron.status_code_retries = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.512313] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] neutron.status_code_retry_delay = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.512470] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] neutron.timeout = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.512651] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] neutron.valid_interfaces = ['internal', 'public'] {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.512808] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] neutron.version = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.512975] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] notifications.bdms_in_notifications = False {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.513194] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] notifications.default_level = INFO {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.513388] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] notifications.notification_format = unversioned {{(pid=62070) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.513550] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] notifications.notify_on_state_change = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.513725] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] notifications.versioned_notifications_topics = ['versioned_notifications'] {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.513898] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] pci.alias = [] {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.514081] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] pci.device_spec = [] {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.514297] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] pci.report_in_placement = False {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.514561] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] placement.auth_section = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.514761] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] placement.auth_type = password {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.514930] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] placement.auth_url = http://10.180.1.21/identity {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.515105] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] placement.cafile = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.515265] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] placement.certfile = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.515431] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] placement.collect_timing = False {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.515588] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] placement.connect_retries = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.515745] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] placement.connect_retry_delay = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.515905] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] placement.default_domain_id = None {{(pid=62070) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.516173] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] placement.default_domain_name = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.516343] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] placement.domain_id = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.516502] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] placement.domain_name = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.516661] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] placement.endpoint_override = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.516822] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] placement.insecure = False {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.516976] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] placement.keyfile = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.517144] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] placement.max_version = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.517364] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] placement.min_version = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.517645] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] placement.password = **** {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.517920] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] placement.project_domain_id = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.518241] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] placement.project_domain_name = Default {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.518541] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] placement.project_id = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.518866] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] placement.project_name = service {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.519195] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] placement.region_name = RegionOne {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.519514] 
env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] placement.retriable_status_codes = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.519801] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] placement.service_name = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.520083] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] placement.service_type = placement {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.520286] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] placement.split_loggers = False {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.520456] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] placement.status_code_retries = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.520621] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] placement.status_code_retry_delay = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.520785] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] placement.system_scope = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.520945] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] placement.timeout = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.521117] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] placement.trust_id = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.521282] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] placement.user_domain_id = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.521455] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] placement.user_domain_name = Default {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.521616] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] placement.user_id = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.521787] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] placement.username = nova {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.521968] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] placement.valid_interfaces = ['internal', 'public'] {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.522142] env[62070]: DEBUG oslo_service.service [None 
req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] placement.version = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.522324] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] quota.cores = 20 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.522487] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] quota.count_usage_from_placement = False {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.522657] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] quota.driver = nova.quota.DbQuotaDriver {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.522833] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] quota.injected_file_content_bytes = 10240 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.523008] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] quota.injected_file_path_length = 255 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.523217] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] quota.injected_files = 5 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.523400] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] quota.instances = 10 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.523569] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] quota.key_pairs = 100 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.523735] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] quota.metadata_items = 128 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.523901] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] quota.ram = 51200 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.524078] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] quota.recheck_quota = True {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.524287] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] quota.server_group_members = 10 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.524488] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] quota.server_groups = 10 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.524651] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] scheduler.discover_hosts_in_cells_interval = -1 {{(pid=62070) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.524816] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] scheduler.enable_isolated_aggregate_filtering = False {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.524978] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] scheduler.image_metadata_prefilter = False {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.525154] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] scheduler.limit_tenants_to_placement_aggregate = False {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.525321] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] scheduler.max_attempts = 3 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.525482] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] scheduler.max_placement_results = 1000 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.525647] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] scheduler.placement_aggregate_required_for_tenants = False {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.525804] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] scheduler.query_placement_for_image_type_support = False {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.525971] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] scheduler.query_placement_for_routed_network_aggregates = False {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.526236] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] scheduler.workers = 2 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.526423] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] filter_scheduler.aggregate_image_properties_isolation_namespace = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.526596] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] filter_scheduler.aggregate_image_properties_isolation_separator = . 
{{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.526778] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] filter_scheduler.available_filters = ['nova.scheduler.filters.all_filters'] {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.526950] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] filter_scheduler.build_failure_weight_multiplier = 1000000.0 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.527130] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] filter_scheduler.cpu_weight_multiplier = 1.0 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.527300] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] filter_scheduler.cross_cell_move_weight_multiplier = 1000000.0 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.527466] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] filter_scheduler.disk_weight_multiplier = 1.0 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.527660] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] filter_scheduler.enabled_filters = ['ComputeFilter', 'ComputeCapabilitiesFilter', 'ImagePropertiesFilter', 'ServerGroupAntiAffinityFilter', 'ServerGroupAffinityFilter', 'SameHostFilter', 'DifferentHostFilter'] {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.527831] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] filter_scheduler.host_subset_size = 1 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.527998] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] filter_scheduler.hypervisor_version_weight_multiplier = 1.0 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.528195] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] filter_scheduler.image_properties_default_architecture = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.528367] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] filter_scheduler.io_ops_weight_multiplier = -1.0 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.528532] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] filter_scheduler.isolated_hosts = [] {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.528694] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] filter_scheduler.isolated_images = [] {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.528856] env[62070]: DEBUG oslo_service.service [None 
req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] filter_scheduler.max_instances_per_host = 50 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.529024] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] filter_scheduler.max_io_ops_per_host = 8 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.529192] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] filter_scheduler.num_instances_weight_multiplier = 0.0 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.529354] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] filter_scheduler.pci_in_placement = False {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.529513] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] filter_scheduler.pci_weight_multiplier = 1.0 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.529677] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] filter_scheduler.ram_weight_multiplier = 1.0 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.529835] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] filter_scheduler.restrict_isolated_hosts_to_isolated_images = True {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.529992] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] filter_scheduler.shuffle_best_same_weighed_hosts = False {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.530167] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] filter_scheduler.soft_affinity_weight_multiplier = 1.0 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.530331] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] filter_scheduler.soft_anti_affinity_weight_multiplier = 1.0 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.530490] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] filter_scheduler.track_instance_changes = True {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.530667] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] filter_scheduler.weight_classes = ['nova.scheduler.weights.all_weighers'] {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.530835] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] metrics.required = True {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.530998] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] metrics.weight_multiplier = 1.0 
{{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.531172] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] metrics.weight_of_unavailable = -10000.0 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.531339] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] metrics.weight_setting = [] {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.531652] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] serial_console.base_url = ws://127.0.0.1:6083/ {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.531823] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] serial_console.enabled = False {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.531998] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] serial_console.port_range = 10000:20000 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.532179] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] serial_console.proxyclient_address = 127.0.0.1 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.532379] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] serial_console.serialproxy_host = 0.0.0.0 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.532553] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] serial_console.serialproxy_port = 6083 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.532723] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] service_user.auth_section = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.532895] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] service_user.auth_type = password {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.533078] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] service_user.cafile = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.533270] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] service_user.certfile = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.533445] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] service_user.collect_timing = False {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.533608] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] service_user.insecure = False {{(pid=62070) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.533766] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] service_user.keyfile = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.533933] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] service_user.send_service_user_token = True {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.534111] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] service_user.split_loggers = False {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.534305] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] service_user.timeout = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.534490] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] spice.agent_enabled = True {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.534658] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] spice.enabled = False {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.534968] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] spice.html5proxy_base_url = http://127.0.0.1:6082/spice_auto.html {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.535178] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] spice.html5proxy_host = 0.0.0.0 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.535348] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] spice.html5proxy_port = 6082 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.535509] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] spice.image_compression = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.535666] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] spice.jpeg_compression = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.535822] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] spice.playback_compression = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.535983] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] spice.require_secure = False {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.536244] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] spice.server_listen = 127.0.0.1 {{(pid=62070) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.536442] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] spice.server_proxyclient_address = 127.0.0.1 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.536602] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] spice.streaming_mode = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.536761] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] spice.zlib_compression = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.536927] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] upgrade_levels.baseapi = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.537117] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] upgrade_levels.compute = auto {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.537282] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] upgrade_levels.conductor = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.537443] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] upgrade_levels.scheduler = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.537608] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] vendordata_dynamic_auth.auth_section = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.537769] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] vendordata_dynamic_auth.auth_type = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.537926] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] vendordata_dynamic_auth.cafile = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.538106] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] vendordata_dynamic_auth.certfile = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.538276] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] vendordata_dynamic_auth.collect_timing = False {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.538439] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] vendordata_dynamic_auth.insecure = False {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.538595] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] vendordata_dynamic_auth.keyfile = None {{(pid=62070) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.538756] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] vendordata_dynamic_auth.split_loggers = False {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.538912] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] vendordata_dynamic_auth.timeout = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.539099] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] vmware.api_retry_count = 10 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.539261] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] vmware.ca_file = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.539430] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] vmware.cache_prefix = devstack-image-cache {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.539595] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] vmware.cluster_name = testcl1 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.539759] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] vmware.connection_pool_size = 10 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.539918] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] vmware.console_delay_seconds = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.540096] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] vmware.datastore_regex = ^datastore.* {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.540310] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] vmware.host_ip = vc1.osci.c.eu-de-1.cloud.sap {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.540484] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] vmware.host_password = **** {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.540648] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] vmware.host_port = 443 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.540812] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] vmware.host_username = administrator@vsphere.local {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.540977] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] vmware.insecure = True {{(pid=62070) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.541150] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] vmware.integration_bridge = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.541315] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] vmware.maximum_objects = 100 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.541472] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] vmware.pbm_default_policy = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.541632] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] vmware.pbm_enabled = False {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.541789] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] vmware.pbm_wsdl_location = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.541958] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] vmware.serial_log_dir = /opt/vmware/vspc {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.542122] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] vmware.serial_port_proxy_uri = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.542290] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] vmware.serial_port_service_uri = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.542518] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] vmware.task_poll_interval = 0.5 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.542702] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] vmware.use_linked_clone = False {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.542873] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] vmware.vnc_keymap = en-us {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.543061] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] vmware.vnc_port = 5900 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.543251] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] vmware.vnc_port_total = 10000 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.543450] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] vnc.auth_schemes = ['none'] {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.543625] 
env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] vnc.enabled = False {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.543911] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] vnc.novncproxy_base_url = http://127.0.0.1:6080/vnc_auto.html {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.544105] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] vnc.novncproxy_host = 0.0.0.0 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.544291] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] vnc.novncproxy_port = 6080 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.544470] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] vnc.server_listen = 127.0.0.1 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.544649] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] vnc.server_proxyclient_address = 127.0.0.1 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.544804] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] vnc.vencrypt_ca_certs = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.544961] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] vnc.vencrypt_client_cert = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.545129] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] vnc.vencrypt_client_key = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.545312] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] workarounds.disable_compute_service_check_for_ffu = False {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.545477] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] workarounds.disable_deep_image_inspection = False {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.545636] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] workarounds.disable_fallback_pcpu_query = False {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.545795] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] workarounds.disable_group_policy_check_upcall = False {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.545955] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] workarounds.disable_libvirt_livesnapshot = False {{(pid=62070) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.546128] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] workarounds.disable_rootwrap = False {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.546361] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] workarounds.enable_numa_live_migration = False {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.546544] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] workarounds.enable_qemu_monitor_announce_self = False {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.546705] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] workarounds.ensure_libvirt_rbd_instance_dir_cleanup = False {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.546864] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] workarounds.handle_virt_lifecycle_events = True {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.547035] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] workarounds.libvirt_disable_apic = False {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.547199] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] workarounds.never_download_image_if_on_rbd = False {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.547364] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] workarounds.qemu_monitor_announce_self_count = 3 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.547523] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] workarounds.qemu_monitor_announce_self_interval = 1 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.547683] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] workarounds.reserve_disk_resource_for_image_cache = False {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.547841] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] workarounds.skip_cpu_compare_at_startup = False {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.547998] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] workarounds.skip_cpu_compare_on_dest = False {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.548182] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] workarounds.skip_hypervisor_version_check_on_lm = False {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.548341] 
env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] workarounds.skip_reserve_in_use_ironic_nodes = False {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.548501] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] workarounds.unified_limits_count_pcpu_as_vcpu = False {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.548683] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] workarounds.wait_for_vif_plugged_event_during_hard_reboot = [] {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.548843] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] wsgi.api_paste_config = /etc/nova/api-paste.ini {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.549026] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] wsgi.client_socket_timeout = 900 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.549188] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] wsgi.default_pool_size = 1000 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.549356] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] wsgi.keep_alive = True {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.549521] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] wsgi.max_header_line = 16384 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.549680] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] wsgi.secure_proxy_ssl_header = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.549838] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] wsgi.ssl_ca_file = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.549998] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] wsgi.ssl_cert_file = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.550171] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] wsgi.ssl_key_file = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.550337] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] wsgi.tcp_keepidle = 600 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.550512] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] wsgi.wsgi_log_format = %(client_ip)s "%(request_line)s" status: %(status_code)s len: %(body_length)s time: %(wall_seconds).7f {{(pid=62070) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.550677] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] zvm.ca_file = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.550832] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] zvm.cloud_connector_url = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.551130] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] zvm.image_tmp_path = /opt/stack/data/n-cpu-1/images {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.551308] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] zvm.reachable_timeout = 300 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.551492] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] oslo_policy.enforce_new_defaults = True {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.551886] env[62070]: WARNING oslo_config.cfg [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] Deprecated: Option "enforce_scope" from group "oslo_policy" is deprecated for removal (This configuration was added temporarily to facilitate a smooth transition to the new RBAC. OpenStack will always enforce scope checks. This configuration option is deprecated and will be removed in the 2025.2 cycle.). Its value may be silently ignored in the future. 
[ 501.552079] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] oslo_policy.enforce_scope = True {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.552259] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] oslo_policy.policy_default_rule = default {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.552445] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] oslo_policy.policy_dirs = ['policy.d'] {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.552619] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] oslo_policy.policy_file = policy.yaml {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.552794] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] oslo_policy.remote_content_type = application/x-www-form-urlencoded {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.552956] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] oslo_policy.remote_ssl_ca_crt_file = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.553160] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] oslo_policy.remote_ssl_client_crt_file = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.553318] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] oslo_policy.remote_ssl_client_key_file = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.553482] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] oslo_policy.remote_ssl_verify_server_crt = False {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.553650] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] oslo_versionedobjects.fatal_exception_format_errors = False {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.553824] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] oslo_middleware.http_basic_auth_user_file = /etc/htpasswd {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.553997] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] profiler.connection_string = messaging:// {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.554181] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] profiler.enabled = False {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.554377] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] profiler.es_doc_type = notification 
{{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.554547] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] profiler.es_scroll_size = 10000 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.554715] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] profiler.es_scroll_time = 2m {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.554875] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] profiler.filter_error_trace = False {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.555051] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] profiler.hmac_keys = **** {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.555224] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] profiler.sentinel_service_name = mymaster {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.555395] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] profiler.socket_timeout = 0.1 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.555560] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] profiler.trace_requests = False {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.555723] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] profiler.trace_sqlalchemy = False {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.555905] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] profiler_jaeger.process_tags = {} {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.556077] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] profiler_jaeger.service_name_prefix = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.556243] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] profiler_otlp.service_name_prefix = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.556497] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] remote_debug.host = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.556679] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] remote_debug.port = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.556862] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] oslo_messaging_rabbit.amqp_auto_delete = False {{(pid=62070) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.557038] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] oslo_messaging_rabbit.amqp_durable_queues = False {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.557208] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] oslo_messaging_rabbit.conn_pool_min_size = 2 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.557372] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] oslo_messaging_rabbit.conn_pool_ttl = 1200 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.557536] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] oslo_messaging_rabbit.direct_mandatory_flag = True {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.557696] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] oslo_messaging_rabbit.enable_cancel_on_failover = False {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.557856] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] oslo_messaging_rabbit.heartbeat_in_pthread = False {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.558035] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] oslo_messaging_rabbit.heartbeat_rate = 3 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.558209] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] oslo_messaging_rabbit.heartbeat_timeout_threshold = 60 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.558382] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] oslo_messaging_rabbit.hostname = devstack {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.558539] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] oslo_messaging_rabbit.kombu_compression = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.558707] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] oslo_messaging_rabbit.kombu_failover_strategy = round-robin {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.558871] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] oslo_messaging_rabbit.kombu_missing_consumer_retry_timeout = 60 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.559050] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] oslo_messaging_rabbit.kombu_reconnect_delay = 1.0 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.559223] 
env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] oslo_messaging_rabbit.processname = nova-compute {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.559390] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] oslo_messaging_rabbit.rabbit_ha_queues = False {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.559554] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] oslo_messaging_rabbit.rabbit_interval_max = 30 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.559724] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] oslo_messaging_rabbit.rabbit_login_method = AMQPLAIN {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.559887] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] oslo_messaging_rabbit.rabbit_qos_prefetch_count = 0 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.560057] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] oslo_messaging_rabbit.rabbit_quorum_delivery_limit = 0 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.560225] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_bytes = 0 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.560391] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_length = 0 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.560552] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] oslo_messaging_rabbit.rabbit_quorum_queue = False {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.560718] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] oslo_messaging_rabbit.rabbit_retry_backoff = 2 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.560879] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] oslo_messaging_rabbit.rabbit_retry_interval = 1 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.561051] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] oslo_messaging_rabbit.rabbit_stream_fanout = False {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.561216] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] oslo_messaging_rabbit.rabbit_transient_queues_ttl = 1800 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.561377] env[62070]: DEBUG oslo_service.service [None 
req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] oslo_messaging_rabbit.rabbit_transient_quorum_queue = False {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.561541] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] oslo_messaging_rabbit.rpc_conn_pool_size = 30 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.561703] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] oslo_messaging_rabbit.ssl = False {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.561870] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] oslo_messaging_rabbit.ssl_ca_file = {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.562046] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] oslo_messaging_rabbit.ssl_cert_file = {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.562212] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] oslo_messaging_rabbit.ssl_enforce_fips_mode = False {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.562383] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] oslo_messaging_rabbit.ssl_key_file = {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.562551] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] oslo_messaging_rabbit.ssl_version = {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.562713] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] oslo_messaging_rabbit.use_queue_manager = False {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.562899] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] oslo_messaging_notifications.driver = ['messagingv2'] {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.563087] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] oslo_messaging_notifications.retry = -1 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.563319] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] oslo_messaging_notifications.topics = ['notifications'] {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.563504] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] oslo_messaging_notifications.transport_url = **** {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.563678] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] oslo_limit.auth_section = None {{(pid=62070) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.563844] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] oslo_limit.auth_type = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.564014] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] oslo_limit.cafile = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.564173] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] oslo_limit.certfile = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.564358] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] oslo_limit.collect_timing = False {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.564524] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] oslo_limit.connect_retries = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.564684] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] oslo_limit.connect_retry_delay = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.564841] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] oslo_limit.endpoint_id = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.564997] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] oslo_limit.endpoint_override = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.565175] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] oslo_limit.insecure = False {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.565331] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] oslo_limit.keyfile = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.565486] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] oslo_limit.max_version = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.565642] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] oslo_limit.min_version = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.565796] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] oslo_limit.region_name = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.565954] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] oslo_limit.retriable_status_codes = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.566122] 
env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] oslo_limit.service_name = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.566279] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] oslo_limit.service_type = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.566462] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] oslo_limit.split_loggers = False {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.566695] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] oslo_limit.status_code_retries = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.566861] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] oslo_limit.status_code_retry_delay = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.567040] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] oslo_limit.timeout = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.567211] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] oslo_limit.valid_interfaces = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.567371] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] oslo_limit.version = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.567536] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] oslo_reports.file_event_handler = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.567700] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] oslo_reports.file_event_handler_interval = 1 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.567859] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] oslo_reports.log_dir = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.568043] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] vif_plug_linux_bridge_privileged.capabilities = [12] {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.568207] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] vif_plug_linux_bridge_privileged.group = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.568366] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] vif_plug_linux_bridge_privileged.helper_command = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.568531] 
env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] vif_plug_linux_bridge_privileged.logger_name = oslo_privsep.daemon {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.568693] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] vif_plug_linux_bridge_privileged.thread_pool_size = 8 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.568849] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] vif_plug_linux_bridge_privileged.user = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.569025] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] vif_plug_ovs_privileged.capabilities = [12, 1] {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.569189] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] vif_plug_ovs_privileged.group = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.569350] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] vif_plug_ovs_privileged.helper_command = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.569512] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] vif_plug_ovs_privileged.logger_name = oslo_privsep.daemon {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.569673] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] vif_plug_ovs_privileged.thread_pool_size = 8 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.569830] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] vif_plug_ovs_privileged.user = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.569997] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] os_vif_linux_bridge.flat_interface = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.570191] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] os_vif_linux_bridge.forward_bridge_interface = ['all'] {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.570367] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] os_vif_linux_bridge.iptables_bottom_regex = {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.570543] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] os_vif_linux_bridge.iptables_drop_action = DROP {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.570710] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] 
os_vif_linux_bridge.iptables_top_regex = {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.570876] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] os_vif_linux_bridge.network_device_mtu = 1500 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.571053] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] os_vif_linux_bridge.use_ipv6 = False {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.571220] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] os_vif_linux_bridge.vlan_interface = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.571400] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] os_vif_ovs.default_qos_type = linux-noop {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.571574] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] os_vif_ovs.isolate_vif = False {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.571738] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] os_vif_ovs.network_device_mtu = 1500 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.571907] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] os_vif_ovs.ovs_vsctl_timeout = 120 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.572096] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] os_vif_ovs.ovsdb_connection = tcp:127.0.0.1:6640 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.572274] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] os_vif_ovs.ovsdb_interface = native {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.572441] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] os_vif_ovs.per_port_bridge = False {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.572613] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] privsep_osbrick.capabilities = [21] {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.572771] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] privsep_osbrick.group = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.572928] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] privsep_osbrick.helper_command = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.573103] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None 
None] privsep_osbrick.logger_name = os_brick.privileged {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.573301] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] privsep_osbrick.thread_pool_size = 8 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.573468] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] privsep_osbrick.user = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.573643] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] nova_sys_admin.capabilities = [0, 1, 2, 3, 12, 21] {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.573802] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] nova_sys_admin.group = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.573957] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] nova_sys_admin.helper_command = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.574142] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] nova_sys_admin.logger_name = oslo_privsep.daemon {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.574332] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] nova_sys_admin.thread_pool_size = 8 {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.574496] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] nova_sys_admin.user = None {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 501.574630] env[62070]: DEBUG oslo_service.service [None req-92d635fd-25f5-4677-9892-a89d0b8aa867 None None] ******************************************************************************** {{(pid=62070) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2830}} [ 501.575157] env[62070]: INFO nova.service [-] Starting compute node (version 30.1.0) [ 502.078193] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-475a1031-11e9-4b1e-be57-091e75f33f70 None None] Getting list of instances from cluster (obj){ [ 502.078193] env[62070]: value = "domain-c8" [ 502.078193] env[62070]: _type = "ClusterComputeResource" [ 502.078193] env[62070]: } {{(pid=62070) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 502.079365] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a99d7906-8da5-4729-b064-74160fb9f3ef {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 502.088141] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-475a1031-11e9-4b1e-be57-091e75f33f70 None None] Got total of 0 instances {{(pid=62070) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 502.088714] env[62070]: WARNING nova.virt.vmwareapi.driver [None req-475a1031-11e9-4b1e-be57-091e75f33f70 None None] 
The vmwareapi driver is not tested by the OpenStack project nor does it have clear maintainer(s) and thus its quality can not be ensured. It should be considered experimental and may be removed in a future release. If you are using the driver in production please let us know via the openstack-discuss mailing list.
[ 502.089173] env[62070]: INFO nova.virt.node [None req-475a1031-11e9-4b1e-be57-091e75f33f70 None None] Generated node identity 21c7c111-1b69-4468-b2c4-5dd96014fbd6
[ 502.089408] env[62070]: INFO nova.virt.node [None req-475a1031-11e9-4b1e-be57-091e75f33f70 None None] Wrote node identity 21c7c111-1b69-4468-b2c4-5dd96014fbd6 to /opt/stack/data/n-cpu-1/compute_id
[ 502.592065] env[62070]: WARNING nova.compute.manager [None req-475a1031-11e9-4b1e-be57-091e75f33f70 None None] Compute nodes ['21c7c111-1b69-4468-b2c4-5dd96014fbd6'] for host cpu-1 were not found in the database. If this is the first time this service is starting on this host, then you can ignore this warning.
[ 503.598365] env[62070]: INFO nova.compute.manager [None req-475a1031-11e9-4b1e-be57-091e75f33f70 None None] Looking for unclaimed instances stuck in BUILDING status for nodes managed by this host
[ 504.603519] env[62070]: WARNING nova.compute.manager [None req-475a1031-11e9-4b1e-be57-091e75f33f70 None None] No compute node record found for host cpu-1. If this is the first time this service is starting on this host, then you can ignore this warning.: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host cpu-1 could not be found.
[ 504.603866] env[62070]: DEBUG oslo_concurrency.lockutils [None req-475a1031-11e9-4b1e-be57-091e75f33f70 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 504.604061] env[62070]: DEBUG oslo_concurrency.lockutils [None req-475a1031-11e9-4b1e-be57-091e75f33f70 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 504.604216] env[62070]: DEBUG oslo_concurrency.lockutils [None req-475a1031-11e9-4b1e-be57-091e75f33f70 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 504.604386] env[62070]: DEBUG nova.compute.resource_tracker [None req-475a1031-11e9-4b1e-be57-091e75f33f70 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62070) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}}
[ 504.605322] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd4f3a41-2350-43c2-876b-fd6cc2f272b5 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 504.613699] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97168767-5b01-4278-8c0f-fd4319ee61f9 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 504.626892] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d49ec93-09ce-41ec-a134-babf977c2ef8 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 504.632798] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4efc9587-3dab-412d-9b70-5088d4b43b51 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 504.661356] env[62070]: DEBUG nova.compute.resource_tracker [None req-475a1031-11e9-4b1e-be57-091e75f33f70 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181578MB free_disk=169GB free_vcpus=48 pci_devices=None {{(pid=62070) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}}
[ 504.661495] env[62070]: DEBUG oslo_concurrency.lockutils [None req-475a1031-11e9-4b1e-be57-091e75f33f70 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 504.661669] env[62070]: DEBUG oslo_concurrency.lockutils [None req-475a1031-11e9-4b1e-be57-091e75f33f70 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 505.163792] env[62070]: WARNING nova.compute.resource_tracker [None req-475a1031-11e9-4b1e-be57-091e75f33f70 None None] No compute node record for cpu-1:21c7c111-1b69-4468-b2c4-5dd96014fbd6: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host 21c7c111-1b69-4468-b2c4-5dd96014fbd6 could not be found.
[ 505.667817] env[62070]: INFO nova.compute.resource_tracker [None req-475a1031-11e9-4b1e-be57-091e75f33f70 None None] Compute node record created for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 with uuid: 21c7c111-1b69-4468-b2c4-5dd96014fbd6
[ 507.176704] env[62070]: DEBUG nova.compute.resource_tracker [None req-475a1031-11e9-4b1e-be57-091e75f33f70 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=62070) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}}
[ 507.177049] env[62070]: DEBUG nova.compute.resource_tracker [None req-475a1031-11e9-4b1e-be57-091e75f33f70 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=62070) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}}
[ 507.327366] env[62070]: INFO nova.scheduler.client.report [None req-475a1031-11e9-4b1e-be57-091e75f33f70 None None] [req-2e4e8020-2d23-4d22-a87a-90fa952bd7e9] Created resource provider record via placement API for resource provider with UUID 21c7c111-1b69-4468-b2c4-5dd96014fbd6 and name domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28.
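Note on the inventory records logged below for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6: each resource class carries not just a total but also reserved, max_unit and allocation_ratio fields. As a rough illustration only (a hypothetical sketch following the usual Placement convention, not Nova or Placement source code), the capacity the scheduler can actually consume from such a record works out to (total - reserved) * allocation_ratio:

# Hypothetical helper for illustration; not Nova or Placement code.
# It reproduces the headline numbers from the inventory logged below for
# provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6.
def effective_capacity(total, reserved=0, allocation_ratio=1.0, **_unused):
    """Capacity the scheduler may consume: (total - reserved) * allocation_ratio."""
    return int((total - reserved) * allocation_ratio)

# Values copied from the inventory payload in this log.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0, 'max_unit': 16},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0, 'max_unit': 65530},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0, 'max_unit': 169},
}

for resource_class, record in inventory.items():
    print(resource_class, effective_capacity(**record))
# VCPU 192, MEMORY_MB 196078, DISK_GB 400

The max_unit field, by contrast, caps what a single allocation may request, so no one instance on this provider could claim more than 16 VCPU or 169 GB of DISK_GB even though the aggregate capacity is larger.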
[ 507.344162] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b3e54c9-e4e1-45f3-a4a9-34a4e091c58a {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 507.351624] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cccfbc9a-5d95-49a8-ba38-ae2da1b96fe8 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 507.380678] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce2f048b-378d-43fd-8f4b-9b0d0db07e5b {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 507.387263] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1ab19f1-264c-4773-b8ef-63b2e338dbc7 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 507.399614] env[62070]: DEBUG nova.compute.provider_tree [None req-475a1031-11e9-4b1e-be57-091e75f33f70 None None] Updating inventory in ProviderTree for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 507.936526] env[62070]: DEBUG nova.scheduler.client.report [None req-475a1031-11e9-4b1e-be57-091e75f33f70 None None] Updated inventory for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 with generation 0 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 507.936767] env[62070]: DEBUG nova.compute.provider_tree [None req-475a1031-11e9-4b1e-be57-091e75f33f70 None None] Updating resource provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 generation from 0 to 1 during operation: update_inventory {{(pid=62070) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 507.936908] env[62070]: DEBUG nova.compute.provider_tree [None req-475a1031-11e9-4b1e-be57-091e75f33f70 None None] Updating inventory in ProviderTree for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 507.992356] env[62070]: DEBUG nova.compute.provider_tree [None req-475a1031-11e9-4b1e-be57-091e75f33f70 None None] Updating 
resource provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 generation from 1 to 2 during operation: update_traits {{(pid=62070) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 508.496829] env[62070]: DEBUG nova.compute.resource_tracker [None req-475a1031-11e9-4b1e-be57-091e75f33f70 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62070) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 508.497174] env[62070]: DEBUG oslo_concurrency.lockutils [None req-475a1031-11e9-4b1e-be57-091e75f33f70 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 3.835s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 508.497295] env[62070]: DEBUG nova.service [None req-475a1031-11e9-4b1e-be57-091e75f33f70 None None] Creating RPC server for service compute {{(pid=62070) start /opt/stack/nova/nova/service.py:186}} [ 508.511183] env[62070]: DEBUG nova.service [None req-475a1031-11e9-4b1e-be57-091e75f33f70 None None] Join ServiceGroup membership for this service compute {{(pid=62070) start /opt/stack/nova/nova/service.py:203}} [ 508.511364] env[62070]: DEBUG nova.servicegroup.drivers.db [None req-475a1031-11e9-4b1e-be57-091e75f33f70 None None] DB_Driver: join new ServiceGroup member cpu-1 to the compute group, service = {{(pid=62070) join /opt/stack/nova/nova/servicegroup/drivers/db.py:44}} [ 541.513511] env[62070]: DEBUG oslo_concurrency.lockutils [None req-29cb1636-fccd-4431-8dd2-c1cf16ef9eb0 tempest-ServerDiagnosticsV248Test-1284349500 tempest-ServerDiagnosticsV248Test-1284349500-project-member] Acquiring lock "283e7488-1240-475f-a74d-809251950774" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 541.513853] env[62070]: DEBUG oslo_concurrency.lockutils [None req-29cb1636-fccd-4431-8dd2-c1cf16ef9eb0 tempest-ServerDiagnosticsV248Test-1284349500 tempest-ServerDiagnosticsV248Test-1284349500-project-member] Lock "283e7488-1240-475f-a74d-809251950774" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 542.016792] env[62070]: DEBUG nova.compute.manager [None req-29cb1636-fccd-4431-8dd2-c1cf16ef9eb0 tempest-ServerDiagnosticsV248Test-1284349500 tempest-ServerDiagnosticsV248Test-1284349500-project-member] [instance: 283e7488-1240-475f-a74d-809251950774] Starting instance... 
{{(pid=62070) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 542.513177] env[62070]: DEBUG oslo_service.periodic_task [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Running periodic task ComputeManager._sync_power_states {{(pid=62070) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 542.570011] env[62070]: DEBUG oslo_concurrency.lockutils [None req-29cb1636-fccd-4431-8dd2-c1cf16ef9eb0 tempest-ServerDiagnosticsV248Test-1284349500 tempest-ServerDiagnosticsV248Test-1284349500-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 542.570379] env[62070]: DEBUG oslo_concurrency.lockutils [None req-29cb1636-fccd-4431-8dd2-c1cf16ef9eb0 tempest-ServerDiagnosticsV248Test-1284349500 tempest-ServerDiagnosticsV248Test-1284349500-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 542.572369] env[62070]: INFO nova.compute.claims [None req-29cb1636-fccd-4431-8dd2-c1cf16ef9eb0 tempest-ServerDiagnosticsV248Test-1284349500 tempest-ServerDiagnosticsV248Test-1284349500-project-member] [instance: 283e7488-1240-475f-a74d-809251950774] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 543.017438] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Getting list of instances from cluster (obj){ [ 543.017438] env[62070]: value = "domain-c8" [ 543.017438] env[62070]: _type = "ClusterComputeResource" [ 543.017438] env[62070]: } {{(pid=62070) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 543.018683] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9f90d3e-5c22-4c23-ba91-2bf899257f97 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 543.028161] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Got total of 0 instances {{(pid=62070) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 543.028161] env[62070]: DEBUG oslo_service.periodic_task [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Running periodic task ComputeManager._cleanup_running_deleted_instances {{(pid=62070) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 543.028161] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Getting list of instances from cluster (obj){ [ 543.028161] env[62070]: value = "domain-c8" [ 543.028161] env[62070]: _type = "ClusterComputeResource" [ 543.028161] env[62070]: } {{(pid=62070) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 543.029311] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4abae58e-3622-4ce8-9260-2dc9a0f0b9cd {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 543.041523] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Got total of 0 instances {{(pid=62070) list_instances 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 543.636320] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b469fad-42b1-4658-9b9f-ae3ed228cf8c {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 543.646251] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d99c3af7-9a70-4626-b94d-920ab5a672ac {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 543.694443] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80720735-1d03-4885-814e-7770d8cc5ac6 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 543.703455] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a11c76d2-51e3-48bf-8681-49432c8b22ec {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 543.717647] env[62070]: DEBUG nova.compute.provider_tree [None req-29cb1636-fccd-4431-8dd2-c1cf16ef9eb0 tempest-ServerDiagnosticsV248Test-1284349500 tempest-ServerDiagnosticsV248Test-1284349500-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 544.220965] env[62070]: DEBUG nova.scheduler.client.report [None req-29cb1636-fccd-4431-8dd2-c1cf16ef9eb0 tempest-ServerDiagnosticsV248Test-1284349500 tempest-ServerDiagnosticsV248Test-1284349500-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 544.729020] env[62070]: DEBUG oslo_concurrency.lockutils [None req-29cb1636-fccd-4431-8dd2-c1cf16ef9eb0 tempest-ServerDiagnosticsV248Test-1284349500 tempest-ServerDiagnosticsV248Test-1284349500-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.156s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 544.729020] env[62070]: DEBUG nova.compute.manager [None req-29cb1636-fccd-4431-8dd2-c1cf16ef9eb0 tempest-ServerDiagnosticsV248Test-1284349500 tempest-ServerDiagnosticsV248Test-1284349500-project-member] [instance: 283e7488-1240-475f-a74d-809251950774] Start building networks asynchronously for instance. 
{{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 545.081912] env[62070]: DEBUG oslo_concurrency.lockutils [None req-0bc5a0f5-5f77-4994-b513-243b307e9e97 tempest-ServersAdminNegativeTestJSON-1932509401 tempest-ServersAdminNegativeTestJSON-1932509401-project-member] Acquiring lock "9688a493-7046-49e1-b5ab-0db9cfbf37aa" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 545.082226] env[62070]: DEBUG oslo_concurrency.lockutils [None req-0bc5a0f5-5f77-4994-b513-243b307e9e97 tempest-ServersAdminNegativeTestJSON-1932509401 tempest-ServersAdminNegativeTestJSON-1932509401-project-member] Lock "9688a493-7046-49e1-b5ab-0db9cfbf37aa" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 545.238391] env[62070]: DEBUG nova.compute.utils [None req-29cb1636-fccd-4431-8dd2-c1cf16ef9eb0 tempest-ServerDiagnosticsV248Test-1284349500 tempest-ServerDiagnosticsV248Test-1284349500-project-member] Using /dev/sd instead of None {{(pid=62070) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 545.244216] env[62070]: DEBUG nova.compute.manager [None req-29cb1636-fccd-4431-8dd2-c1cf16ef9eb0 tempest-ServerDiagnosticsV248Test-1284349500 tempest-ServerDiagnosticsV248Test-1284349500-project-member] [instance: 283e7488-1240-475f-a74d-809251950774] Not allocating networking since 'none' was specified. {{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1977}} [ 545.585518] env[62070]: DEBUG nova.compute.manager [None req-0bc5a0f5-5f77-4994-b513-243b307e9e97 tempest-ServersAdminNegativeTestJSON-1932509401 tempest-ServersAdminNegativeTestJSON-1932509401-project-member] [instance: 9688a493-7046-49e1-b5ab-0db9cfbf37aa] Starting instance... {{(pid=62070) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 545.746313] env[62070]: DEBUG nova.compute.manager [None req-29cb1636-fccd-4431-8dd2-c1cf16ef9eb0 tempest-ServerDiagnosticsV248Test-1284349500 tempest-ServerDiagnosticsV248Test-1284349500-project-member] [instance: 283e7488-1240-475f-a74d-809251950774] Start building block device mappings for instance. 
{{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 546.129357] env[62070]: DEBUG oslo_concurrency.lockutils [None req-0bc5a0f5-5f77-4994-b513-243b307e9e97 tempest-ServersAdminNegativeTestJSON-1932509401 tempest-ServersAdminNegativeTestJSON-1932509401-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 546.129357] env[62070]: DEBUG oslo_concurrency.lockutils [None req-0bc5a0f5-5f77-4994-b513-243b307e9e97 tempest-ServersAdminNegativeTestJSON-1932509401 tempest-ServersAdminNegativeTestJSON-1932509401-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 546.140141] env[62070]: INFO nova.compute.claims [None req-0bc5a0f5-5f77-4994-b513-243b307e9e97 tempest-ServersAdminNegativeTestJSON-1932509401 tempest-ServersAdminNegativeTestJSON-1932509401-project-member] [instance: 9688a493-7046-49e1-b5ab-0db9cfbf37aa] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 546.591802] env[62070]: DEBUG oslo_concurrency.lockutils [None req-0ad29952-ef64-49f7-9793-5bdd8a8330c4 tempest-ServerExternalEventsTest-1235707480 tempest-ServerExternalEventsTest-1235707480-project-member] Acquiring lock "7221a720-8ab9-44fd-abe2-8f8fc19b6433" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 546.592197] env[62070]: DEBUG oslo_concurrency.lockutils [None req-0ad29952-ef64-49f7-9793-5bdd8a8330c4 tempest-ServerExternalEventsTest-1235707480 tempest-ServerExternalEventsTest-1235707480-project-member] Lock "7221a720-8ab9-44fd-abe2-8f8fc19b6433" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.002s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 546.763261] env[62070]: DEBUG nova.compute.manager [None req-29cb1636-fccd-4431-8dd2-c1cf16ef9eb0 tempest-ServerDiagnosticsV248Test-1284349500 tempest-ServerDiagnosticsV248Test-1284349500-project-member] [instance: 283e7488-1240-475f-a74d-809251950774] Start spawning the instance on the hypervisor. 
{{(pid=62070) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 547.079945] env[62070]: DEBUG oslo_concurrency.lockutils [None req-abc3cfd8-5b88-48b6-95da-0d8d4239d4c5 tempest-ServersV294TestFqdnHostnames-2059713138 tempest-ServersV294TestFqdnHostnames-2059713138-project-member] Acquiring lock "87462fbe-d62d-4b40-880c-a1785c9ed5d4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 547.080241] env[62070]: DEBUG oslo_concurrency.lockutils [None req-abc3cfd8-5b88-48b6-95da-0d8d4239d4c5 tempest-ServersV294TestFqdnHostnames-2059713138 tempest-ServersV294TestFqdnHostnames-2059713138-project-member] Lock "87462fbe-d62d-4b40-880c-a1785c9ed5d4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 547.096822] env[62070]: DEBUG nova.compute.manager [None req-0ad29952-ef64-49f7-9793-5bdd8a8330c4 tempest-ServerExternalEventsTest-1235707480 tempest-ServerExternalEventsTest-1235707480-project-member] [instance: 7221a720-8ab9-44fd-abe2-8f8fc19b6433] Starting instance... {{(pid=62070) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 547.256528] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43b36142-65a1-4f6d-8fa4-625a459a94ab {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 547.265566] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75e19f97-ac2f-437c-b403-73aec36f3639 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 547.299379] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-466aa510-28f4-4707-9199-b750e7c2b04e {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 547.307154] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9cf929e9-2999-4fe7-9cef-8873aacb7d60 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 547.322802] env[62070]: DEBUG nova.compute.provider_tree [None req-0bc5a0f5-5f77-4994-b513-243b307e9e97 tempest-ServersAdminNegativeTestJSON-1932509401 tempest-ServersAdminNegativeTestJSON-1932509401-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 547.477669] env[62070]: DEBUG nova.virt.hardware [None req-29cb1636-fccd-4431-8dd2-c1cf16ef9eb0 tempest-ServerDiagnosticsV248Test-1284349500 tempest-ServerDiagnosticsV248Test-1284349500-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T09:21:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta 
ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T09:21:17Z,direct_url=,disk_format='vmdk',id=43ea607c-7ece-4601-9b11-75c6a16aa7dd,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9d42cb2bbadf40d6b35f237f71234611',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T09:21:18Z,virtual_size=,visibility=), allow threads: False {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 547.477884] env[62070]: DEBUG nova.virt.hardware [None req-29cb1636-fccd-4431-8dd2-c1cf16ef9eb0 tempest-ServerDiagnosticsV248Test-1284349500 tempest-ServerDiagnosticsV248Test-1284349500-project-member] Flavor limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 547.481382] env[62070]: DEBUG nova.virt.hardware [None req-29cb1636-fccd-4431-8dd2-c1cf16ef9eb0 tempest-ServerDiagnosticsV248Test-1284349500 tempest-ServerDiagnosticsV248Test-1284349500-project-member] Image limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 547.481382] env[62070]: DEBUG nova.virt.hardware [None req-29cb1636-fccd-4431-8dd2-c1cf16ef9eb0 tempest-ServerDiagnosticsV248Test-1284349500 tempest-ServerDiagnosticsV248Test-1284349500-project-member] Flavor pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 547.481496] env[62070]: DEBUG nova.virt.hardware [None req-29cb1636-fccd-4431-8dd2-c1cf16ef9eb0 tempest-ServerDiagnosticsV248Test-1284349500 tempest-ServerDiagnosticsV248Test-1284349500-project-member] Image pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 547.481752] env[62070]: DEBUG nova.virt.hardware [None req-29cb1636-fccd-4431-8dd2-c1cf16ef9eb0 tempest-ServerDiagnosticsV248Test-1284349500 tempest-ServerDiagnosticsV248Test-1284349500-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 547.481901] env[62070]: DEBUG nova.virt.hardware [None req-29cb1636-fccd-4431-8dd2-c1cf16ef9eb0 tempest-ServerDiagnosticsV248Test-1284349500 tempest-ServerDiagnosticsV248Test-1284349500-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 547.482677] env[62070]: DEBUG nova.virt.hardware [None req-29cb1636-fccd-4431-8dd2-c1cf16ef9eb0 tempest-ServerDiagnosticsV248Test-1284349500 tempest-ServerDiagnosticsV248Test-1284349500-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 547.482677] env[62070]: DEBUG nova.virt.hardware [None req-29cb1636-fccd-4431-8dd2-c1cf16ef9eb0 tempest-ServerDiagnosticsV248Test-1284349500 tempest-ServerDiagnosticsV248Test-1284349500-project-member] Got 1 possible topologies {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 547.482677] env[62070]: DEBUG nova.virt.hardware [None req-29cb1636-fccd-4431-8dd2-c1cf16ef9eb0 tempest-ServerDiagnosticsV248Test-1284349500 tempest-ServerDiagnosticsV248Test-1284349500-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:575}} [ 547.482826] env[62070]: DEBUG nova.virt.hardware [None req-29cb1636-fccd-4431-8dd2-c1cf16ef9eb0 tempest-ServerDiagnosticsV248Test-1284349500 tempest-ServerDiagnosticsV248Test-1284349500-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 547.483780] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3fca054-9b9b-4e22-abc0-f25ff1c1e3e8 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 547.498218] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2909eaa9-6fa4-401c-a58c-0476907ab2c8 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 547.521551] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5dca0ec0-c0fd-40ed-b667-9aed0093087d {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 547.545434] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-29cb1636-fccd-4431-8dd2-c1cf16ef9eb0 tempest-ServerDiagnosticsV248Test-1284349500 tempest-ServerDiagnosticsV248Test-1284349500-project-member] [instance: 283e7488-1240-475f-a74d-809251950774] Instance VIF info [] {{(pid=62070) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 547.558596] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-29cb1636-fccd-4431-8dd2-c1cf16ef9eb0 tempest-ServerDiagnosticsV248Test-1284349500 tempest-ServerDiagnosticsV248Test-1284349500-project-member] Creating folder: OpenStack. Parent ref: group-v4. {{(pid=62070) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 547.559153] env[62070]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a1d90c9b-d15c-4d57-be8e-49867e028e32 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 547.575191] env[62070]: INFO nova.virt.vmwareapi.vm_util [None req-29cb1636-fccd-4431-8dd2-c1cf16ef9eb0 tempest-ServerDiagnosticsV248Test-1284349500 tempest-ServerDiagnosticsV248Test-1284349500-project-member] Created folder: OpenStack in parent group-v4. [ 547.578029] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-29cb1636-fccd-4431-8dd2-c1cf16ef9eb0 tempest-ServerDiagnosticsV248Test-1284349500 tempest-ServerDiagnosticsV248Test-1284349500-project-member] Creating folder: Project (5ef32534b71f4d0f918db463bea2a31f). Parent ref: group-v245319. {{(pid=62070) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 547.578029] env[62070]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-44953982-5648-4f7d-bba2-fc76b234a7a5 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 547.591226] env[62070]: DEBUG nova.compute.manager [None req-abc3cfd8-5b88-48b6-95da-0d8d4239d4c5 tempest-ServersV294TestFqdnHostnames-2059713138 tempest-ServersV294TestFqdnHostnames-2059713138-project-member] [instance: 87462fbe-d62d-4b40-880c-a1785c9ed5d4] Starting instance... 
{{(pid=62070) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 547.596572] env[62070]: INFO nova.virt.vmwareapi.vm_util [None req-29cb1636-fccd-4431-8dd2-c1cf16ef9eb0 tempest-ServerDiagnosticsV248Test-1284349500 tempest-ServerDiagnosticsV248Test-1284349500-project-member] Created folder: Project (5ef32534b71f4d0f918db463bea2a31f) in parent group-v245319. [ 547.596572] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-29cb1636-fccd-4431-8dd2-c1cf16ef9eb0 tempest-ServerDiagnosticsV248Test-1284349500 tempest-ServerDiagnosticsV248Test-1284349500-project-member] Creating folder: Instances. Parent ref: group-v245320. {{(pid=62070) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 547.596572] env[62070]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-979d028a-7856-42b4-a973-cd4d21de7c10 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 547.618085] env[62070]: INFO nova.virt.vmwareapi.vm_util [None req-29cb1636-fccd-4431-8dd2-c1cf16ef9eb0 tempest-ServerDiagnosticsV248Test-1284349500 tempest-ServerDiagnosticsV248Test-1284349500-project-member] Created folder: Instances in parent group-v245320. [ 547.618085] env[62070]: DEBUG oslo.service.loopingcall [None req-29cb1636-fccd-4431-8dd2-c1cf16ef9eb0 tempest-ServerDiagnosticsV248Test-1284349500 tempest-ServerDiagnosticsV248Test-1284349500-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 547.618085] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 283e7488-1240-475f-a74d-809251950774] Creating VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 547.618085] env[62070]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-418f4ac4-e1c4-4069-96ac-079e01f616ae {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 547.643576] env[62070]: DEBUG oslo_concurrency.lockutils [None req-0ad29952-ef64-49f7-9793-5bdd8a8330c4 tempest-ServerExternalEventsTest-1235707480 tempest-ServerExternalEventsTest-1235707480-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 547.650141] env[62070]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 547.650141] env[62070]: value = "task-1121409" [ 547.650141] env[62070]: _type = "Task" [ 547.650141] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 547.664921] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1121409, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 547.695237] env[62070]: DEBUG oslo_concurrency.lockutils [None req-cb6fd1fb-d5f4-4d43-abd6-fc98c9bb8564 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Acquiring lock "deee86eb-365b-4104-8687-72abdbf3807f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 547.695237] env[62070]: DEBUG oslo_concurrency.lockutils [None req-cb6fd1fb-d5f4-4d43-abd6-fc98c9bb8564 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Lock "deee86eb-365b-4104-8687-72abdbf3807f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 547.830325] env[62070]: DEBUG nova.scheduler.client.report [None req-0bc5a0f5-5f77-4994-b513-243b307e9e97 tempest-ServersAdminNegativeTestJSON-1932509401 tempest-ServersAdminNegativeTestJSON-1932509401-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 548.124473] env[62070]: DEBUG oslo_concurrency.lockutils [None req-abc3cfd8-5b88-48b6-95da-0d8d4239d4c5 tempest-ServersV294TestFqdnHostnames-2059713138 tempest-ServersV294TestFqdnHostnames-2059713138-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 548.161575] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1121409, 'name': CreateVM_Task, 'duration_secs': 0.295832} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 548.161766] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 283e7488-1240-475f-a74d-809251950774] Created VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 548.166019] env[62070]: DEBUG oslo_vmware.service [None req-29cb1636-fccd-4431-8dd2-c1cf16ef9eb0 tempest-ServerDiagnosticsV248Test-1284349500 tempest-ServerDiagnosticsV248Test-1284349500-project-member] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a60d7088-7b48-46f3-bffc-2a43fed992f9 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 548.170955] env[62070]: DEBUG oslo_concurrency.lockutils [None req-29cb1636-fccd-4431-8dd2-c1cf16ef9eb0 tempest-ServerDiagnosticsV248Test-1284349500 tempest-ServerDiagnosticsV248Test-1284349500-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 548.170955] env[62070]: DEBUG oslo_concurrency.lockutils [None req-29cb1636-fccd-4431-8dd2-c1cf16ef9eb0 tempest-ServerDiagnosticsV248Test-1284349500 tempest-ServerDiagnosticsV248Test-1284349500-project-member] Acquired lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 548.172923] env[62070]: DEBUG oslo_concurrency.lockutils [None req-29cb1636-fccd-4431-8dd2-c1cf16ef9eb0 tempest-ServerDiagnosticsV248Test-1284349500 tempest-ServerDiagnosticsV248Test-1284349500-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 548.172923] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-454fa8d6-0701-4240-a32c-bf475e6211b8 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 548.177478] env[62070]: DEBUG oslo_vmware.api [None req-29cb1636-fccd-4431-8dd2-c1cf16ef9eb0 tempest-ServerDiagnosticsV248Test-1284349500 tempest-ServerDiagnosticsV248Test-1284349500-project-member] Waiting for the task: (returnval){ [ 548.177478] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]529f9c65-c37f-6898-9db3-6bb19f873327" [ 548.177478] env[62070]: _type = "Task" [ 548.177478] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 548.189456] env[62070]: DEBUG oslo_vmware.api [None req-29cb1636-fccd-4431-8dd2-c1cf16ef9eb0 tempest-ServerDiagnosticsV248Test-1284349500 tempest-ServerDiagnosticsV248Test-1284349500-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]529f9c65-c37f-6898-9db3-6bb19f873327, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 548.202669] env[62070]: DEBUG nova.compute.manager [None req-cb6fd1fb-d5f4-4d43-abd6-fc98c9bb8564 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] [instance: deee86eb-365b-4104-8687-72abdbf3807f] Starting instance... 
{{(pid=62070) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 548.341449] env[62070]: DEBUG oslo_concurrency.lockutils [None req-0bc5a0f5-5f77-4994-b513-243b307e9e97 tempest-ServersAdminNegativeTestJSON-1932509401 tempest-ServersAdminNegativeTestJSON-1932509401-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.212s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 548.342043] env[62070]: DEBUG nova.compute.manager [None req-0bc5a0f5-5f77-4994-b513-243b307e9e97 tempest-ServersAdminNegativeTestJSON-1932509401 tempest-ServersAdminNegativeTestJSON-1932509401-project-member] [instance: 9688a493-7046-49e1-b5ab-0db9cfbf37aa] Start building networks asynchronously for instance. {{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 548.352025] env[62070]: DEBUG oslo_concurrency.lockutils [None req-0ad29952-ef64-49f7-9793-5bdd8a8330c4 tempest-ServerExternalEventsTest-1235707480 tempest-ServerExternalEventsTest-1235707480-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.705s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 548.352025] env[62070]: INFO nova.compute.claims [None req-0ad29952-ef64-49f7-9793-5bdd8a8330c4 tempest-ServerExternalEventsTest-1235707480 tempest-ServerExternalEventsTest-1235707480-project-member] [instance: 7221a720-8ab9-44fd-abe2-8f8fc19b6433] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 548.691508] env[62070]: DEBUG oslo_concurrency.lockutils [None req-29cb1636-fccd-4431-8dd2-c1cf16ef9eb0 tempest-ServerDiagnosticsV248Test-1284349500 tempest-ServerDiagnosticsV248Test-1284349500-project-member] Releasing lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 548.691819] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-29cb1636-fccd-4431-8dd2-c1cf16ef9eb0 tempest-ServerDiagnosticsV248Test-1284349500 tempest-ServerDiagnosticsV248Test-1284349500-project-member] [instance: 283e7488-1240-475f-a74d-809251950774] Processing image 43ea607c-7ece-4601-9b11-75c6a16aa7dd {{(pid=62070) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 548.692076] env[62070]: DEBUG oslo_concurrency.lockutils [None req-29cb1636-fccd-4431-8dd2-c1cf16ef9eb0 tempest-ServerDiagnosticsV248Test-1284349500 tempest-ServerDiagnosticsV248Test-1284349500-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 548.692228] env[62070]: DEBUG oslo_concurrency.lockutils [None req-29cb1636-fccd-4431-8dd2-c1cf16ef9eb0 tempest-ServerDiagnosticsV248Test-1284349500 tempest-ServerDiagnosticsV248Test-1284349500-project-member] Acquired lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 548.692644] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-29cb1636-fccd-4431-8dd2-c1cf16ef9eb0 tempest-ServerDiagnosticsV248Test-1284349500 
tempest-ServerDiagnosticsV248Test-1284349500-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 548.692893] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7c0cddc1-e496-4b55-908f-fa85e7a82b3c {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 548.716889] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-29cb1636-fccd-4431-8dd2-c1cf16ef9eb0 tempest-ServerDiagnosticsV248Test-1284349500 tempest-ServerDiagnosticsV248Test-1284349500-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 548.717064] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-29cb1636-fccd-4431-8dd2-c1cf16ef9eb0 tempest-ServerDiagnosticsV248Test-1284349500 tempest-ServerDiagnosticsV248Test-1284349500-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62070) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 548.717966] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d72f189b-908d-44d2-99fe-75c20b527030 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 548.727611] env[62070]: DEBUG oslo_concurrency.lockutils [None req-cb6fd1fb-d5f4-4d43-abd6-fc98c9bb8564 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 548.728532] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-70971498-ce93-4d08-b2e5-8628255651de {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 548.735426] env[62070]: DEBUG oslo_vmware.api [None req-29cb1636-fccd-4431-8dd2-c1cf16ef9eb0 tempest-ServerDiagnosticsV248Test-1284349500 tempest-ServerDiagnosticsV248Test-1284349500-project-member] Waiting for the task: (returnval){ [ 548.735426] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]5241626f-cea6-48ed-ad64-460c7704781b" [ 548.735426] env[62070]: _type = "Task" [ 548.735426] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 548.749108] env[62070]: DEBUG oslo_vmware.api [None req-29cb1636-fccd-4431-8dd2-c1cf16ef9eb0 tempest-ServerDiagnosticsV248Test-1284349500 tempest-ServerDiagnosticsV248Test-1284349500-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]5241626f-cea6-48ed-ad64-460c7704781b, 'name': SearchDatastore_Task} progress is 0%. 
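Note on the Acquiring/Acquired/Releasing lock records in this stretch: both the per-image "[datastore1] devstack-image-cache_base/..." lock and the resource tracker's "compute_resources" lock with its "waited"/"held" timings come from oslo.concurrency's lockutils. A minimal sketch of the two usage patterns behind those records, with invented lock names standing in for the ones in the log:

    from oslo_concurrency import lockutils

    # Context-manager form: used around a single image-cache entry, giving
    # the "Acquiring lock ... / Acquired lock ... / Releasing lock ..." lines.
    with lockutils.lock('[datastore1] image-cache/example-image-id'):
        pass  # fetch or reuse the cached VMDK here

    # Decorator form: serializes callers on a named lock and logs how long
    # each caller waited for it and then held it (the "waited N.NNNs" and
    # "held N.NNNs" figures in the records above).
    @lockutils.synchronized('compute_resources')
    def claim_resources():
        pass

    claim_resources()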
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 548.855440] env[62070]: DEBUG nova.compute.utils [None req-0bc5a0f5-5f77-4994-b513-243b307e9e97 tempest-ServersAdminNegativeTestJSON-1932509401 tempest-ServersAdminNegativeTestJSON-1932509401-project-member] Using /dev/sd instead of None {{(pid=62070) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 548.856519] env[62070]: DEBUG nova.compute.manager [None req-0bc5a0f5-5f77-4994-b513-243b307e9e97 tempest-ServersAdminNegativeTestJSON-1932509401 tempest-ServersAdminNegativeTestJSON-1932509401-project-member] [instance: 9688a493-7046-49e1-b5ab-0db9cfbf37aa] Allocating IP information in the background. {{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 548.856779] env[62070]: DEBUG nova.network.neutron [None req-0bc5a0f5-5f77-4994-b513-243b307e9e97 tempest-ServersAdminNegativeTestJSON-1932509401 tempest-ServersAdminNegativeTestJSON-1932509401-project-member] [instance: 9688a493-7046-49e1-b5ab-0db9cfbf37aa] allocate_for_instance() {{(pid=62070) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 549.005512] env[62070]: DEBUG nova.policy [None req-0bc5a0f5-5f77-4994-b513-243b307e9e97 tempest-ServersAdminNegativeTestJSON-1932509401 tempest-ServersAdminNegativeTestJSON-1932509401-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '68ca27bd002243078b204ca72972eeab', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1ef55137581b4cd59778cb8d3120854d', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62070) authorize /opt/stack/nova/nova/policy.py:201}} [ 549.252300] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-29cb1636-fccd-4431-8dd2-c1cf16ef9eb0 tempest-ServerDiagnosticsV248Test-1284349500 tempest-ServerDiagnosticsV248Test-1284349500-project-member] [instance: 283e7488-1240-475f-a74d-809251950774] Preparing fetch location {{(pid=62070) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 549.252300] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-29cb1636-fccd-4431-8dd2-c1cf16ef9eb0 tempest-ServerDiagnosticsV248Test-1284349500 tempest-ServerDiagnosticsV248Test-1284349500-project-member] Creating directory with path [datastore1] vmware_temp/6b912de0-3cbe-44a2-b9f3-55cdb953342a/43ea607c-7ece-4601-9b11-75c6a16aa7dd {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 549.252300] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9621c671-037a-49a1-9c0d-63486543e3ef {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.273066] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-29cb1636-fccd-4431-8dd2-c1cf16ef9eb0 tempest-ServerDiagnosticsV248Test-1284349500 tempest-ServerDiagnosticsV248Test-1284349500-project-member] Created directory with path [datastore1] vmware_temp/6b912de0-3cbe-44a2-b9f3-55cdb953342a/43ea607c-7ece-4601-9b11-75c6a16aa7dd {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 549.273897] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-29cb1636-fccd-4431-8dd2-c1cf16ef9eb0 tempest-ServerDiagnosticsV248Test-1284349500 
tempest-ServerDiagnosticsV248Test-1284349500-project-member] [instance: 283e7488-1240-475f-a74d-809251950774] Fetch image to [datastore1] vmware_temp/6b912de0-3cbe-44a2-b9f3-55cdb953342a/43ea607c-7ece-4601-9b11-75c6a16aa7dd/tmp-sparse.vmdk {{(pid=62070) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 549.274223] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-29cb1636-fccd-4431-8dd2-c1cf16ef9eb0 tempest-ServerDiagnosticsV248Test-1284349500 tempest-ServerDiagnosticsV248Test-1284349500-project-member] [instance: 283e7488-1240-475f-a74d-809251950774] Downloading image file data 43ea607c-7ece-4601-9b11-75c6a16aa7dd to [datastore1] vmware_temp/6b912de0-3cbe-44a2-b9f3-55cdb953342a/43ea607c-7ece-4601-9b11-75c6a16aa7dd/tmp-sparse.vmdk on the data store datastore1 {{(pid=62070) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 549.275241] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a30749a8-f602-4060-b017-4e3649abab1a {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.290856] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f37297ae-63cf-45de-b3ad-38b3cde11a71 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.303610] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d2539cd-d562-4256-83b1-af5137a14e56 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.345569] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02c5e2cf-1802-416f-93eb-977b587e0d2b {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.357058] env[62070]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-5f41c55f-ee22-4d6b-94e5-811e2cb56d3d {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.365964] env[62070]: DEBUG nova.compute.manager [None req-0bc5a0f5-5f77-4994-b513-243b307e9e97 tempest-ServersAdminNegativeTestJSON-1932509401 tempest-ServersAdminNegativeTestJSON-1932509401-project-member] [instance: 9688a493-7046-49e1-b5ab-0db9cfbf37aa] Start building block device mappings for instance. 
{{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 549.397070] env[62070]: DEBUG nova.virt.vmwareapi.images [None req-29cb1636-fccd-4431-8dd2-c1cf16ef9eb0 tempest-ServerDiagnosticsV248Test-1284349500 tempest-ServerDiagnosticsV248Test-1284349500-project-member] [instance: 283e7488-1240-475f-a74d-809251950774] Downloading image file data 43ea607c-7ece-4601-9b11-75c6a16aa7dd to the data store datastore1 {{(pid=62070) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 549.516390] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfb7bbcd-2699-4966-99eb-d0d3f50c2702 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.533538] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e517469-c957-4c05-8a4e-8b599b7bde16 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.538623] env[62070]: DEBUG oslo_vmware.rw_handles [None req-29cb1636-fccd-4431-8dd2-c1cf16ef9eb0 tempest-ServerDiagnosticsV248Test-1284349500 tempest-ServerDiagnosticsV248Test-1284349500-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/6b912de0-3cbe-44a2-b9f3-55cdb953342a/43ea607c-7ece-4601-9b11-75c6a16aa7dd/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=62070) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 549.633836] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88d863de-1f47-4e03-99ed-817255c130b4 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.644327] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7c70efd-2026-4579-a2b0-b2a91959aa25 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.658163] env[62070]: DEBUG nova.compute.provider_tree [None req-0ad29952-ef64-49f7-9793-5bdd8a8330c4 tempest-ServerExternalEventsTest-1235707480 tempest-ServerExternalEventsTest-1235707480-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 549.835041] env[62070]: DEBUG oslo_concurrency.lockutils [None req-26488158-5329-4c5b-9a82-fbfcc788dbaa tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Acquiring lock "9b95dfcb-718e-478d-85bc-7479be9b67de" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 549.835315] env[62070]: DEBUG oslo_concurrency.lockutils [None req-26488158-5329-4c5b-9a82-fbfcc788dbaa tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Lock "9b95dfcb-718e-478d-85bc-7479be9b67de" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 550.162880] env[62070]: DEBUG 
nova.scheduler.client.report [None req-0ad29952-ef64-49f7-9793-5bdd8a8330c4 tempest-ServerExternalEventsTest-1235707480 tempest-ServerExternalEventsTest-1235707480-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 550.217449] env[62070]: DEBUG oslo_vmware.rw_handles [None req-29cb1636-fccd-4431-8dd2-c1cf16ef9eb0 tempest-ServerDiagnosticsV248Test-1284349500 tempest-ServerDiagnosticsV248Test-1284349500-project-member] Completed reading data from the image iterator. {{(pid=62070) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 550.217679] env[62070]: DEBUG oslo_vmware.rw_handles [None req-29cb1636-fccd-4431-8dd2-c1cf16ef9eb0 tempest-ServerDiagnosticsV248Test-1284349500 tempest-ServerDiagnosticsV248Test-1284349500-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/6b912de0-3cbe-44a2-b9f3-55cdb953342a/43ea607c-7ece-4601-9b11-75c6a16aa7dd/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=62070) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 550.341180] env[62070]: DEBUG nova.compute.manager [None req-26488158-5329-4c5b-9a82-fbfcc788dbaa tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 9b95dfcb-718e-478d-85bc-7479be9b67de] Starting instance... 
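Note on the fetch sequence above ("Preparing fetch location", "Creating HTTP connection to write to file with size = 21318656 and URL = https://esx...", "Completed reading data from the image iterator", "Closing write handle"): the Glance image is streamed into a vmware_temp directory through the datastore's HTTPS /folder endpoint, with oslo.vmware's rw_handles performing the transfer after a ticket is obtained via SessionManager.AcquireGenericServiceTicket. The snippet below is only a conceptual illustration of such a folder upload using requests, not the code path in the log; the URL, cookie name/value and file name are invented.

    import requests

    # Invented values; a real transfer authenticates with a ticket from
    # SessionManager.AcquireGenericServiceTicket, as in the records above.
    url = ('https://esx.example.test/folder/vmware_temp/example/image.vmdk'
           '?dcPath=ha-datacenter&dsName=datastore1')
    cookies = {'vmware_cgi_ticket': 'example-ticket'}  # assumed cookie name

    with open('image.vmdk', 'rb') as src:
        # Stream the image bytes straight into the datastore folder; the
        # "size = 21318656" in the log is the length of this upload.
        resp = requests.put(url, data=src, cookies=cookies, verify=False)
        resp.raise_for_status()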
{{(pid=62070) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 550.352730] env[62070]: DEBUG nova.virt.vmwareapi.images [None req-29cb1636-fccd-4431-8dd2-c1cf16ef9eb0 tempest-ServerDiagnosticsV248Test-1284349500 tempest-ServerDiagnosticsV248Test-1284349500-project-member] [instance: 283e7488-1240-475f-a74d-809251950774] Downloaded image file data 43ea607c-7ece-4601-9b11-75c6a16aa7dd to vmware_temp/6b912de0-3cbe-44a2-b9f3-55cdb953342a/43ea607c-7ece-4601-9b11-75c6a16aa7dd/tmp-sparse.vmdk on the data store datastore1 {{(pid=62070) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 550.355233] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-29cb1636-fccd-4431-8dd2-c1cf16ef9eb0 tempest-ServerDiagnosticsV248Test-1284349500 tempest-ServerDiagnosticsV248Test-1284349500-project-member] [instance: 283e7488-1240-475f-a74d-809251950774] Caching image {{(pid=62070) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 550.355233] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-29cb1636-fccd-4431-8dd2-c1cf16ef9eb0 tempest-ServerDiagnosticsV248Test-1284349500 tempest-ServerDiagnosticsV248Test-1284349500-project-member] Copying Virtual Disk [datastore1] vmware_temp/6b912de0-3cbe-44a2-b9f3-55cdb953342a/43ea607c-7ece-4601-9b11-75c6a16aa7dd/tmp-sparse.vmdk to [datastore1] vmware_temp/6b912de0-3cbe-44a2-b9f3-55cdb953342a/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 550.356603] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-fc8ba9b4-b5f5-4df3-b69e-f6fe69b32bb9 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 550.368976] env[62070]: DEBUG oslo_vmware.api [None req-29cb1636-fccd-4431-8dd2-c1cf16ef9eb0 tempest-ServerDiagnosticsV248Test-1284349500 tempest-ServerDiagnosticsV248Test-1284349500-project-member] Waiting for the task: (returnval){ [ 550.368976] env[62070]: value = "task-1121410" [ 550.368976] env[62070]: _type = "Task" [ 550.368976] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 550.380700] env[62070]: DEBUG oslo_vmware.api [None req-29cb1636-fccd-4431-8dd2-c1cf16ef9eb0 tempest-ServerDiagnosticsV248Test-1284349500 tempest-ServerDiagnosticsV248Test-1284349500-project-member] Task: {'id': task-1121410, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 550.387084] env[62070]: DEBUG nova.compute.manager [None req-0bc5a0f5-5f77-4994-b513-243b307e9e97 tempest-ServersAdminNegativeTestJSON-1932509401 tempest-ServersAdminNegativeTestJSON-1932509401-project-member] [instance: 9688a493-7046-49e1-b5ab-0db9cfbf37aa] Start spawning the instance on the hypervisor. 
{{(pid=62070) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 550.423149] env[62070]: DEBUG nova.virt.hardware [None req-0bc5a0f5-5f77-4994-b513-243b307e9e97 tempest-ServersAdminNegativeTestJSON-1932509401 tempest-ServersAdminNegativeTestJSON-1932509401-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T09:21:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T09:21:17Z,direct_url=,disk_format='vmdk',id=43ea607c-7ece-4601-9b11-75c6a16aa7dd,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9d42cb2bbadf40d6b35f237f71234611',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T09:21:18Z,virtual_size=,visibility=), allow threads: False {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 550.423406] env[62070]: DEBUG nova.virt.hardware [None req-0bc5a0f5-5f77-4994-b513-243b307e9e97 tempest-ServersAdminNegativeTestJSON-1932509401 tempest-ServersAdminNegativeTestJSON-1932509401-project-member] Flavor limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 550.425335] env[62070]: DEBUG nova.virt.hardware [None req-0bc5a0f5-5f77-4994-b513-243b307e9e97 tempest-ServersAdminNegativeTestJSON-1932509401 tempest-ServersAdminNegativeTestJSON-1932509401-project-member] Image limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 550.425335] env[62070]: DEBUG nova.virt.hardware [None req-0bc5a0f5-5f77-4994-b513-243b307e9e97 tempest-ServersAdminNegativeTestJSON-1932509401 tempest-ServersAdminNegativeTestJSON-1932509401-project-member] Flavor pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 550.425335] env[62070]: DEBUG nova.virt.hardware [None req-0bc5a0f5-5f77-4994-b513-243b307e9e97 tempest-ServersAdminNegativeTestJSON-1932509401 tempest-ServersAdminNegativeTestJSON-1932509401-project-member] Image pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 550.425335] env[62070]: DEBUG nova.virt.hardware [None req-0bc5a0f5-5f77-4994-b513-243b307e9e97 tempest-ServersAdminNegativeTestJSON-1932509401 tempest-ServersAdminNegativeTestJSON-1932509401-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 550.425525] env[62070]: DEBUG nova.virt.hardware [None req-0bc5a0f5-5f77-4994-b513-243b307e9e97 tempest-ServersAdminNegativeTestJSON-1932509401 tempest-ServersAdminNegativeTestJSON-1932509401-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 550.425644] env[62070]: DEBUG nova.virt.hardware [None req-0bc5a0f5-5f77-4994-b513-243b307e9e97 tempest-ServersAdminNegativeTestJSON-1932509401 tempest-ServersAdminNegativeTestJSON-1932509401-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62070) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 550.425931] env[62070]: DEBUG nova.virt.hardware [None req-0bc5a0f5-5f77-4994-b513-243b307e9e97 tempest-ServersAdminNegativeTestJSON-1932509401 tempest-ServersAdminNegativeTestJSON-1932509401-project-member] Got 1 possible topologies {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 550.426011] env[62070]: DEBUG nova.virt.hardware [None req-0bc5a0f5-5f77-4994-b513-243b307e9e97 tempest-ServersAdminNegativeTestJSON-1932509401 tempest-ServersAdminNegativeTestJSON-1932509401-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 550.426204] env[62070]: DEBUG nova.virt.hardware [None req-0bc5a0f5-5f77-4994-b513-243b307e9e97 tempest-ServersAdminNegativeTestJSON-1932509401 tempest-ServersAdminNegativeTestJSON-1932509401-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 550.427080] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-077b6bb0-8d5e-4c29-ad69-ac318a4fdc81 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 550.441526] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbaa8adc-5479-43bc-aa75-1d2bd74f8b9a {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 550.446909] env[62070]: DEBUG nova.network.neutron [None req-0bc5a0f5-5f77-4994-b513-243b307e9e97 tempest-ServersAdminNegativeTestJSON-1932509401 tempest-ServersAdminNegativeTestJSON-1932509401-project-member] [instance: 9688a493-7046-49e1-b5ab-0db9cfbf37aa] Successfully created port: 38acdc96-ca4e-4eff-b4ff-8a9bbb18fb3f {{(pid=62070) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 550.674263] env[62070]: DEBUG oslo_concurrency.lockutils [None req-0ad29952-ef64-49f7-9793-5bdd8a8330c4 tempest-ServerExternalEventsTest-1235707480 tempest-ServerExternalEventsTest-1235707480-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.324s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 550.674263] env[62070]: DEBUG nova.compute.manager [None req-0ad29952-ef64-49f7-9793-5bdd8a8330c4 tempest-ServerExternalEventsTest-1235707480 tempest-ServerExternalEventsTest-1235707480-project-member] [instance: 7221a720-8ab9-44fd-abe2-8f8fc19b6433] Start building networks asynchronously for instance. 
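Note on the nova.virt.hardware records above: they go from flavor/image limits (all unset, so the 65536 defaults apply) to "Build topologies for 1 vcpu(s) 1:1:1", "Got 1 possible topologies" and a single sorted VirtCPUTopology(cores=1,sockets=1,threads=1). The toy enumeration below, not Nova's actual _get_possible_cpu_topologies, shows why: for one vCPU the only factorization into sockets x cores x threads is 1x1x1, so exactly one topology is reported.

    from itertools import product

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                            max_threads=65536):
        """Toy version: every (sockets, cores, threads) triple whose product
        equals the vCPU count and that stays within the given limits."""
        found = []
        for sockets, cores, threads in product(range(1, vcpus + 1), repeat=3):
            if sockets * cores * threads != vcpus:
                continue
            if (sockets <= max_sockets and cores <= max_cores
                    and threads <= max_threads):
                found.append((sockets, cores, threads))
        return found

    print(possible_topologies(1))       # [(1, 1, 1)] -> one possible topology
    print(len(possible_topologies(4)))  # several candidates for 4 vCPUs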
{{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 550.676127] env[62070]: DEBUG oslo_concurrency.lockutils [None req-abc3cfd8-5b88-48b6-95da-0d8d4239d4c5 tempest-ServersV294TestFqdnHostnames-2059713138 tempest-ServersV294TestFqdnHostnames-2059713138-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.552s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 550.678362] env[62070]: INFO nova.compute.claims [None req-abc3cfd8-5b88-48b6-95da-0d8d4239d4c5 tempest-ServersV294TestFqdnHostnames-2059713138 tempest-ServersV294TestFqdnHostnames-2059713138-project-member] [instance: 87462fbe-d62d-4b40-880c-a1785c9ed5d4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 550.875966] env[62070]: DEBUG oslo_concurrency.lockutils [None req-26488158-5329-4c5b-9a82-fbfcc788dbaa tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 550.884644] env[62070]: DEBUG oslo_vmware.api [None req-29cb1636-fccd-4431-8dd2-c1cf16ef9eb0 tempest-ServerDiagnosticsV248Test-1284349500 tempest-ServerDiagnosticsV248Test-1284349500-project-member] Task: {'id': task-1121410, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 551.045189] env[62070]: DEBUG oslo_concurrency.lockutils [None req-e375be83-77df-4817-8661-3151cdf18f15 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Acquiring lock "9fd18ad4-7c72-4a13-8c29-da660a060020" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 551.045556] env[62070]: DEBUG oslo_concurrency.lockutils [None req-e375be83-77df-4817-8661-3151cdf18f15 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Lock "9fd18ad4-7c72-4a13-8c29-da660a060020" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 551.190889] env[62070]: DEBUG nova.compute.utils [None req-0ad29952-ef64-49f7-9793-5bdd8a8330c4 tempest-ServerExternalEventsTest-1235707480 tempest-ServerExternalEventsTest-1235707480-project-member] Using /dev/sd instead of None {{(pid=62070) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 551.198389] env[62070]: DEBUG nova.compute.manager [None req-0ad29952-ef64-49f7-9793-5bdd8a8330c4 tempest-ServerExternalEventsTest-1235707480 tempest-ServerExternalEventsTest-1235707480-project-member] [instance: 7221a720-8ab9-44fd-abe2-8f8fc19b6433] Allocating IP information in the background. 
{{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 551.198389] env[62070]: DEBUG nova.network.neutron [None req-0ad29952-ef64-49f7-9793-5bdd8a8330c4 tempest-ServerExternalEventsTest-1235707480 tempest-ServerExternalEventsTest-1235707480-project-member] [instance: 7221a720-8ab9-44fd-abe2-8f8fc19b6433] allocate_for_instance() {{(pid=62070) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 551.382718] env[62070]: DEBUG oslo_vmware.api [None req-29cb1636-fccd-4431-8dd2-c1cf16ef9eb0 tempest-ServerDiagnosticsV248Test-1284349500 tempest-ServerDiagnosticsV248Test-1284349500-project-member] Task: {'id': task-1121410, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.767173} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 551.382999] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-29cb1636-fccd-4431-8dd2-c1cf16ef9eb0 tempest-ServerDiagnosticsV248Test-1284349500 tempest-ServerDiagnosticsV248Test-1284349500-project-member] Copied Virtual Disk [datastore1] vmware_temp/6b912de0-3cbe-44a2-b9f3-55cdb953342a/43ea607c-7ece-4601-9b11-75c6a16aa7dd/tmp-sparse.vmdk to [datastore1] vmware_temp/6b912de0-3cbe-44a2-b9f3-55cdb953342a/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 551.384819] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-29cb1636-fccd-4431-8dd2-c1cf16ef9eb0 tempest-ServerDiagnosticsV248Test-1284349500 tempest-ServerDiagnosticsV248Test-1284349500-project-member] Deleting the datastore file [datastore1] vmware_temp/6b912de0-3cbe-44a2-b9f3-55cdb953342a/43ea607c-7ece-4601-9b11-75c6a16aa7dd/tmp-sparse.vmdk {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 551.384819] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-103c77ff-7108-4b4b-9001-93b01691d0b9 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 551.392856] env[62070]: DEBUG oslo_vmware.api [None req-29cb1636-fccd-4431-8dd2-c1cf16ef9eb0 tempest-ServerDiagnosticsV248Test-1284349500 tempest-ServerDiagnosticsV248Test-1284349500-project-member] Waiting for the task: (returnval){ [ 551.392856] env[62070]: value = "task-1121411" [ 551.392856] env[62070]: _type = "Task" [ 551.392856] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 551.404318] env[62070]: DEBUG oslo_vmware.api [None req-29cb1636-fccd-4431-8dd2-c1cf16ef9eb0 tempest-ServerDiagnosticsV248Test-1284349500 tempest-ServerDiagnosticsV248Test-1284349500-project-member] Task: {'id': task-1121411, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 551.463246] env[62070]: DEBUG nova.policy [None req-0ad29952-ef64-49f7-9793-5bdd8a8330c4 tempest-ServerExternalEventsTest-1235707480 tempest-ServerExternalEventsTest-1235707480-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ace78bb539a249a291751004d3179f91', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f46dba8e86b34b92a7db687af8b8641b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62070) authorize /opt/stack/nova/nova/policy.py:201}} [ 551.547807] env[62070]: DEBUG nova.compute.manager [None req-e375be83-77df-4817-8661-3151cdf18f15 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] [instance: 9fd18ad4-7c72-4a13-8c29-da660a060020] Starting instance... {{(pid=62070) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 551.651147] env[62070]: DEBUG oslo_concurrency.lockutils [None req-ef89e838-db76-4acf-b7ea-6f620e71b226 tempest-DeleteServersAdminTestJSON-428050376 tempest-DeleteServersAdminTestJSON-428050376-project-member] Acquiring lock "768b0b5f-7d20-4bc6-87f6-b66adcce42c6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 551.651365] env[62070]: DEBUG oslo_concurrency.lockutils [None req-ef89e838-db76-4acf-b7ea-6f620e71b226 tempest-DeleteServersAdminTestJSON-428050376 tempest-DeleteServersAdminTestJSON-428050376-project-member] Lock "768b0b5f-7d20-4bc6-87f6-b66adcce42c6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 551.703723] env[62070]: DEBUG nova.compute.manager [None req-0ad29952-ef64-49f7-9793-5bdd8a8330c4 tempest-ServerExternalEventsTest-1235707480 tempest-ServerExternalEventsTest-1235707480-project-member] [instance: 7221a720-8ab9-44fd-abe2-8f8fc19b6433] Start building block device mappings for instance. 
{{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 551.867951] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b93493cb-02ea-4f73-bd1d-4af4ec060047 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 551.876943] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d4c8546-5794-4c83-acb5-5135dd7726b1 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 551.918911] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abff37fb-c284-4576-8956-675a49a7bcef {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 551.928326] env[62070]: DEBUG oslo_vmware.api [None req-29cb1636-fccd-4431-8dd2-c1cf16ef9eb0 tempest-ServerDiagnosticsV248Test-1284349500 tempest-ServerDiagnosticsV248Test-1284349500-project-member] Task: {'id': task-1121411, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.024823} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 551.930533] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-29cb1636-fccd-4431-8dd2-c1cf16ef9eb0 tempest-ServerDiagnosticsV248Test-1284349500 tempest-ServerDiagnosticsV248Test-1284349500-project-member] Deleted the datastore file {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 551.930533] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-29cb1636-fccd-4431-8dd2-c1cf16ef9eb0 tempest-ServerDiagnosticsV248Test-1284349500 tempest-ServerDiagnosticsV248Test-1284349500-project-member] Moving file from [datastore1] vmware_temp/6b912de0-3cbe-44a2-b9f3-55cdb953342a/43ea607c-7ece-4601-9b11-75c6a16aa7dd to [datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd. {{(pid=62070) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:334}} [ 551.930533] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.MoveDatastoreFile_Task with opID=oslo.vmware-bf356c37-ee60-49cf-894a-a55737f60ce3 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 551.938409] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6f5ef9f-19d0-4c02-91a8-9d7649e1ef19 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 551.943626] env[62070]: DEBUG oslo_vmware.api [None req-29cb1636-fccd-4431-8dd2-c1cf16ef9eb0 tempest-ServerDiagnosticsV248Test-1284349500 tempest-ServerDiagnosticsV248Test-1284349500-project-member] Waiting for the task: (returnval){ [ 551.943626] env[62070]: value = "task-1121412" [ 551.943626] env[62070]: _type = "Task" [ 551.943626] env[62070]: } to complete. 
{{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 551.956512] env[62070]: DEBUG nova.compute.provider_tree [None req-abc3cfd8-5b88-48b6-95da-0d8d4239d4c5 tempest-ServersV294TestFqdnHostnames-2059713138 tempest-ServersV294TestFqdnHostnames-2059713138-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 551.963177] env[62070]: DEBUG oslo_vmware.api [None req-29cb1636-fccd-4431-8dd2-c1cf16ef9eb0 tempest-ServerDiagnosticsV248Test-1284349500 tempest-ServerDiagnosticsV248Test-1284349500-project-member] Task: {'id': task-1121412, 'name': MoveDatastoreFile_Task} progress is 100%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 552.150493] env[62070]: DEBUG oslo_concurrency.lockutils [None req-e375be83-77df-4817-8661-3151cdf18f15 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 552.154310] env[62070]: DEBUG nova.compute.manager [None req-ef89e838-db76-4acf-b7ea-6f620e71b226 tempest-DeleteServersAdminTestJSON-428050376 tempest-DeleteServersAdminTestJSON-428050376-project-member] [instance: 768b0b5f-7d20-4bc6-87f6-b66adcce42c6] Starting instance... {{(pid=62070) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 552.457598] env[62070]: DEBUG oslo_vmware.api [None req-29cb1636-fccd-4431-8dd2-c1cf16ef9eb0 tempest-ServerDiagnosticsV248Test-1284349500 tempest-ServerDiagnosticsV248Test-1284349500-project-member] Task: {'id': task-1121412, 'name': MoveDatastoreFile_Task, 'duration_secs': 0.027807} completed successfully. 
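Note: taken together, the CopyVirtualDisk_Task, DeleteDatastoreFile_Task and MoveDatastoreFile_Task records above are the image-cache population sequence. The uploaded tmp-sparse.vmdk is copied to a flat VMDK, the temporary sparse file is deleted, and the result is moved from vmware_temp/... into devstack-image-cache_base/ where later boots of the same image (and the SearchDatastore_Task probes elsewhere in this log) can find it. A compressed sketch of those three calls using the same invoke_api/wait_for_task pattern shown earlier; the session, the Datacenter reference and the datastore paths are placeholders.

    def cache_image(session, dc_ref,
                    tmp='[datastore1] vmware_temp/example/tmp-sparse.vmdk',
                    flat='[datastore1] vmware_temp/example/image.vmdk',
                    cached='[datastore1] devstack-image-cache_base/example.vmdk'):
        """Sketch of the copy -> delete -> move sequence; session is an
        oslo_vmware.api.VMwareAPISession, dc_ref a Datacenter moref, and
        the datastore paths are illustrative defaults."""
        disk_mgr = session.vim.service_content.virtualDiskManager
        file_mgr = session.vim.service_content.fileManager

        # 1. Sparse upload -> flat disk (CopyVirtualDisk_Task).
        session.wait_for_task(session.invoke_api(
            session.vim, 'CopyVirtualDisk_Task', disk_mgr,
            sourceName=tmp, sourceDatacenter=dc_ref,
            destName=flat, destDatacenter=dc_ref))

        # 2. Drop the temporary sparse file (DeleteDatastoreFile_Task).
        session.wait_for_task(session.invoke_api(
            session.vim, 'DeleteDatastoreFile_Task', file_mgr,
            name=tmp, datacenter=dc_ref))

        # 3. Move the flat disk into the shared image cache
        #    (MoveDatastoreFile_Task).
        session.wait_for_task(session.invoke_api(
            session.vim, 'MoveDatastoreFile_Task', file_mgr,
            sourceName=flat, sourceDatacenter=dc_ref,
            destinationName=cached, destinationDatacenter=dc_ref))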
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 552.459434] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-29cb1636-fccd-4431-8dd2-c1cf16ef9eb0 tempest-ServerDiagnosticsV248Test-1284349500 tempest-ServerDiagnosticsV248Test-1284349500-project-member] File moved {{(pid=62070) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:346}} [ 552.459434] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-29cb1636-fccd-4431-8dd2-c1cf16ef9eb0 tempest-ServerDiagnosticsV248Test-1284349500 tempest-ServerDiagnosticsV248Test-1284349500-project-member] [instance: 283e7488-1240-475f-a74d-809251950774] Cleaning up location [datastore1] vmware_temp/6b912de0-3cbe-44a2-b9f3-55cdb953342a {{(pid=62070) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 552.459434] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-29cb1636-fccd-4431-8dd2-c1cf16ef9eb0 tempest-ServerDiagnosticsV248Test-1284349500 tempest-ServerDiagnosticsV248Test-1284349500-project-member] Deleting the datastore file [datastore1] vmware_temp/6b912de0-3cbe-44a2-b9f3-55cdb953342a {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 552.459434] env[62070]: DEBUG nova.scheduler.client.report [None req-abc3cfd8-5b88-48b6-95da-0d8d4239d4c5 tempest-ServersV294TestFqdnHostnames-2059713138 tempest-ServersV294TestFqdnHostnames-2059713138-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 552.462588] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-facfc1c6-c9d4-4c01-b254-79af2667aa91 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 552.473301] env[62070]: DEBUG oslo_vmware.api [None req-29cb1636-fccd-4431-8dd2-c1cf16ef9eb0 tempest-ServerDiagnosticsV248Test-1284349500 tempest-ServerDiagnosticsV248Test-1284349500-project-member] Waiting for the task: (returnval){ [ 552.473301] env[62070]: value = "task-1121413" [ 552.473301] env[62070]: _type = "Task" [ 552.473301] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 552.482031] env[62070]: DEBUG oslo_vmware.api [None req-29cb1636-fccd-4431-8dd2-c1cf16ef9eb0 tempest-ServerDiagnosticsV248Test-1284349500 tempest-ServerDiagnosticsV248Test-1284349500-project-member] Task: {'id': task-1121413, 'name': DeleteDatastoreFile_Task} progress is 0%. 
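Note on the "Inventory has not changed for provider ... based on inventory data" record above: it repeats the resource provider's inventory verbatim. For scheduling and claims, the usable capacity of each resource class is (total - reserved) * allocation_ratio, so this node advertises 192 VCPU, 196078 MB of RAM and 400 GB of disk. The same arithmetic over the relevant fields of that dictionary:

    # Relevant fields of the inventory shown in the record above.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }

    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(f'{rc}: {capacity:.0f}')
    # VCPU: 192, MEMORY_MB: 196078, DISK_GB: 400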
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 552.510092] env[62070]: DEBUG nova.network.neutron [None req-0ad29952-ef64-49f7-9793-5bdd8a8330c4 tempest-ServerExternalEventsTest-1235707480 tempest-ServerExternalEventsTest-1235707480-project-member] [instance: 7221a720-8ab9-44fd-abe2-8f8fc19b6433] Successfully created port: 38930f3e-c522-4232-9100-7512b09dda78 {{(pid=62070) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 552.683986] env[62070]: DEBUG oslo_concurrency.lockutils [None req-ef89e838-db76-4acf-b7ea-6f620e71b226 tempest-DeleteServersAdminTestJSON-428050376 tempest-DeleteServersAdminTestJSON-428050376-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 552.716397] env[62070]: DEBUG nova.compute.manager [None req-0ad29952-ef64-49f7-9793-5bdd8a8330c4 tempest-ServerExternalEventsTest-1235707480 tempest-ServerExternalEventsTest-1235707480-project-member] [instance: 7221a720-8ab9-44fd-abe2-8f8fc19b6433] Start spawning the instance on the hypervisor. {{(pid=62070) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 552.758950] env[62070]: DEBUG nova.virt.hardware [None req-0ad29952-ef64-49f7-9793-5bdd8a8330c4 tempest-ServerExternalEventsTest-1235707480 tempest-ServerExternalEventsTest-1235707480-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T09:21:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T09:21:17Z,direct_url=,disk_format='vmdk',id=43ea607c-7ece-4601-9b11-75c6a16aa7dd,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9d42cb2bbadf40d6b35f237f71234611',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T09:21:18Z,virtual_size=,visibility=), allow threads: False {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 552.759586] env[62070]: DEBUG nova.virt.hardware [None req-0ad29952-ef64-49f7-9793-5bdd8a8330c4 tempest-ServerExternalEventsTest-1235707480 tempest-ServerExternalEventsTest-1235707480-project-member] Flavor limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 552.759773] env[62070]: DEBUG nova.virt.hardware [None req-0ad29952-ef64-49f7-9793-5bdd8a8330c4 tempest-ServerExternalEventsTest-1235707480 tempest-ServerExternalEventsTest-1235707480-project-member] Image limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 552.759960] env[62070]: DEBUG nova.virt.hardware [None req-0ad29952-ef64-49f7-9793-5bdd8a8330c4 tempest-ServerExternalEventsTest-1235707480 tempest-ServerExternalEventsTest-1235707480-project-member] Flavor pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 552.760367] env[62070]: DEBUG nova.virt.hardware [None req-0ad29952-ef64-49f7-9793-5bdd8a8330c4 tempest-ServerExternalEventsTest-1235707480 tempest-ServerExternalEventsTest-1235707480-project-member] Image pref 0:0:0 {{(pid=62070) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 552.760481] env[62070]: DEBUG nova.virt.hardware [None req-0ad29952-ef64-49f7-9793-5bdd8a8330c4 tempest-ServerExternalEventsTest-1235707480 tempest-ServerExternalEventsTest-1235707480-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 552.760886] env[62070]: DEBUG nova.virt.hardware [None req-0ad29952-ef64-49f7-9793-5bdd8a8330c4 tempest-ServerExternalEventsTest-1235707480 tempest-ServerExternalEventsTest-1235707480-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 552.761137] env[62070]: DEBUG nova.virt.hardware [None req-0ad29952-ef64-49f7-9793-5bdd8a8330c4 tempest-ServerExternalEventsTest-1235707480 tempest-ServerExternalEventsTest-1235707480-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 552.761331] env[62070]: DEBUG nova.virt.hardware [None req-0ad29952-ef64-49f7-9793-5bdd8a8330c4 tempest-ServerExternalEventsTest-1235707480 tempest-ServerExternalEventsTest-1235707480-project-member] Got 1 possible topologies {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 552.761487] env[62070]: DEBUG nova.virt.hardware [None req-0ad29952-ef64-49f7-9793-5bdd8a8330c4 tempest-ServerExternalEventsTest-1235707480 tempest-ServerExternalEventsTest-1235707480-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 552.761700] env[62070]: DEBUG nova.virt.hardware [None req-0ad29952-ef64-49f7-9793-5bdd8a8330c4 tempest-ServerExternalEventsTest-1235707480 tempest-ServerExternalEventsTest-1235707480-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 552.763406] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29ddcbe8-91bf-48e6-9428-b56f73ad2c21 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 552.773875] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0cc2585a-4990-494b-a761-32cfbe0241bc {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 552.970680] env[62070]: DEBUG oslo_concurrency.lockutils [None req-abc3cfd8-5b88-48b6-95da-0d8d4239d4c5 tempest-ServersV294TestFqdnHostnames-2059713138 tempest-ServersV294TestFqdnHostnames-2059713138-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.294s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 552.971489] env[62070]: DEBUG nova.compute.manager [None req-abc3cfd8-5b88-48b6-95da-0d8d4239d4c5 tempest-ServersV294TestFqdnHostnames-2059713138 tempest-ServersV294TestFqdnHostnames-2059713138-project-member] [instance: 87462fbe-d62d-4b40-880c-a1785c9ed5d4] Start building networks asynchronously for instance. 
{{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 552.978266] env[62070]: DEBUG oslo_concurrency.lockutils [None req-cb6fd1fb-d5f4-4d43-abd6-fc98c9bb8564 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.248s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 552.978266] env[62070]: INFO nova.compute.claims [None req-cb6fd1fb-d5f4-4d43-abd6-fc98c9bb8564 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] [instance: deee86eb-365b-4104-8687-72abdbf3807f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 552.996653] env[62070]: DEBUG oslo_vmware.api [None req-29cb1636-fccd-4431-8dd2-c1cf16ef9eb0 tempest-ServerDiagnosticsV248Test-1284349500 tempest-ServerDiagnosticsV248Test-1284349500-project-member] Task: {'id': task-1121413, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.025242} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 552.996977] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-29cb1636-fccd-4431-8dd2-c1cf16ef9eb0 tempest-ServerDiagnosticsV248Test-1284349500 tempest-ServerDiagnosticsV248Test-1284349500-project-member] Deleted the datastore file {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 552.998018] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-563af270-75ec-4f53-8de2-f65de7219db1 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 553.007804] env[62070]: DEBUG oslo_vmware.api [None req-29cb1636-fccd-4431-8dd2-c1cf16ef9eb0 tempest-ServerDiagnosticsV248Test-1284349500 tempest-ServerDiagnosticsV248Test-1284349500-project-member] Waiting for the task: (returnval){ [ 553.007804] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]5293e6f4-b3f4-843a-dcb4-7b75b7044420" [ 553.007804] env[62070]: _type = "Task" [ 553.007804] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 553.019470] env[62070]: DEBUG oslo_vmware.api [None req-29cb1636-fccd-4431-8dd2-c1cf16ef9eb0 tempest-ServerDiagnosticsV248Test-1284349500 tempest-ServerDiagnosticsV248Test-1284349500-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]5293e6f4-b3f4-843a-dcb4-7b75b7044420, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 553.487073] env[62070]: DEBUG nova.compute.utils [None req-abc3cfd8-5b88-48b6-95da-0d8d4239d4c5 tempest-ServersV294TestFqdnHostnames-2059713138 tempest-ServersV294TestFqdnHostnames-2059713138-project-member] Using /dev/sd instead of None {{(pid=62070) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 553.492468] env[62070]: DEBUG nova.compute.manager [None req-abc3cfd8-5b88-48b6-95da-0d8d4239d4c5 tempest-ServersV294TestFqdnHostnames-2059713138 tempest-ServersV294TestFqdnHostnames-2059713138-project-member] [instance: 87462fbe-d62d-4b40-880c-a1785c9ed5d4] Allocating IP information in the background. 
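Note on the "Start building networks asynchronously" and "Allocating IP information in the background" records: the Neutron allocation runs on a greenthread while block device mappings and the image are prepared, and the build only blocks once the driver needs the VIF list, which is why binding errors surface later inside network_model's _sync_wrapper (as in the traceback further down). A stripped-down illustration of that pattern with a stand-in function in place of allocate_for_instance; the port id and return value are invented.

    import eventlet

    def allocate_ports(port_ids):
        """Stand-in for network_api.allocate_for_instance(); the real call can
        raise PortBindingFailed, which then re-raises at gt.wait() below."""
        eventlet.sleep(0.1)   # pretend Neutron is working
        return [{'id': p, 'status': 'ACTIVE'} for p in port_ids]

    # Kick the allocation off in the background ...
    gt = eventlet.spawn(allocate_ports, ['example-port-id'])

    # ... other build steps (BDMs, image cache) run here in the meantime ...

    # ... and only block, and see any failure, when the result is needed.
    network_info = gt.wait()
    print(network_info)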
{{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 553.492768] env[62070]: DEBUG nova.network.neutron [None req-abc3cfd8-5b88-48b6-95da-0d8d4239d4c5 tempest-ServersV294TestFqdnHostnames-2059713138 tempest-ServersV294TestFqdnHostnames-2059713138-project-member] [instance: 87462fbe-d62d-4b40-880c-a1785c9ed5d4] allocate_for_instance() {{(pid=62070) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 553.528688] env[62070]: DEBUG oslo_vmware.api [None req-29cb1636-fccd-4431-8dd2-c1cf16ef9eb0 tempest-ServerDiagnosticsV248Test-1284349500 tempest-ServerDiagnosticsV248Test-1284349500-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]5293e6f4-b3f4-843a-dcb4-7b75b7044420, 'name': SearchDatastore_Task, 'duration_secs': 0.009717} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 553.529960] env[62070]: DEBUG oslo_concurrency.lockutils [None req-29cb1636-fccd-4431-8dd2-c1cf16ef9eb0 tempest-ServerDiagnosticsV248Test-1284349500 tempest-ServerDiagnosticsV248Test-1284349500-project-member] Releasing lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 553.530477] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-29cb1636-fccd-4431-8dd2-c1cf16ef9eb0 tempest-ServerDiagnosticsV248Test-1284349500 tempest-ServerDiagnosticsV248Test-1284349500-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore1] 283e7488-1240-475f-a74d-809251950774/283e7488-1240-475f-a74d-809251950774.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 553.531538] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e54aecf5-ecbb-4501-a497-353c689a9f20 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 553.543403] env[62070]: DEBUG oslo_vmware.api [None req-29cb1636-fccd-4431-8dd2-c1cf16ef9eb0 tempest-ServerDiagnosticsV248Test-1284349500 tempest-ServerDiagnosticsV248Test-1284349500-project-member] Waiting for the task: (returnval){ [ 553.543403] env[62070]: value = "task-1121414" [ 553.543403] env[62070]: _type = "Task" [ 553.543403] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 553.552091] env[62070]: DEBUG oslo_vmware.api [None req-29cb1636-fccd-4431-8dd2-c1cf16ef9eb0 tempest-ServerDiagnosticsV248Test-1284349500 tempest-ServerDiagnosticsV248Test-1284349500-project-member] Task: {'id': task-1121414, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 553.675717] env[62070]: DEBUG nova.policy [None req-abc3cfd8-5b88-48b6-95da-0d8d4239d4c5 tempest-ServersV294TestFqdnHostnames-2059713138 tempest-ServersV294TestFqdnHostnames-2059713138-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9938a1f56c3641149a54431b9ca04fed', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '492568494bd24d1480f2e30ffbf82f5b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62070) authorize /opt/stack/nova/nova/policy.py:201}} [ 553.705817] env[62070]: ERROR nova.compute.manager [None req-0bc5a0f5-5f77-4994-b513-243b307e9e97 tempest-ServersAdminNegativeTestJSON-1932509401 tempest-ServersAdminNegativeTestJSON-1932509401-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 38acdc96-ca4e-4eff-b4ff-8a9bbb18fb3f, please check neutron logs for more information. [ 553.705817] env[62070]: ERROR nova.compute.manager Traceback (most recent call last): [ 553.705817] env[62070]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 553.705817] env[62070]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 553.705817] env[62070]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 553.705817] env[62070]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 553.705817] env[62070]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 553.705817] env[62070]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 553.705817] env[62070]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 553.705817] env[62070]: ERROR nova.compute.manager self.force_reraise() [ 553.705817] env[62070]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 553.705817] env[62070]: ERROR nova.compute.manager raise self.value [ 553.705817] env[62070]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 553.705817] env[62070]: ERROR nova.compute.manager updated_port = self._update_port( [ 553.705817] env[62070]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 553.705817] env[62070]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 553.706437] env[62070]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 553.706437] env[62070]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 553.706437] env[62070]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 38acdc96-ca4e-4eff-b4ff-8a9bbb18fb3f, please check neutron logs for more information. 
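Note on the failure above: the traceback bottoms out in _ensure_no_port_binding_failure, which inspects the port Neutron handed back and raises PortBindingFailed when its binding:vif_type comes back as binding_failed (typically because no mechanism driver could bind the port on this host). A condensed sketch of that check, not the exact Nova source:

    from nova import exception   # the exception type shown in the traceback

    VIF_TYPE_BINDING_FAILED = 'binding_failed'

    def ensure_no_port_binding_failure(port):
        """Raise if Neutron reported that it could not bind the port."""
        if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
            raise exception.PortBindingFailed(port_id=port['id'])

    # ensure_no_port_binding_failure({'id': 'example',
    #                                 'binding:vif_type': 'binding_failed'})
    # -> PortBindingFailed: Binding failed for port example, please check
    #    neutron logs for more information.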
[ 553.706437] env[62070]: ERROR nova.compute.manager [ 553.706437] env[62070]: Traceback (most recent call last): [ 553.706437] env[62070]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 553.706437] env[62070]: listener.cb(fileno) [ 553.706437] env[62070]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 553.706437] env[62070]: result = function(*args, **kwargs) [ 553.706437] env[62070]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 553.706437] env[62070]: return func(*args, **kwargs) [ 553.706437] env[62070]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 553.706437] env[62070]: raise e [ 553.706437] env[62070]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 553.706437] env[62070]: nwinfo = self.network_api.allocate_for_instance( [ 553.706437] env[62070]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 553.706437] env[62070]: created_port_ids = self._update_ports_for_instance( [ 553.706437] env[62070]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 553.706437] env[62070]: with excutils.save_and_reraise_exception(): [ 553.706437] env[62070]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 553.706437] env[62070]: self.force_reraise() [ 553.706437] env[62070]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 553.706437] env[62070]: raise self.value [ 553.706437] env[62070]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 553.706437] env[62070]: updated_port = self._update_port( [ 553.706437] env[62070]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 553.706437] env[62070]: _ensure_no_port_binding_failure(port) [ 553.706437] env[62070]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 553.706437] env[62070]: raise exception.PortBindingFailed(port_id=port['id']) [ 553.707203] env[62070]: nova.exception.PortBindingFailed: Binding failed for port 38acdc96-ca4e-4eff-b4ff-8a9bbb18fb3f, please check neutron logs for more information. [ 553.707203] env[62070]: Removing descriptor: 14 [ 553.707203] env[62070]: ERROR nova.compute.manager [None req-0bc5a0f5-5f77-4994-b513-243b307e9e97 tempest-ServersAdminNegativeTestJSON-1932509401 tempest-ServersAdminNegativeTestJSON-1932509401-project-member] [instance: 9688a493-7046-49e1-b5ab-0db9cfbf37aa] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 38acdc96-ca4e-4eff-b4ff-8a9bbb18fb3f, please check neutron logs for more information. 
[ 553.707203] env[62070]: ERROR nova.compute.manager [instance: 9688a493-7046-49e1-b5ab-0db9cfbf37aa] Traceback (most recent call last): [ 553.707203] env[62070]: ERROR nova.compute.manager [instance: 9688a493-7046-49e1-b5ab-0db9cfbf37aa] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 553.707203] env[62070]: ERROR nova.compute.manager [instance: 9688a493-7046-49e1-b5ab-0db9cfbf37aa] yield resources [ 553.707203] env[62070]: ERROR nova.compute.manager [instance: 9688a493-7046-49e1-b5ab-0db9cfbf37aa] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 553.707203] env[62070]: ERROR nova.compute.manager [instance: 9688a493-7046-49e1-b5ab-0db9cfbf37aa] self.driver.spawn(context, instance, image_meta, [ 553.707203] env[62070]: ERROR nova.compute.manager [instance: 9688a493-7046-49e1-b5ab-0db9cfbf37aa] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 553.707203] env[62070]: ERROR nova.compute.manager [instance: 9688a493-7046-49e1-b5ab-0db9cfbf37aa] self._vmops.spawn(context, instance, image_meta, injected_files, [ 553.707203] env[62070]: ERROR nova.compute.manager [instance: 9688a493-7046-49e1-b5ab-0db9cfbf37aa] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 553.707203] env[62070]: ERROR nova.compute.manager [instance: 9688a493-7046-49e1-b5ab-0db9cfbf37aa] vm_ref = self.build_virtual_machine(instance, [ 553.707532] env[62070]: ERROR nova.compute.manager [instance: 9688a493-7046-49e1-b5ab-0db9cfbf37aa] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 553.707532] env[62070]: ERROR nova.compute.manager [instance: 9688a493-7046-49e1-b5ab-0db9cfbf37aa] vif_infos = vmwarevif.get_vif_info(self._session, [ 553.707532] env[62070]: ERROR nova.compute.manager [instance: 9688a493-7046-49e1-b5ab-0db9cfbf37aa] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 553.707532] env[62070]: ERROR nova.compute.manager [instance: 9688a493-7046-49e1-b5ab-0db9cfbf37aa] for vif in network_info: [ 553.707532] env[62070]: ERROR nova.compute.manager [instance: 9688a493-7046-49e1-b5ab-0db9cfbf37aa] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 553.707532] env[62070]: ERROR nova.compute.manager [instance: 9688a493-7046-49e1-b5ab-0db9cfbf37aa] return self._sync_wrapper(fn, *args, **kwargs) [ 553.707532] env[62070]: ERROR nova.compute.manager [instance: 9688a493-7046-49e1-b5ab-0db9cfbf37aa] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 553.707532] env[62070]: ERROR nova.compute.manager [instance: 9688a493-7046-49e1-b5ab-0db9cfbf37aa] self.wait() [ 553.707532] env[62070]: ERROR nova.compute.manager [instance: 9688a493-7046-49e1-b5ab-0db9cfbf37aa] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 553.707532] env[62070]: ERROR nova.compute.manager [instance: 9688a493-7046-49e1-b5ab-0db9cfbf37aa] self[:] = self._gt.wait() [ 553.707532] env[62070]: ERROR nova.compute.manager [instance: 9688a493-7046-49e1-b5ab-0db9cfbf37aa] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 553.707532] env[62070]: ERROR nova.compute.manager [instance: 9688a493-7046-49e1-b5ab-0db9cfbf37aa] return self._exit_event.wait() [ 553.707532] env[62070]: ERROR nova.compute.manager [instance: 9688a493-7046-49e1-b5ab-0db9cfbf37aa] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 553.707873] env[62070]: ERROR 
nova.compute.manager [instance: 9688a493-7046-49e1-b5ab-0db9cfbf37aa] result = hub.switch() [ 553.707873] env[62070]: ERROR nova.compute.manager [instance: 9688a493-7046-49e1-b5ab-0db9cfbf37aa] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 553.707873] env[62070]: ERROR nova.compute.manager [instance: 9688a493-7046-49e1-b5ab-0db9cfbf37aa] return self.greenlet.switch() [ 553.707873] env[62070]: ERROR nova.compute.manager [instance: 9688a493-7046-49e1-b5ab-0db9cfbf37aa] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 553.707873] env[62070]: ERROR nova.compute.manager [instance: 9688a493-7046-49e1-b5ab-0db9cfbf37aa] result = function(*args, **kwargs) [ 553.707873] env[62070]: ERROR nova.compute.manager [instance: 9688a493-7046-49e1-b5ab-0db9cfbf37aa] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 553.707873] env[62070]: ERROR nova.compute.manager [instance: 9688a493-7046-49e1-b5ab-0db9cfbf37aa] return func(*args, **kwargs) [ 553.707873] env[62070]: ERROR nova.compute.manager [instance: 9688a493-7046-49e1-b5ab-0db9cfbf37aa] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 553.707873] env[62070]: ERROR nova.compute.manager [instance: 9688a493-7046-49e1-b5ab-0db9cfbf37aa] raise e [ 553.707873] env[62070]: ERROR nova.compute.manager [instance: 9688a493-7046-49e1-b5ab-0db9cfbf37aa] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 553.707873] env[62070]: ERROR nova.compute.manager [instance: 9688a493-7046-49e1-b5ab-0db9cfbf37aa] nwinfo = self.network_api.allocate_for_instance( [ 553.707873] env[62070]: ERROR nova.compute.manager [instance: 9688a493-7046-49e1-b5ab-0db9cfbf37aa] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 553.707873] env[62070]: ERROR nova.compute.manager [instance: 9688a493-7046-49e1-b5ab-0db9cfbf37aa] created_port_ids = self._update_ports_for_instance( [ 553.708278] env[62070]: ERROR nova.compute.manager [instance: 9688a493-7046-49e1-b5ab-0db9cfbf37aa] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 553.708278] env[62070]: ERROR nova.compute.manager [instance: 9688a493-7046-49e1-b5ab-0db9cfbf37aa] with excutils.save_and_reraise_exception(): [ 553.708278] env[62070]: ERROR nova.compute.manager [instance: 9688a493-7046-49e1-b5ab-0db9cfbf37aa] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 553.708278] env[62070]: ERROR nova.compute.manager [instance: 9688a493-7046-49e1-b5ab-0db9cfbf37aa] self.force_reraise() [ 553.708278] env[62070]: ERROR nova.compute.manager [instance: 9688a493-7046-49e1-b5ab-0db9cfbf37aa] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 553.708278] env[62070]: ERROR nova.compute.manager [instance: 9688a493-7046-49e1-b5ab-0db9cfbf37aa] raise self.value [ 553.708278] env[62070]: ERROR nova.compute.manager [instance: 9688a493-7046-49e1-b5ab-0db9cfbf37aa] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 553.708278] env[62070]: ERROR nova.compute.manager [instance: 9688a493-7046-49e1-b5ab-0db9cfbf37aa] updated_port = self._update_port( [ 553.708278] env[62070]: ERROR nova.compute.manager [instance: 9688a493-7046-49e1-b5ab-0db9cfbf37aa] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 553.708278] 
env[62070]: ERROR nova.compute.manager [instance: 9688a493-7046-49e1-b5ab-0db9cfbf37aa] _ensure_no_port_binding_failure(port) [ 553.708278] env[62070]: ERROR nova.compute.manager [instance: 9688a493-7046-49e1-b5ab-0db9cfbf37aa] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 553.708278] env[62070]: ERROR nova.compute.manager [instance: 9688a493-7046-49e1-b5ab-0db9cfbf37aa] raise exception.PortBindingFailed(port_id=port['id']) [ 553.708619] env[62070]: ERROR nova.compute.manager [instance: 9688a493-7046-49e1-b5ab-0db9cfbf37aa] nova.exception.PortBindingFailed: Binding failed for port 38acdc96-ca4e-4eff-b4ff-8a9bbb18fb3f, please check neutron logs for more information. [ 553.708619] env[62070]: ERROR nova.compute.manager [instance: 9688a493-7046-49e1-b5ab-0db9cfbf37aa] [ 553.708619] env[62070]: INFO nova.compute.manager [None req-0bc5a0f5-5f77-4994-b513-243b307e9e97 tempest-ServersAdminNegativeTestJSON-1932509401 tempest-ServersAdminNegativeTestJSON-1932509401-project-member] [instance: 9688a493-7046-49e1-b5ab-0db9cfbf37aa] Terminating instance [ 553.708834] env[62070]: DEBUG oslo_concurrency.lockutils [None req-0bc5a0f5-5f77-4994-b513-243b307e9e97 tempest-ServersAdminNegativeTestJSON-1932509401 tempest-ServersAdminNegativeTestJSON-1932509401-project-member] Acquiring lock "refresh_cache-9688a493-7046-49e1-b5ab-0db9cfbf37aa" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 553.708983] env[62070]: DEBUG oslo_concurrency.lockutils [None req-0bc5a0f5-5f77-4994-b513-243b307e9e97 tempest-ServersAdminNegativeTestJSON-1932509401 tempest-ServersAdminNegativeTestJSON-1932509401-project-member] Acquired lock "refresh_cache-9688a493-7046-49e1-b5ab-0db9cfbf37aa" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 553.709162] env[62070]: DEBUG nova.network.neutron [None req-0bc5a0f5-5f77-4994-b513-243b307e9e97 tempest-ServersAdminNegativeTestJSON-1932509401 tempest-ServersAdminNegativeTestJSON-1932509401-project-member] [instance: 9688a493-7046-49e1-b5ab-0db9cfbf37aa] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 553.993422] env[62070]: DEBUG nova.compute.manager [None req-abc3cfd8-5b88-48b6-95da-0d8d4239d4c5 tempest-ServersV294TestFqdnHostnames-2059713138 tempest-ServersV294TestFqdnHostnames-2059713138-project-member] [instance: 87462fbe-d62d-4b40-880c-a1785c9ed5d4] Start building block device mappings for instance. {{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 554.058320] env[62070]: DEBUG oslo_vmware.api [None req-29cb1636-fccd-4431-8dd2-c1cf16ef9eb0 tempest-ServerDiagnosticsV248Test-1284349500 tempest-ServerDiagnosticsV248Test-1284349500-project-member] Task: {'id': task-1121414, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.468439} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 554.058928] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-29cb1636-fccd-4431-8dd2-c1cf16ef9eb0 tempest-ServerDiagnosticsV248Test-1284349500 tempest-ServerDiagnosticsV248Test-1284349500-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore1] 283e7488-1240-475f-a74d-809251950774/283e7488-1240-475f-a74d-809251950774.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 554.058928] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-29cb1636-fccd-4431-8dd2-c1cf16ef9eb0 tempest-ServerDiagnosticsV248Test-1284349500 tempest-ServerDiagnosticsV248Test-1284349500-project-member] [instance: 283e7488-1240-475f-a74d-809251950774] Extending root virtual disk to 1048576 {{(pid=62070) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 554.059890] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b5f4544c-d32a-4e3c-8dcb-b82cc7f98476 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 554.066947] env[62070]: DEBUG oslo_vmware.api [None req-29cb1636-fccd-4431-8dd2-c1cf16ef9eb0 tempest-ServerDiagnosticsV248Test-1284349500 tempest-ServerDiagnosticsV248Test-1284349500-project-member] Waiting for the task: (returnval){ [ 554.066947] env[62070]: value = "task-1121415" [ 554.066947] env[62070]: _type = "Task" [ 554.066947] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 554.082944] env[62070]: DEBUG oslo_vmware.api [None req-29cb1636-fccd-4431-8dd2-c1cf16ef9eb0 tempest-ServerDiagnosticsV248Test-1284349500 tempest-ServerDiagnosticsV248Test-1284349500-project-member] Task: {'id': task-1121415, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 554.186145] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed03201b-578f-4030-9135-d9d23e189456 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 554.196871] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50b9c9d6-2ab1-4ef3-ad03-dc34308e18ba {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 554.235949] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6a36df2-1074-4564-a335-b10a778c468f {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 554.243521] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03a3eb48-598e-4269-ad5f-c41c728c2497 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 554.261163] env[62070]: DEBUG nova.compute.provider_tree [None req-cb6fd1fb-d5f4-4d43-abd6-fc98c9bb8564 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 554.302525] env[62070]: DEBUG nova.network.neutron [None req-0bc5a0f5-5f77-4994-b513-243b307e9e97 tempest-ServersAdminNegativeTestJSON-1932509401 tempest-ServersAdminNegativeTestJSON-1932509401-project-member] [instance: 9688a493-7046-49e1-b5ab-0db9cfbf37aa] Instance cache missing network info. {{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 554.471892] env[62070]: DEBUG nova.compute.manager [req-a2f7eeb8-c383-400a-be50-4d23766ab465 req-5205c4ff-e7bd-46e1-b91d-5272480ebfb1 service nova] [instance: 9688a493-7046-49e1-b5ab-0db9cfbf37aa] Received event network-changed-38acdc96-ca4e-4eff-b4ff-8a9bbb18fb3f {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 554.471892] env[62070]: DEBUG nova.compute.manager [req-a2f7eeb8-c383-400a-be50-4d23766ab465 req-5205c4ff-e7bd-46e1-b91d-5272480ebfb1 service nova] [instance: 9688a493-7046-49e1-b5ab-0db9cfbf37aa] Refreshing instance network info cache due to event network-changed-38acdc96-ca4e-4eff-b4ff-8a9bbb18fb3f. 
{{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 554.471892] env[62070]: DEBUG oslo_concurrency.lockutils [req-a2f7eeb8-c383-400a-be50-4d23766ab465 req-5205c4ff-e7bd-46e1-b91d-5272480ebfb1 service nova] Acquiring lock "refresh_cache-9688a493-7046-49e1-b5ab-0db9cfbf37aa" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 554.501577] env[62070]: DEBUG nova.network.neutron [None req-0bc5a0f5-5f77-4994-b513-243b307e9e97 tempest-ServersAdminNegativeTestJSON-1932509401 tempest-ServersAdminNegativeTestJSON-1932509401-project-member] [instance: 9688a493-7046-49e1-b5ab-0db9cfbf37aa] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 554.577976] env[62070]: DEBUG oslo_vmware.api [None req-29cb1636-fccd-4431-8dd2-c1cf16ef9eb0 tempest-ServerDiagnosticsV248Test-1284349500 tempest-ServerDiagnosticsV248Test-1284349500-project-member] Task: {'id': task-1121415, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.062768} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 554.578286] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-29cb1636-fccd-4431-8dd2-c1cf16ef9eb0 tempest-ServerDiagnosticsV248Test-1284349500 tempest-ServerDiagnosticsV248Test-1284349500-project-member] [instance: 283e7488-1240-475f-a74d-809251950774] Extended root virtual disk {{(pid=62070) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 554.579506] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3eb9ef1e-1cc9-453a-877c-8e191e438ad8 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 554.603843] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-29cb1636-fccd-4431-8dd2-c1cf16ef9eb0 tempest-ServerDiagnosticsV248Test-1284349500 tempest-ServerDiagnosticsV248Test-1284349500-project-member] [instance: 283e7488-1240-475f-a74d-809251950774] Reconfiguring VM instance instance-00000001 to attach disk [datastore1] 283e7488-1240-475f-a74d-809251950774/283e7488-1240-475f-a74d-809251950774.vmdk or device None with type sparse {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 554.604163] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6579a96b-db19-40bb-a330-71c264e7687e {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 554.627494] env[62070]: DEBUG oslo_vmware.api [None req-29cb1636-fccd-4431-8dd2-c1cf16ef9eb0 tempest-ServerDiagnosticsV248Test-1284349500 tempest-ServerDiagnosticsV248Test-1284349500-project-member] Waiting for the task: (returnval){ [ 554.627494] env[62070]: value = "task-1121416" [ 554.627494] env[62070]: _type = "Task" [ 554.627494] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 554.643594] env[62070]: DEBUG oslo_vmware.api [None req-29cb1636-fccd-4431-8dd2-c1cf16ef9eb0 tempest-ServerDiagnosticsV248Test-1284349500 tempest-ServerDiagnosticsV248Test-1284349500-project-member] Task: {'id': task-1121416, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 554.665562] env[62070]: DEBUG nova.network.neutron [None req-abc3cfd8-5b88-48b6-95da-0d8d4239d4c5 tempest-ServersV294TestFqdnHostnames-2059713138 tempest-ServersV294TestFqdnHostnames-2059713138-project-member] [instance: 87462fbe-d62d-4b40-880c-a1785c9ed5d4] Successfully created port: af01c0ca-546d-4b4d-a8cf-007e5f080e9c {{(pid=62070) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 554.765077] env[62070]: DEBUG nova.scheduler.client.report [None req-cb6fd1fb-d5f4-4d43-abd6-fc98c9bb8564 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 555.010186] env[62070]: DEBUG nova.compute.manager [None req-abc3cfd8-5b88-48b6-95da-0d8d4239d4c5 tempest-ServersV294TestFqdnHostnames-2059713138 tempest-ServersV294TestFqdnHostnames-2059713138-project-member] [instance: 87462fbe-d62d-4b40-880c-a1785c9ed5d4] Start spawning the instance on the hypervisor. {{(pid=62070) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 555.014597] env[62070]: DEBUG oslo_concurrency.lockutils [None req-0bc5a0f5-5f77-4994-b513-243b307e9e97 tempest-ServersAdminNegativeTestJSON-1932509401 tempest-ServersAdminNegativeTestJSON-1932509401-project-member] Releasing lock "refresh_cache-9688a493-7046-49e1-b5ab-0db9cfbf37aa" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 555.014936] env[62070]: DEBUG nova.compute.manager [None req-0bc5a0f5-5f77-4994-b513-243b307e9e97 tempest-ServersAdminNegativeTestJSON-1932509401 tempest-ServersAdminNegativeTestJSON-1932509401-project-member] [instance: 9688a493-7046-49e1-b5ab-0db9cfbf37aa] Start destroying the instance on the hypervisor. 
{{(pid=62070) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 555.015076] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-0bc5a0f5-5f77-4994-b513-243b307e9e97 tempest-ServersAdminNegativeTestJSON-1932509401 tempest-ServersAdminNegativeTestJSON-1932509401-project-member] [instance: 9688a493-7046-49e1-b5ab-0db9cfbf37aa] Destroying instance {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 555.015595] env[62070]: DEBUG oslo_concurrency.lockutils [req-a2f7eeb8-c383-400a-be50-4d23766ab465 req-5205c4ff-e7bd-46e1-b91d-5272480ebfb1 service nova] Acquired lock "refresh_cache-9688a493-7046-49e1-b5ab-0db9cfbf37aa" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 555.015786] env[62070]: DEBUG nova.network.neutron [req-a2f7eeb8-c383-400a-be50-4d23766ab465 req-5205c4ff-e7bd-46e1-b91d-5272480ebfb1 service nova] [instance: 9688a493-7046-49e1-b5ab-0db9cfbf37aa] Refreshing network info cache for port 38acdc96-ca4e-4eff-b4ff-8a9bbb18fb3f {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 555.017027] env[62070]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-012c25e5-b0f5-4c29-9e64-7924d274df8c {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.034139] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-491cabb0-769c-4271-9855-836f843ac0cb {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.060772] env[62070]: DEBUG nova.virt.hardware [None req-abc3cfd8-5b88-48b6-95da-0d8d4239d4c5 tempest-ServersV294TestFqdnHostnames-2059713138 tempest-ServersV294TestFqdnHostnames-2059713138-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T09:21:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T09:21:17Z,direct_url=,disk_format='vmdk',id=43ea607c-7ece-4601-9b11-75c6a16aa7dd,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9d42cb2bbadf40d6b35f237f71234611',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T09:21:18Z,virtual_size=,visibility=), allow threads: False {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 555.060772] env[62070]: DEBUG nova.virt.hardware [None req-abc3cfd8-5b88-48b6-95da-0d8d4239d4c5 tempest-ServersV294TestFqdnHostnames-2059713138 tempest-ServersV294TestFqdnHostnames-2059713138-project-member] Flavor limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 555.060772] env[62070]: DEBUG nova.virt.hardware [None req-abc3cfd8-5b88-48b6-95da-0d8d4239d4c5 tempest-ServersV294TestFqdnHostnames-2059713138 tempest-ServersV294TestFqdnHostnames-2059713138-project-member] Image limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 555.061461] env[62070]: DEBUG nova.virt.hardware [None req-abc3cfd8-5b88-48b6-95da-0d8d4239d4c5 tempest-ServersV294TestFqdnHostnames-2059713138 
tempest-ServersV294TestFqdnHostnames-2059713138-project-member] Flavor pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 555.061461] env[62070]: DEBUG nova.virt.hardware [None req-abc3cfd8-5b88-48b6-95da-0d8d4239d4c5 tempest-ServersV294TestFqdnHostnames-2059713138 tempest-ServersV294TestFqdnHostnames-2059713138-project-member] Image pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 555.061461] env[62070]: DEBUG nova.virt.hardware [None req-abc3cfd8-5b88-48b6-95da-0d8d4239d4c5 tempest-ServersV294TestFqdnHostnames-2059713138 tempest-ServersV294TestFqdnHostnames-2059713138-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 555.061752] env[62070]: DEBUG nova.virt.hardware [None req-abc3cfd8-5b88-48b6-95da-0d8d4239d4c5 tempest-ServersV294TestFqdnHostnames-2059713138 tempest-ServersV294TestFqdnHostnames-2059713138-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 555.061853] env[62070]: DEBUG nova.virt.hardware [None req-abc3cfd8-5b88-48b6-95da-0d8d4239d4c5 tempest-ServersV294TestFqdnHostnames-2059713138 tempest-ServersV294TestFqdnHostnames-2059713138-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 555.062061] env[62070]: DEBUG nova.virt.hardware [None req-abc3cfd8-5b88-48b6-95da-0d8d4239d4c5 tempest-ServersV294TestFqdnHostnames-2059713138 tempest-ServersV294TestFqdnHostnames-2059713138-project-member] Got 1 possible topologies {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 555.062269] env[62070]: DEBUG nova.virt.hardware [None req-abc3cfd8-5b88-48b6-95da-0d8d4239d4c5 tempest-ServersV294TestFqdnHostnames-2059713138 tempest-ServersV294TestFqdnHostnames-2059713138-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 555.062488] env[62070]: DEBUG nova.virt.hardware [None req-abc3cfd8-5b88-48b6-95da-0d8d4239d4c5 tempest-ServersV294TestFqdnHostnames-2059713138 tempest-ServersV294TestFqdnHostnames-2059713138-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 555.072268] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0b02760-8a1a-4a60-a857-c25a9dec66e0 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.076461] env[62070]: WARNING nova.virt.vmwareapi.vmops [None req-0bc5a0f5-5f77-4994-b513-243b307e9e97 tempest-ServersAdminNegativeTestJSON-1932509401 tempest-ServersAdminNegativeTestJSON-1932509401-project-member] [instance: 9688a493-7046-49e1-b5ab-0db9cfbf37aa] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 9688a493-7046-49e1-b5ab-0db9cfbf37aa could not be found. 
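The nova.virt.hardware lines above go from a flavor with one vCPU and effectively unbounded limits (65536 sockets, cores and threads each) to a single possible topology of 1:1:1. A rough illustration of that factorization step is sketched below; it mirrors the idea, not the actual nova.virt.hardware implementation.

```python
# Enumerate (sockets, cores, threads) triples whose product equals the vCPU
# count, subject to per-dimension limits. For vcpus=1 the only answer is 1:1:1.
def possible_topologies(vcpus, max_sockets, max_cores, max_threads):
    # A factor of vcpus can never exceed vcpus, so cap the search there.
    for s in range(1, min(max_sockets, vcpus) + 1):
        for c in range(1, min(max_cores, vcpus) + 1):
            for t in range(1, min(max_threads, vcpus) + 1):
                if s * c * t == vcpus:
                    yield (s, c, t)


print(list(possible_topologies(1, 65536, 65536, 65536)))  # -> [(1, 1, 1)]
```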
[ 555.076617] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-0bc5a0f5-5f77-4994-b513-243b307e9e97 tempest-ServersAdminNegativeTestJSON-1932509401 tempest-ServersAdminNegativeTestJSON-1932509401-project-member] [instance: 9688a493-7046-49e1-b5ab-0db9cfbf37aa] Instance destroyed {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 555.077111] env[62070]: INFO nova.compute.manager [None req-0bc5a0f5-5f77-4994-b513-243b307e9e97 tempest-ServersAdminNegativeTestJSON-1932509401 tempest-ServersAdminNegativeTestJSON-1932509401-project-member] [instance: 9688a493-7046-49e1-b5ab-0db9cfbf37aa] Took 0.06 seconds to destroy the instance on the hypervisor. [ 555.077442] env[62070]: DEBUG oslo.service.loopingcall [None req-0bc5a0f5-5f77-4994-b513-243b307e9e97 tempest-ServersAdminNegativeTestJSON-1932509401 tempest-ServersAdminNegativeTestJSON-1932509401-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 555.078726] env[62070]: DEBUG nova.compute.manager [-] [instance: 9688a493-7046-49e1-b5ab-0db9cfbf37aa] Deallocating network for instance {{(pid=62070) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 555.078922] env[62070]: DEBUG nova.network.neutron [-] [instance: 9688a493-7046-49e1-b5ab-0db9cfbf37aa] deallocate_for_instance() {{(pid=62070) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 555.089989] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95cb344c-9286-4e7b-b1ec-9562b76a7b7f {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.125218] env[62070]: DEBUG nova.network.neutron [-] [instance: 9688a493-7046-49e1-b5ab-0db9cfbf37aa] Instance cache missing network info. {{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 555.138814] env[62070]: DEBUG oslo_vmware.api [None req-29cb1636-fccd-4431-8dd2-c1cf16ef9eb0 tempest-ServerDiagnosticsV248Test-1284349500 tempest-ServerDiagnosticsV248Test-1284349500-project-member] Task: {'id': task-1121416, 'name': ReconfigVM_Task, 'duration_secs': 0.477636} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 555.138814] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-29cb1636-fccd-4431-8dd2-c1cf16ef9eb0 tempest-ServerDiagnosticsV248Test-1284349500 tempest-ServerDiagnosticsV248Test-1284349500-project-member] [instance: 283e7488-1240-475f-a74d-809251950774] Reconfigured VM instance instance-00000001 to attach disk [datastore1] 283e7488-1240-475f-a74d-809251950774/283e7488-1240-475f-a74d-809251950774.vmdk or device None with type sparse {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 555.140270] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f0ee4b50-5402-4c69-94f7-e15a483566da {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.148700] env[62070]: DEBUG oslo_vmware.api [None req-29cb1636-fccd-4431-8dd2-c1cf16ef9eb0 tempest-ServerDiagnosticsV248Test-1284349500 tempest-ServerDiagnosticsV248Test-1284349500-project-member] Waiting for the task: (returnval){ [ 555.148700] env[62070]: value = "task-1121417" [ 555.148700] env[62070]: _type = "Task" [ 555.148700] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 555.157095] env[62070]: DEBUG oslo_vmware.api [None req-29cb1636-fccd-4431-8dd2-c1cf16ef9eb0 tempest-ServerDiagnosticsV248Test-1284349500 tempest-ServerDiagnosticsV248Test-1284349500-project-member] Task: {'id': task-1121417, 'name': Rename_Task} progress is 5%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 555.271762] env[62070]: DEBUG oslo_concurrency.lockutils [None req-cb6fd1fb-d5f4-4d43-abd6-fc98c9bb8564 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.296s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 555.272210] env[62070]: DEBUG nova.compute.manager [None req-cb6fd1fb-d5f4-4d43-abd6-fc98c9bb8564 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] [instance: deee86eb-365b-4104-8687-72abdbf3807f] Start building networks asynchronously for instance. 
{{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 555.275334] env[62070]: DEBUG oslo_concurrency.lockutils [None req-26488158-5329-4c5b-9a82-fbfcc788dbaa tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.399s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 555.276842] env[62070]: INFO nova.compute.claims [None req-26488158-5329-4c5b-9a82-fbfcc788dbaa tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 9b95dfcb-718e-478d-85bc-7479be9b67de] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 555.499572] env[62070]: DEBUG oslo_concurrency.lockutils [None req-1b5d4941-f26d-40d2-8fb4-3b7b7d57038b tempest-ServersAdminTestJSON-1165817432 tempest-ServersAdminTestJSON-1165817432-project-member] Acquiring lock "e3306a4a-b1cf-4d13-b4de-bd6a6a2e2619" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 555.499800] env[62070]: DEBUG oslo_concurrency.lockutils [None req-1b5d4941-f26d-40d2-8fb4-3b7b7d57038b tempest-ServersAdminTestJSON-1165817432 tempest-ServersAdminTestJSON-1165817432-project-member] Lock "e3306a4a-b1cf-4d13-b4de-bd6a6a2e2619" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 555.576274] env[62070]: DEBUG nova.network.neutron [req-a2f7eeb8-c383-400a-be50-4d23766ab465 req-5205c4ff-e7bd-46e1-b91d-5272480ebfb1 service nova] [instance: 9688a493-7046-49e1-b5ab-0db9cfbf37aa] Instance cache missing network info. {{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 555.630407] env[62070]: DEBUG nova.network.neutron [-] [instance: 9688a493-7046-49e1-b5ab-0db9cfbf37aa] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 555.659907] env[62070]: DEBUG oslo_vmware.api [None req-29cb1636-fccd-4431-8dd2-c1cf16ef9eb0 tempest-ServerDiagnosticsV248Test-1284349500 tempest-ServerDiagnosticsV248Test-1284349500-project-member] Task: {'id': task-1121417, 'name': Rename_Task, 'duration_secs': 0.130919} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 555.660296] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-29cb1636-fccd-4431-8dd2-c1cf16ef9eb0 tempest-ServerDiagnosticsV248Test-1284349500 tempest-ServerDiagnosticsV248Test-1284349500-project-member] [instance: 283e7488-1240-475f-a74d-809251950774] Powering on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 555.660496] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-eb6f288a-db6d-48c6-856c-7c88731a62f8 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.666910] env[62070]: DEBUG oslo_vmware.api [None req-29cb1636-fccd-4431-8dd2-c1cf16ef9eb0 tempest-ServerDiagnosticsV248Test-1284349500 tempest-ServerDiagnosticsV248Test-1284349500-project-member] Waiting for the task: (returnval){ [ 555.666910] env[62070]: value = "task-1121418" [ 555.666910] env[62070]: _type = "Task" [ 555.666910] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 555.678705] env[62070]: DEBUG oslo_vmware.api [None req-29cb1636-fccd-4431-8dd2-c1cf16ef9eb0 tempest-ServerDiagnosticsV248Test-1284349500 tempest-ServerDiagnosticsV248Test-1284349500-project-member] Task: {'id': task-1121418, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 555.785401] env[62070]: DEBUG nova.compute.utils [None req-cb6fd1fb-d5f4-4d43-abd6-fc98c9bb8564 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Using /dev/sd instead of None {{(pid=62070) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 555.788968] env[62070]: DEBUG nova.compute.manager [None req-cb6fd1fb-d5f4-4d43-abd6-fc98c9bb8564 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] [instance: deee86eb-365b-4104-8687-72abdbf3807f] Allocating IP information in the background. {{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 555.789151] env[62070]: DEBUG nova.network.neutron [None req-cb6fd1fb-d5f4-4d43-abd6-fc98c9bb8564 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] [instance: deee86eb-365b-4104-8687-72abdbf3807f] allocate_for_instance() {{(pid=62070) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 555.938463] env[62070]: DEBUG nova.network.neutron [req-a2f7eeb8-c383-400a-be50-4d23766ab465 req-5205c4ff-e7bd-46e1-b91d-5272480ebfb1 service nova] [instance: 9688a493-7046-49e1-b5ab-0db9cfbf37aa] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 556.006701] env[62070]: DEBUG nova.compute.manager [None req-1b5d4941-f26d-40d2-8fb4-3b7b7d57038b tempest-ServersAdminTestJSON-1165817432 tempest-ServersAdminTestJSON-1165817432-project-member] [instance: e3306a4a-b1cf-4d13-b4de-bd6a6a2e2619] Starting instance... {{(pid=62070) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 556.136013] env[62070]: INFO nova.compute.manager [-] [instance: 9688a493-7046-49e1-b5ab-0db9cfbf37aa] Took 1.06 seconds to deallocate network for instance. 
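The PowerOnVM_Task / wait_for_task exchange above is the usual oslo.vmware pattern: invoke a *_Task method, get back a task reference immediately, then poll it until vCenter reports completion (the _poll_task lines). A hedged sketch of that flow is below; the host, credentials and the vm_ref lookup are placeholders, and the exact constructor arguments may differ between oslo.vmware releases.

```python
# Hedged sketch of the task-polling pattern seen in the log, not code from
# this deployment.
from oslo_vmware import api

session = api.VMwareAPISession(
    'vcenter.example.test',   # placeholder host, not the vCenter in this log
    'user', 'secret',
    10,                       # api_retry_count
    0.5)                      # task_poll_interval, seconds between polls

# vm_ref is assumed to have been looked up earlier (e.g. via a
# PropertyCollector query); it is a managed object reference.
vm_ref = ...

# *_Task calls return a task reference; wait_for_task() polls it until the
# task succeeds or raises if vCenter reports an error.
task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
task_info = session.wait_for_task(task)
print(task_info.state)
```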
[ 556.139304] env[62070]: DEBUG nova.compute.claims [None req-0bc5a0f5-5f77-4994-b513-243b307e9e97 tempest-ServersAdminNegativeTestJSON-1932509401 tempest-ServersAdminNegativeTestJSON-1932509401-project-member] [instance: 9688a493-7046-49e1-b5ab-0db9cfbf37aa] Aborting claim: {{(pid=62070) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 556.139481] env[62070]: DEBUG oslo_concurrency.lockutils [None req-0bc5a0f5-5f77-4994-b513-243b307e9e97 tempest-ServersAdminNegativeTestJSON-1932509401 tempest-ServersAdminNegativeTestJSON-1932509401-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 556.179716] env[62070]: DEBUG oslo_vmware.api [None req-29cb1636-fccd-4431-8dd2-c1cf16ef9eb0 tempest-ServerDiagnosticsV248Test-1284349500 tempest-ServerDiagnosticsV248Test-1284349500-project-member] Task: {'id': task-1121418, 'name': PowerOnVM_Task, 'duration_secs': 0.466347} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 556.180151] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-29cb1636-fccd-4431-8dd2-c1cf16ef9eb0 tempest-ServerDiagnosticsV248Test-1284349500 tempest-ServerDiagnosticsV248Test-1284349500-project-member] [instance: 283e7488-1240-475f-a74d-809251950774] Powered on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 556.180449] env[62070]: INFO nova.compute.manager [None req-29cb1636-fccd-4431-8dd2-c1cf16ef9eb0 tempest-ServerDiagnosticsV248Test-1284349500 tempest-ServerDiagnosticsV248Test-1284349500-project-member] [instance: 283e7488-1240-475f-a74d-809251950774] Took 9.42 seconds to spawn the instance on the hypervisor. [ 556.181182] env[62070]: DEBUG nova.compute.manager [None req-29cb1636-fccd-4431-8dd2-c1cf16ef9eb0 tempest-ServerDiagnosticsV248Test-1284349500 tempest-ServerDiagnosticsV248Test-1284349500-project-member] [instance: 283e7488-1240-475f-a74d-809251950774] Checking state {{(pid=62070) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 556.182104] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d58b0a0-db93-4179-bde3-0f9eb77267e4 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 556.217739] env[62070]: DEBUG nova.policy [None req-cb6fd1fb-d5f4-4d43-abd6-fc98c9bb8564 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0e3ee2dd49154a44bcfb94832273cd52', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '94c6fc73d5a74adb8384fd156daf3f58', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62070) authorize /opt/stack/nova/nova/policy.py:201}} [ 556.298683] env[62070]: DEBUG nova.compute.manager [None req-cb6fd1fb-d5f4-4d43-abd6-fc98c9bb8564 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] [instance: deee86eb-365b-4104-8687-72abdbf3807f] Start building block device mappings for instance. 
{{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 556.442414] env[62070]: DEBUG oslo_concurrency.lockutils [req-a2f7eeb8-c383-400a-be50-4d23766ab465 req-5205c4ff-e7bd-46e1-b91d-5272480ebfb1 service nova] Releasing lock "refresh_cache-9688a493-7046-49e1-b5ab-0db9cfbf37aa" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 556.488030] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4aa30d3c-c8d9-43be-887e-85383d60504d {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 556.496404] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2118105-40f5-405c-80a0-931af8797b6b {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 556.536314] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-103abe49-a4d7-4b33-a9e0-09fee0fe2cc3 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 556.544895] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2affaf2c-285f-4bd0-8c95-5ddcdd3cc6cb {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 556.551873] env[62070]: DEBUG oslo_concurrency.lockutils [None req-1b5d4941-f26d-40d2-8fb4-3b7b7d57038b tempest-ServersAdminTestJSON-1165817432 tempest-ServersAdminTestJSON-1165817432-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 556.561153] env[62070]: DEBUG nova.compute.provider_tree [None req-26488158-5329-4c5b-9a82-fbfcc788dbaa tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 556.712060] env[62070]: INFO nova.compute.manager [None req-29cb1636-fccd-4431-8dd2-c1cf16ef9eb0 tempest-ServerDiagnosticsV248Test-1284349500 tempest-ServerDiagnosticsV248Test-1284349500-project-member] [instance: 283e7488-1240-475f-a74d-809251950774] Took 14.19 seconds to build instance. [ 556.745642] env[62070]: ERROR nova.compute.manager [None req-0ad29952-ef64-49f7-9793-5bdd8a8330c4 tempest-ServerExternalEventsTest-1235707480 tempest-ServerExternalEventsTest-1235707480-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 38930f3e-c522-4232-9100-7512b09dda78, please check neutron logs for more information. 
[ 556.745642] env[62070]: ERROR nova.compute.manager Traceback (most recent call last): [ 556.745642] env[62070]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 556.745642] env[62070]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 556.745642] env[62070]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 556.745642] env[62070]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 556.745642] env[62070]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 556.745642] env[62070]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 556.745642] env[62070]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 556.745642] env[62070]: ERROR nova.compute.manager self.force_reraise() [ 556.745642] env[62070]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 556.745642] env[62070]: ERROR nova.compute.manager raise self.value [ 556.745642] env[62070]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 556.745642] env[62070]: ERROR nova.compute.manager updated_port = self._update_port( [ 556.745642] env[62070]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 556.745642] env[62070]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 556.746223] env[62070]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 556.746223] env[62070]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 556.746223] env[62070]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 38930f3e-c522-4232-9100-7512b09dda78, please check neutron logs for more information. 
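Both port failures in this section are raised by _ensure_no_port_binding_failure(), which turns a port whose binding did not succeed into nova.exception.PortBindingFailed. A hedged sketch of that check follows; the helper and exception names come from the traceback, while the assumption that the signal is the port's binding:vif_type being 'binding_failed' is mine, not confirmed by the log.

```python
# Hedged sketch of the check behind the PortBindingFailed tracebacks above.
# Assumption: a port whose binding failed comes back from Neutron with
# binding:vif_type set to 'binding_failed'; the real Nova helper may differ.
class PortBindingFailed(Exception):
    def __init__(self, port_id):
        super().__init__(
            f"Binding failed for port {port_id}, "
            "please check neutron logs for more information.")


def ensure_no_port_binding_failure(port):
    if port.get('binding:vif_type') == 'binding_failed':
        raise PortBindingFailed(port_id=port['id'])


# Example port dict shaped like a Neutron GET /v2.0/ports/{id} response:
port = {'id': '38930f3e-c522-4232-9100-7512b09dda78',
        'binding:vif_type': 'binding_failed'}
try:
    ensure_no_port_binding_failure(port)
except PortBindingFailed as exc:
    print(exc)
```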
[ 556.746223] env[62070]: ERROR nova.compute.manager [ 556.746223] env[62070]: Traceback (most recent call last): [ 556.746223] env[62070]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 556.746223] env[62070]: listener.cb(fileno) [ 556.746223] env[62070]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 556.746223] env[62070]: result = function(*args, **kwargs) [ 556.746223] env[62070]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 556.746223] env[62070]: return func(*args, **kwargs) [ 556.746223] env[62070]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 556.746223] env[62070]: raise e [ 556.746223] env[62070]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 556.746223] env[62070]: nwinfo = self.network_api.allocate_for_instance( [ 556.746223] env[62070]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 556.746223] env[62070]: created_port_ids = self._update_ports_for_instance( [ 556.746223] env[62070]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 556.746223] env[62070]: with excutils.save_and_reraise_exception(): [ 556.746223] env[62070]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 556.746223] env[62070]: self.force_reraise() [ 556.746223] env[62070]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 556.746223] env[62070]: raise self.value [ 556.746223] env[62070]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 556.746223] env[62070]: updated_port = self._update_port( [ 556.746223] env[62070]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 556.746223] env[62070]: _ensure_no_port_binding_failure(port) [ 556.746223] env[62070]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 556.746223] env[62070]: raise exception.PortBindingFailed(port_id=port['id']) [ 556.746912] env[62070]: nova.exception.PortBindingFailed: Binding failed for port 38930f3e-c522-4232-9100-7512b09dda78, please check neutron logs for more information. [ 556.746912] env[62070]: Removing descriptor: 16 [ 556.746912] env[62070]: ERROR nova.compute.manager [None req-0ad29952-ef64-49f7-9793-5bdd8a8330c4 tempest-ServerExternalEventsTest-1235707480 tempest-ServerExternalEventsTest-1235707480-project-member] [instance: 7221a720-8ab9-44fd-abe2-8f8fc19b6433] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 38930f3e-c522-4232-9100-7512b09dda78, please check neutron logs for more information. 
[ 556.746912] env[62070]: ERROR nova.compute.manager [instance: 7221a720-8ab9-44fd-abe2-8f8fc19b6433] Traceback (most recent call last): [ 556.746912] env[62070]: ERROR nova.compute.manager [instance: 7221a720-8ab9-44fd-abe2-8f8fc19b6433] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 556.746912] env[62070]: ERROR nova.compute.manager [instance: 7221a720-8ab9-44fd-abe2-8f8fc19b6433] yield resources [ 556.746912] env[62070]: ERROR nova.compute.manager [instance: 7221a720-8ab9-44fd-abe2-8f8fc19b6433] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 556.746912] env[62070]: ERROR nova.compute.manager [instance: 7221a720-8ab9-44fd-abe2-8f8fc19b6433] self.driver.spawn(context, instance, image_meta, [ 556.746912] env[62070]: ERROR nova.compute.manager [instance: 7221a720-8ab9-44fd-abe2-8f8fc19b6433] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 556.746912] env[62070]: ERROR nova.compute.manager [instance: 7221a720-8ab9-44fd-abe2-8f8fc19b6433] self._vmops.spawn(context, instance, image_meta, injected_files, [ 556.746912] env[62070]: ERROR nova.compute.manager [instance: 7221a720-8ab9-44fd-abe2-8f8fc19b6433] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 556.746912] env[62070]: ERROR nova.compute.manager [instance: 7221a720-8ab9-44fd-abe2-8f8fc19b6433] vm_ref = self.build_virtual_machine(instance, [ 556.747231] env[62070]: ERROR nova.compute.manager [instance: 7221a720-8ab9-44fd-abe2-8f8fc19b6433] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 556.747231] env[62070]: ERROR nova.compute.manager [instance: 7221a720-8ab9-44fd-abe2-8f8fc19b6433] vif_infos = vmwarevif.get_vif_info(self._session, [ 556.747231] env[62070]: ERROR nova.compute.manager [instance: 7221a720-8ab9-44fd-abe2-8f8fc19b6433] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 556.747231] env[62070]: ERROR nova.compute.manager [instance: 7221a720-8ab9-44fd-abe2-8f8fc19b6433] for vif in network_info: [ 556.747231] env[62070]: ERROR nova.compute.manager [instance: 7221a720-8ab9-44fd-abe2-8f8fc19b6433] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 556.747231] env[62070]: ERROR nova.compute.manager [instance: 7221a720-8ab9-44fd-abe2-8f8fc19b6433] return self._sync_wrapper(fn, *args, **kwargs) [ 556.747231] env[62070]: ERROR nova.compute.manager [instance: 7221a720-8ab9-44fd-abe2-8f8fc19b6433] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 556.747231] env[62070]: ERROR nova.compute.manager [instance: 7221a720-8ab9-44fd-abe2-8f8fc19b6433] self.wait() [ 556.747231] env[62070]: ERROR nova.compute.manager [instance: 7221a720-8ab9-44fd-abe2-8f8fc19b6433] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 556.747231] env[62070]: ERROR nova.compute.manager [instance: 7221a720-8ab9-44fd-abe2-8f8fc19b6433] self[:] = self._gt.wait() [ 556.747231] env[62070]: ERROR nova.compute.manager [instance: 7221a720-8ab9-44fd-abe2-8f8fc19b6433] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 556.747231] env[62070]: ERROR nova.compute.manager [instance: 7221a720-8ab9-44fd-abe2-8f8fc19b6433] return self._exit_event.wait() [ 556.747231] env[62070]: ERROR nova.compute.manager [instance: 7221a720-8ab9-44fd-abe2-8f8fc19b6433] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 556.747554] env[62070]: ERROR 
nova.compute.manager [instance: 7221a720-8ab9-44fd-abe2-8f8fc19b6433] result = hub.switch() [ 556.747554] env[62070]: ERROR nova.compute.manager [instance: 7221a720-8ab9-44fd-abe2-8f8fc19b6433] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 556.747554] env[62070]: ERROR nova.compute.manager [instance: 7221a720-8ab9-44fd-abe2-8f8fc19b6433] return self.greenlet.switch() [ 556.747554] env[62070]: ERROR nova.compute.manager [instance: 7221a720-8ab9-44fd-abe2-8f8fc19b6433] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 556.747554] env[62070]: ERROR nova.compute.manager [instance: 7221a720-8ab9-44fd-abe2-8f8fc19b6433] result = function(*args, **kwargs) [ 556.747554] env[62070]: ERROR nova.compute.manager [instance: 7221a720-8ab9-44fd-abe2-8f8fc19b6433] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 556.747554] env[62070]: ERROR nova.compute.manager [instance: 7221a720-8ab9-44fd-abe2-8f8fc19b6433] return func(*args, **kwargs) [ 556.747554] env[62070]: ERROR nova.compute.manager [instance: 7221a720-8ab9-44fd-abe2-8f8fc19b6433] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 556.747554] env[62070]: ERROR nova.compute.manager [instance: 7221a720-8ab9-44fd-abe2-8f8fc19b6433] raise e [ 556.747554] env[62070]: ERROR nova.compute.manager [instance: 7221a720-8ab9-44fd-abe2-8f8fc19b6433] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 556.747554] env[62070]: ERROR nova.compute.manager [instance: 7221a720-8ab9-44fd-abe2-8f8fc19b6433] nwinfo = self.network_api.allocate_for_instance( [ 556.747554] env[62070]: ERROR nova.compute.manager [instance: 7221a720-8ab9-44fd-abe2-8f8fc19b6433] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 556.747554] env[62070]: ERROR nova.compute.manager [instance: 7221a720-8ab9-44fd-abe2-8f8fc19b6433] created_port_ids = self._update_ports_for_instance( [ 556.747862] env[62070]: ERROR nova.compute.manager [instance: 7221a720-8ab9-44fd-abe2-8f8fc19b6433] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 556.747862] env[62070]: ERROR nova.compute.manager [instance: 7221a720-8ab9-44fd-abe2-8f8fc19b6433] with excutils.save_and_reraise_exception(): [ 556.747862] env[62070]: ERROR nova.compute.manager [instance: 7221a720-8ab9-44fd-abe2-8f8fc19b6433] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 556.747862] env[62070]: ERROR nova.compute.manager [instance: 7221a720-8ab9-44fd-abe2-8f8fc19b6433] self.force_reraise() [ 556.747862] env[62070]: ERROR nova.compute.manager [instance: 7221a720-8ab9-44fd-abe2-8f8fc19b6433] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 556.747862] env[62070]: ERROR nova.compute.manager [instance: 7221a720-8ab9-44fd-abe2-8f8fc19b6433] raise self.value [ 556.747862] env[62070]: ERROR nova.compute.manager [instance: 7221a720-8ab9-44fd-abe2-8f8fc19b6433] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 556.747862] env[62070]: ERROR nova.compute.manager [instance: 7221a720-8ab9-44fd-abe2-8f8fc19b6433] updated_port = self._update_port( [ 556.747862] env[62070]: ERROR nova.compute.manager [instance: 7221a720-8ab9-44fd-abe2-8f8fc19b6433] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 556.747862] 
env[62070]: ERROR nova.compute.manager [instance: 7221a720-8ab9-44fd-abe2-8f8fc19b6433] _ensure_no_port_binding_failure(port) [ 556.747862] env[62070]: ERROR nova.compute.manager [instance: 7221a720-8ab9-44fd-abe2-8f8fc19b6433] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 556.747862] env[62070]: ERROR nova.compute.manager [instance: 7221a720-8ab9-44fd-abe2-8f8fc19b6433] raise exception.PortBindingFailed(port_id=port['id']) [ 556.748172] env[62070]: ERROR nova.compute.manager [instance: 7221a720-8ab9-44fd-abe2-8f8fc19b6433] nova.exception.PortBindingFailed: Binding failed for port 38930f3e-c522-4232-9100-7512b09dda78, please check neutron logs for more information. [ 556.748172] env[62070]: ERROR nova.compute.manager [instance: 7221a720-8ab9-44fd-abe2-8f8fc19b6433] [ 556.748172] env[62070]: INFO nova.compute.manager [None req-0ad29952-ef64-49f7-9793-5bdd8a8330c4 tempest-ServerExternalEventsTest-1235707480 tempest-ServerExternalEventsTest-1235707480-project-member] [instance: 7221a720-8ab9-44fd-abe2-8f8fc19b6433] Terminating instance [ 556.755617] env[62070]: DEBUG oslo_concurrency.lockutils [None req-0ad29952-ef64-49f7-9793-5bdd8a8330c4 tempest-ServerExternalEventsTest-1235707480 tempest-ServerExternalEventsTest-1235707480-project-member] Acquiring lock "refresh_cache-7221a720-8ab9-44fd-abe2-8f8fc19b6433" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 556.755617] env[62070]: DEBUG oslo_concurrency.lockutils [None req-0ad29952-ef64-49f7-9793-5bdd8a8330c4 tempest-ServerExternalEventsTest-1235707480 tempest-ServerExternalEventsTest-1235707480-project-member] Acquired lock "refresh_cache-7221a720-8ab9-44fd-abe2-8f8fc19b6433" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 556.755617] env[62070]: DEBUG nova.network.neutron [None req-0ad29952-ef64-49f7-9793-5bdd8a8330c4 tempest-ServerExternalEventsTest-1235707480 tempest-ServerExternalEventsTest-1235707480-project-member] [instance: 7221a720-8ab9-44fd-abe2-8f8fc19b6433] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 556.868042] env[62070]: DEBUG oslo_service.periodic_task [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62070) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 556.869406] env[62070]: DEBUG oslo_service.periodic_task [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62070) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 556.869406] env[62070]: DEBUG nova.compute.manager [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Starting heal instance info cache {{(pid=62070) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}} [ 556.869518] env[62070]: DEBUG nova.compute.manager [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Rebuilding the list of instances to heal {{(pid=62070) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 557.065397] env[62070]: DEBUG nova.scheduler.client.report [None req-26488158-5329-4c5b-9a82-fbfcc788dbaa tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Inventory has not changed for provider 
21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 557.157155] env[62070]: DEBUG nova.network.neutron [None req-cb6fd1fb-d5f4-4d43-abd6-fc98c9bb8564 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] [instance: deee86eb-365b-4104-8687-72abdbf3807f] Successfully created port: fb4707e9-7728-45c0-9830-a99c0cf681b3 {{(pid=62070) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 557.215894] env[62070]: DEBUG oslo_concurrency.lockutils [None req-29cb1636-fccd-4431-8dd2-c1cf16ef9eb0 tempest-ServerDiagnosticsV248Test-1284349500 tempest-ServerDiagnosticsV248Test-1284349500-project-member] Lock "283e7488-1240-475f-a74d-809251950774" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.702s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 557.319160] env[62070]: DEBUG nova.compute.manager [None req-cb6fd1fb-d5f4-4d43-abd6-fc98c9bb8564 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] [instance: deee86eb-365b-4104-8687-72abdbf3807f] Start spawning the instance on the hypervisor. {{(pid=62070) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 557.323525] env[62070]: DEBUG nova.network.neutron [None req-0ad29952-ef64-49f7-9793-5bdd8a8330c4 tempest-ServerExternalEventsTest-1235707480 tempest-ServerExternalEventsTest-1235707480-project-member] [instance: 7221a720-8ab9-44fd-abe2-8f8fc19b6433] Instance cache missing network info. 
{{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 557.339127] env[62070]: DEBUG nova.virt.hardware [None req-cb6fd1fb-d5f4-4d43-abd6-fc98c9bb8564 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T09:21:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T09:21:17Z,direct_url=,disk_format='vmdk',id=43ea607c-7ece-4601-9b11-75c6a16aa7dd,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9d42cb2bbadf40d6b35f237f71234611',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T09:21:18Z,virtual_size=,visibility=), allow threads: False {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 557.339127] env[62070]: DEBUG nova.virt.hardware [None req-cb6fd1fb-d5f4-4d43-abd6-fc98c9bb8564 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Flavor limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 557.339127] env[62070]: DEBUG nova.virt.hardware [None req-cb6fd1fb-d5f4-4d43-abd6-fc98c9bb8564 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Image limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 557.339127] env[62070]: DEBUG nova.virt.hardware [None req-cb6fd1fb-d5f4-4d43-abd6-fc98c9bb8564 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Flavor pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 557.339525] env[62070]: DEBUG nova.virt.hardware [None req-cb6fd1fb-d5f4-4d43-abd6-fc98c9bb8564 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Image pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 557.339768] env[62070]: DEBUG nova.virt.hardware [None req-cb6fd1fb-d5f4-4d43-abd6-fc98c9bb8564 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 557.340051] env[62070]: DEBUG nova.virt.hardware [None req-cb6fd1fb-d5f4-4d43-abd6-fc98c9bb8564 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 557.340555] env[62070]: DEBUG nova.virt.hardware [None req-cb6fd1fb-d5f4-4d43-abd6-fc98c9bb8564 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 557.340794] env[62070]: DEBUG nova.virt.hardware [None req-cb6fd1fb-d5f4-4d43-abd6-fc98c9bb8564 
tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Got 1 possible topologies {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 557.341039] env[62070]: DEBUG nova.virt.hardware [None req-cb6fd1fb-d5f4-4d43-abd6-fc98c9bb8564 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 557.341280] env[62070]: DEBUG nova.virt.hardware [None req-cb6fd1fb-d5f4-4d43-abd6-fc98c9bb8564 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 557.342316] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05bfe759-f2ee-4e68-8c5b-469c71c9844d {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 557.352300] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51d208ad-a179-4b8d-9ba4-5846e9a19a69 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 557.375077] env[62070]: DEBUG nova.compute.manager [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] [instance: 9688a493-7046-49e1-b5ab-0db9cfbf37aa] Skipping network cache update for instance because it is Building. {{(pid=62070) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 557.375310] env[62070]: DEBUG nova.compute.manager [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] [instance: 7221a720-8ab9-44fd-abe2-8f8fc19b6433] Skipping network cache update for instance because it is Building. {{(pid=62070) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 557.375490] env[62070]: DEBUG nova.compute.manager [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] [instance: 87462fbe-d62d-4b40-880c-a1785c9ed5d4] Skipping network cache update for instance because it is Building. {{(pid=62070) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 557.375659] env[62070]: DEBUG nova.compute.manager [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] [instance: deee86eb-365b-4104-8687-72abdbf3807f] Skipping network cache update for instance because it is Building. {{(pid=62070) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 557.375826] env[62070]: DEBUG nova.compute.manager [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] [instance: 9b95dfcb-718e-478d-85bc-7479be9b67de] Skipping network cache update for instance because it is Building. 
{{(pid=62070) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 557.419119] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Acquiring lock "refresh_cache-283e7488-1240-475f-a74d-809251950774" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 557.419119] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Acquired lock "refresh_cache-283e7488-1240-475f-a74d-809251950774" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 557.419501] env[62070]: DEBUG nova.network.neutron [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] [instance: 283e7488-1240-475f-a74d-809251950774] Forcefully refreshing network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2004}} [ 557.419933] env[62070]: DEBUG nova.objects.instance [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Lazy-loading 'info_cache' on Instance uuid 283e7488-1240-475f-a74d-809251950774 {{(pid=62070) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 557.574072] env[62070]: DEBUG oslo_concurrency.lockutils [None req-26488158-5329-4c5b-9a82-fbfcc788dbaa tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.297s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 557.577235] env[62070]: DEBUG nova.compute.manager [None req-26488158-5329-4c5b-9a82-fbfcc788dbaa tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 9b95dfcb-718e-478d-85bc-7479be9b67de] Start building networks asynchronously for instance. 
{{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 557.580498] env[62070]: DEBUG oslo_concurrency.lockutils [None req-e375be83-77df-4817-8661-3151cdf18f15 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.430s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 557.582695] env[62070]: INFO nova.compute.claims [None req-e375be83-77df-4817-8661-3151cdf18f15 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] [instance: 9fd18ad4-7c72-4a13-8c29-da660a060020] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 557.592435] env[62070]: DEBUG nova.network.neutron [None req-0ad29952-ef64-49f7-9793-5bdd8a8330c4 tempest-ServerExternalEventsTest-1235707480 tempest-ServerExternalEventsTest-1235707480-project-member] [instance: 7221a720-8ab9-44fd-abe2-8f8fc19b6433] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 557.765814] env[62070]: DEBUG oslo_concurrency.lockutils [None req-ff380498-571f-460a-ac0d-214b8f51e56f tempest-ServersAdminTestJSON-1165817432 tempest-ServersAdminTestJSON-1165817432-project-member] Acquiring lock "5936aded-90fc-4f77-8103-8c9e1912379c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 557.765814] env[62070]: DEBUG oslo_concurrency.lockutils [None req-ff380498-571f-460a-ac0d-214b8f51e56f tempest-ServersAdminTestJSON-1165817432 tempest-ServersAdminTestJSON-1165817432-project-member] Lock "5936aded-90fc-4f77-8103-8c9e1912379c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 557.802281] env[62070]: DEBUG nova.compute.manager [req-2d644bb5-850b-48c3-8ff7-9902edd75ed7 req-91d1163c-cd6b-49aa-a380-3a1cc9484e47 service nova] [instance: 7221a720-8ab9-44fd-abe2-8f8fc19b6433] Received event network-changed-38930f3e-c522-4232-9100-7512b09dda78 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 557.803339] env[62070]: DEBUG nova.compute.manager [req-2d644bb5-850b-48c3-8ff7-9902edd75ed7 req-91d1163c-cd6b-49aa-a380-3a1cc9484e47 service nova] [instance: 7221a720-8ab9-44fd-abe2-8f8fc19b6433] Refreshing instance network info cache due to event network-changed-38930f3e-c522-4232-9100-7512b09dda78. 
{{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 557.804132] env[62070]: DEBUG oslo_concurrency.lockutils [req-2d644bb5-850b-48c3-8ff7-9902edd75ed7 req-91d1163c-cd6b-49aa-a380-3a1cc9484e47 service nova] Acquiring lock "refresh_cache-7221a720-8ab9-44fd-abe2-8f8fc19b6433" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 558.097176] env[62070]: DEBUG nova.compute.utils [None req-26488158-5329-4c5b-9a82-fbfcc788dbaa tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Using /dev/sd instead of None {{(pid=62070) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 558.106909] env[62070]: DEBUG nova.compute.manager [None req-26488158-5329-4c5b-9a82-fbfcc788dbaa tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 9b95dfcb-718e-478d-85bc-7479be9b67de] Allocating IP information in the background. {{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 558.106909] env[62070]: DEBUG nova.network.neutron [None req-26488158-5329-4c5b-9a82-fbfcc788dbaa tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 9b95dfcb-718e-478d-85bc-7479be9b67de] allocate_for_instance() {{(pid=62070) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 558.111113] env[62070]: DEBUG oslo_concurrency.lockutils [None req-0ad29952-ef64-49f7-9793-5bdd8a8330c4 tempest-ServerExternalEventsTest-1235707480 tempest-ServerExternalEventsTest-1235707480-project-member] Releasing lock "refresh_cache-7221a720-8ab9-44fd-abe2-8f8fc19b6433" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 558.112182] env[62070]: DEBUG nova.compute.manager [None req-0ad29952-ef64-49f7-9793-5bdd8a8330c4 tempest-ServerExternalEventsTest-1235707480 tempest-ServerExternalEventsTest-1235707480-project-member] [instance: 7221a720-8ab9-44fd-abe2-8f8fc19b6433] Start destroying the instance on the hypervisor. 
{{(pid=62070) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 558.112401] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-0ad29952-ef64-49f7-9793-5bdd8a8330c4 tempest-ServerExternalEventsTest-1235707480 tempest-ServerExternalEventsTest-1235707480-project-member] [instance: 7221a720-8ab9-44fd-abe2-8f8fc19b6433] Destroying instance {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 558.112941] env[62070]: DEBUG oslo_concurrency.lockutils [req-2d644bb5-850b-48c3-8ff7-9902edd75ed7 req-91d1163c-cd6b-49aa-a380-3a1cc9484e47 service nova] Acquired lock "refresh_cache-7221a720-8ab9-44fd-abe2-8f8fc19b6433" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 558.113136] env[62070]: DEBUG nova.network.neutron [req-2d644bb5-850b-48c3-8ff7-9902edd75ed7 req-91d1163c-cd6b-49aa-a380-3a1cc9484e47 service nova] [instance: 7221a720-8ab9-44fd-abe2-8f8fc19b6433] Refreshing network info cache for port 38930f3e-c522-4232-9100-7512b09dda78 {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 558.121140] env[62070]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f7f4b105-0a9b-47e2-9028-ab32e9e883e5 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 558.138513] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f103b56-03ce-4216-a20f-5bee3295bb14 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 558.168023] env[62070]: WARNING nova.virt.vmwareapi.vmops [None req-0ad29952-ef64-49f7-9793-5bdd8a8330c4 tempest-ServerExternalEventsTest-1235707480 tempest-ServerExternalEventsTest-1235707480-project-member] [instance: 7221a720-8ab9-44fd-abe2-8f8fc19b6433] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 7221a720-8ab9-44fd-abe2-8f8fc19b6433 could not be found. [ 558.168273] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-0ad29952-ef64-49f7-9793-5bdd8a8330c4 tempest-ServerExternalEventsTest-1235707480 tempest-ServerExternalEventsTest-1235707480-project-member] [instance: 7221a720-8ab9-44fd-abe2-8f8fc19b6433] Instance destroyed {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 558.168455] env[62070]: INFO nova.compute.manager [None req-0ad29952-ef64-49f7-9793-5bdd8a8330c4 tempest-ServerExternalEventsTest-1235707480 tempest-ServerExternalEventsTest-1235707480-project-member] [instance: 7221a720-8ab9-44fd-abe2-8f8fc19b6433] Took 0.06 seconds to destroy the instance on the hypervisor. [ 558.168703] env[62070]: DEBUG oslo.service.loopingcall [None req-0ad29952-ef64-49f7-9793-5bdd8a8330c4 tempest-ServerExternalEventsTest-1235707480 tempest-ServerExternalEventsTest-1235707480-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 558.168925] env[62070]: DEBUG nova.compute.manager [-] [instance: 7221a720-8ab9-44fd-abe2-8f8fc19b6433] Deallocating network for instance {{(pid=62070) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 558.169033] env[62070]: DEBUG nova.network.neutron [-] [instance: 7221a720-8ab9-44fd-abe2-8f8fc19b6433] deallocate_for_instance() {{(pid=62070) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 558.190193] env[62070]: DEBUG nova.network.neutron [-] [instance: 7221a720-8ab9-44fd-abe2-8f8fc19b6433] Instance cache missing network info. {{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 558.270953] env[62070]: DEBUG nova.compute.manager [None req-ff380498-571f-460a-ac0d-214b8f51e56f tempest-ServersAdminTestJSON-1165817432 tempest-ServersAdminTestJSON-1165817432-project-member] [instance: 5936aded-90fc-4f77-8103-8c9e1912379c] Starting instance... {{(pid=62070) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 558.335445] env[62070]: DEBUG nova.compute.manager [req-af697017-0315-4778-877c-16d893a4f3c5 req-b9668320-af7a-4871-b6cb-91171b35aa3d service nova] [instance: 9688a493-7046-49e1-b5ab-0db9cfbf37aa] Received event network-vif-deleted-38acdc96-ca4e-4eff-b4ff-8a9bbb18fb3f {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 558.342194] env[62070]: DEBUG nova.policy [None req-26488158-5329-4c5b-9a82-fbfcc788dbaa tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '093d4b68ffd04d4d951f5be91bfc76e8', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'eac8e5edc8f14fff89aba7c8cb6cac5d', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62070) authorize /opt/stack/nova/nova/policy.py:201}} [ 558.386260] env[62070]: DEBUG oslo_concurrency.lockutils [None req-7301dc1c-6dd9-44f9-a50e-255658085cb3 tempest-ServerDiagnosticsTest-24419400 tempest-ServerDiagnosticsTest-24419400-project-member] Acquiring lock "317f20e9-6ba1-4b41-b298-5dd844f323ac" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 558.386494] env[62070]: DEBUG oslo_concurrency.lockutils [None req-7301dc1c-6dd9-44f9-a50e-255658085cb3 tempest-ServerDiagnosticsTest-24419400 tempest-ServerDiagnosticsTest-24419400-project-member] Lock "317f20e9-6ba1-4b41-b298-5dd844f323ac" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 558.481572] env[62070]: DEBUG nova.network.neutron [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] [instance: 283e7488-1240-475f-a74d-809251950774] Instance cache missing network info. 
{{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 558.606705] env[62070]: DEBUG nova.compute.manager [None req-26488158-5329-4c5b-9a82-fbfcc788dbaa tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 9b95dfcb-718e-478d-85bc-7479be9b67de] Start building block device mappings for instance. {{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 558.659062] env[62070]: DEBUG nova.network.neutron [req-2d644bb5-850b-48c3-8ff7-9902edd75ed7 req-91d1163c-cd6b-49aa-a380-3a1cc9484e47 service nova] [instance: 7221a720-8ab9-44fd-abe2-8f8fc19b6433] Instance cache missing network info. {{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 558.699367] env[62070]: DEBUG nova.network.neutron [-] [instance: 7221a720-8ab9-44fd-abe2-8f8fc19b6433] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 558.711457] env[62070]: ERROR nova.compute.manager [None req-abc3cfd8-5b88-48b6-95da-0d8d4239d4c5 tempest-ServersV294TestFqdnHostnames-2059713138 tempest-ServersV294TestFqdnHostnames-2059713138-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port af01c0ca-546d-4b4d-a8cf-007e5f080e9c, please check neutron logs for more information. [ 558.711457] env[62070]: ERROR nova.compute.manager Traceback (most recent call last): [ 558.711457] env[62070]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 558.711457] env[62070]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 558.711457] env[62070]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 558.711457] env[62070]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 558.711457] env[62070]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 558.711457] env[62070]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 558.711457] env[62070]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 558.711457] env[62070]: ERROR nova.compute.manager self.force_reraise() [ 558.711457] env[62070]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 558.711457] env[62070]: ERROR nova.compute.manager raise self.value [ 558.711457] env[62070]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 558.711457] env[62070]: ERROR nova.compute.manager updated_port = self._update_port( [ 558.711457] env[62070]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 558.711457] env[62070]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 558.712168] env[62070]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 558.712168] env[62070]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 558.712168] env[62070]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port af01c0ca-546d-4b4d-a8cf-007e5f080e9c, 
please check neutron logs for more information. [ 558.712168] env[62070]: ERROR nova.compute.manager [ 558.712168] env[62070]: Traceback (most recent call last): [ 558.712168] env[62070]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 558.712168] env[62070]: listener.cb(fileno) [ 558.712168] env[62070]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 558.712168] env[62070]: result = function(*args, **kwargs) [ 558.712168] env[62070]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 558.712168] env[62070]: return func(*args, **kwargs) [ 558.712168] env[62070]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 558.712168] env[62070]: raise e [ 558.712168] env[62070]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 558.712168] env[62070]: nwinfo = self.network_api.allocate_for_instance( [ 558.712168] env[62070]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 558.712168] env[62070]: created_port_ids = self._update_ports_for_instance( [ 558.712168] env[62070]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 558.712168] env[62070]: with excutils.save_and_reraise_exception(): [ 558.712168] env[62070]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 558.712168] env[62070]: self.force_reraise() [ 558.712168] env[62070]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 558.712168] env[62070]: raise self.value [ 558.712168] env[62070]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 558.712168] env[62070]: updated_port = self._update_port( [ 558.712168] env[62070]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 558.712168] env[62070]: _ensure_no_port_binding_failure(port) [ 558.712168] env[62070]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 558.712168] env[62070]: raise exception.PortBindingFailed(port_id=port['id']) [ 558.712957] env[62070]: nova.exception.PortBindingFailed: Binding failed for port af01c0ca-546d-4b4d-a8cf-007e5f080e9c, please check neutron logs for more information. [ 558.712957] env[62070]: Removing descriptor: 18 [ 558.712957] env[62070]: ERROR nova.compute.manager [None req-abc3cfd8-5b88-48b6-95da-0d8d4239d4c5 tempest-ServersV294TestFqdnHostnames-2059713138 tempest-ServersV294TestFqdnHostnames-2059713138-project-member] [instance: 87462fbe-d62d-4b40-880c-a1785c9ed5d4] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port af01c0ca-546d-4b4d-a8cf-007e5f080e9c, please check neutron logs for more information. 
[ 558.712957] env[62070]: ERROR nova.compute.manager [instance: 87462fbe-d62d-4b40-880c-a1785c9ed5d4] Traceback (most recent call last): [ 558.712957] env[62070]: ERROR nova.compute.manager [instance: 87462fbe-d62d-4b40-880c-a1785c9ed5d4] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 558.712957] env[62070]: ERROR nova.compute.manager [instance: 87462fbe-d62d-4b40-880c-a1785c9ed5d4] yield resources [ 558.712957] env[62070]: ERROR nova.compute.manager [instance: 87462fbe-d62d-4b40-880c-a1785c9ed5d4] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 558.712957] env[62070]: ERROR nova.compute.manager [instance: 87462fbe-d62d-4b40-880c-a1785c9ed5d4] self.driver.spawn(context, instance, image_meta, [ 558.712957] env[62070]: ERROR nova.compute.manager [instance: 87462fbe-d62d-4b40-880c-a1785c9ed5d4] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 558.712957] env[62070]: ERROR nova.compute.manager [instance: 87462fbe-d62d-4b40-880c-a1785c9ed5d4] self._vmops.spawn(context, instance, image_meta, injected_files, [ 558.712957] env[62070]: ERROR nova.compute.manager [instance: 87462fbe-d62d-4b40-880c-a1785c9ed5d4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 558.712957] env[62070]: ERROR nova.compute.manager [instance: 87462fbe-d62d-4b40-880c-a1785c9ed5d4] vm_ref = self.build_virtual_machine(instance, [ 558.713330] env[62070]: ERROR nova.compute.manager [instance: 87462fbe-d62d-4b40-880c-a1785c9ed5d4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 558.713330] env[62070]: ERROR nova.compute.manager [instance: 87462fbe-d62d-4b40-880c-a1785c9ed5d4] vif_infos = vmwarevif.get_vif_info(self._session, [ 558.713330] env[62070]: ERROR nova.compute.manager [instance: 87462fbe-d62d-4b40-880c-a1785c9ed5d4] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 558.713330] env[62070]: ERROR nova.compute.manager [instance: 87462fbe-d62d-4b40-880c-a1785c9ed5d4] for vif in network_info: [ 558.713330] env[62070]: ERROR nova.compute.manager [instance: 87462fbe-d62d-4b40-880c-a1785c9ed5d4] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 558.713330] env[62070]: ERROR nova.compute.manager [instance: 87462fbe-d62d-4b40-880c-a1785c9ed5d4] return self._sync_wrapper(fn, *args, **kwargs) [ 558.713330] env[62070]: ERROR nova.compute.manager [instance: 87462fbe-d62d-4b40-880c-a1785c9ed5d4] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 558.713330] env[62070]: ERROR nova.compute.manager [instance: 87462fbe-d62d-4b40-880c-a1785c9ed5d4] self.wait() [ 558.713330] env[62070]: ERROR nova.compute.manager [instance: 87462fbe-d62d-4b40-880c-a1785c9ed5d4] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 558.713330] env[62070]: ERROR nova.compute.manager [instance: 87462fbe-d62d-4b40-880c-a1785c9ed5d4] self[:] = self._gt.wait() [ 558.713330] env[62070]: ERROR nova.compute.manager [instance: 87462fbe-d62d-4b40-880c-a1785c9ed5d4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 558.713330] env[62070]: ERROR nova.compute.manager [instance: 87462fbe-d62d-4b40-880c-a1785c9ed5d4] return self._exit_event.wait() [ 558.713330] env[62070]: ERROR nova.compute.manager [instance: 87462fbe-d62d-4b40-880c-a1785c9ed5d4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 558.713696] env[62070]: ERROR 
nova.compute.manager [instance: 87462fbe-d62d-4b40-880c-a1785c9ed5d4] result = hub.switch() [ 558.713696] env[62070]: ERROR nova.compute.manager [instance: 87462fbe-d62d-4b40-880c-a1785c9ed5d4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 558.713696] env[62070]: ERROR nova.compute.manager [instance: 87462fbe-d62d-4b40-880c-a1785c9ed5d4] return self.greenlet.switch() [ 558.713696] env[62070]: ERROR nova.compute.manager [instance: 87462fbe-d62d-4b40-880c-a1785c9ed5d4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 558.713696] env[62070]: ERROR nova.compute.manager [instance: 87462fbe-d62d-4b40-880c-a1785c9ed5d4] result = function(*args, **kwargs) [ 558.713696] env[62070]: ERROR nova.compute.manager [instance: 87462fbe-d62d-4b40-880c-a1785c9ed5d4] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 558.713696] env[62070]: ERROR nova.compute.manager [instance: 87462fbe-d62d-4b40-880c-a1785c9ed5d4] return func(*args, **kwargs) [ 558.713696] env[62070]: ERROR nova.compute.manager [instance: 87462fbe-d62d-4b40-880c-a1785c9ed5d4] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 558.713696] env[62070]: ERROR nova.compute.manager [instance: 87462fbe-d62d-4b40-880c-a1785c9ed5d4] raise e [ 558.713696] env[62070]: ERROR nova.compute.manager [instance: 87462fbe-d62d-4b40-880c-a1785c9ed5d4] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 558.713696] env[62070]: ERROR nova.compute.manager [instance: 87462fbe-d62d-4b40-880c-a1785c9ed5d4] nwinfo = self.network_api.allocate_for_instance( [ 558.713696] env[62070]: ERROR nova.compute.manager [instance: 87462fbe-d62d-4b40-880c-a1785c9ed5d4] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 558.713696] env[62070]: ERROR nova.compute.manager [instance: 87462fbe-d62d-4b40-880c-a1785c9ed5d4] created_port_ids = self._update_ports_for_instance( [ 558.714083] env[62070]: ERROR nova.compute.manager [instance: 87462fbe-d62d-4b40-880c-a1785c9ed5d4] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 558.714083] env[62070]: ERROR nova.compute.manager [instance: 87462fbe-d62d-4b40-880c-a1785c9ed5d4] with excutils.save_and_reraise_exception(): [ 558.714083] env[62070]: ERROR nova.compute.manager [instance: 87462fbe-d62d-4b40-880c-a1785c9ed5d4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 558.714083] env[62070]: ERROR nova.compute.manager [instance: 87462fbe-d62d-4b40-880c-a1785c9ed5d4] self.force_reraise() [ 558.714083] env[62070]: ERROR nova.compute.manager [instance: 87462fbe-d62d-4b40-880c-a1785c9ed5d4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 558.714083] env[62070]: ERROR nova.compute.manager [instance: 87462fbe-d62d-4b40-880c-a1785c9ed5d4] raise self.value [ 558.714083] env[62070]: ERROR nova.compute.manager [instance: 87462fbe-d62d-4b40-880c-a1785c9ed5d4] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 558.714083] env[62070]: ERROR nova.compute.manager [instance: 87462fbe-d62d-4b40-880c-a1785c9ed5d4] updated_port = self._update_port( [ 558.714083] env[62070]: ERROR nova.compute.manager [instance: 87462fbe-d62d-4b40-880c-a1785c9ed5d4] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 558.714083] 
env[62070]: ERROR nova.compute.manager [instance: 87462fbe-d62d-4b40-880c-a1785c9ed5d4] _ensure_no_port_binding_failure(port) [ 558.714083] env[62070]: ERROR nova.compute.manager [instance: 87462fbe-d62d-4b40-880c-a1785c9ed5d4] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 558.714083] env[62070]: ERROR nova.compute.manager [instance: 87462fbe-d62d-4b40-880c-a1785c9ed5d4] raise exception.PortBindingFailed(port_id=port['id']) [ 558.714418] env[62070]: ERROR nova.compute.manager [instance: 87462fbe-d62d-4b40-880c-a1785c9ed5d4] nova.exception.PortBindingFailed: Binding failed for port af01c0ca-546d-4b4d-a8cf-007e5f080e9c, please check neutron logs for more information. [ 558.714418] env[62070]: ERROR nova.compute.manager [instance: 87462fbe-d62d-4b40-880c-a1785c9ed5d4] [ 558.714418] env[62070]: INFO nova.compute.manager [None req-abc3cfd8-5b88-48b6-95da-0d8d4239d4c5 tempest-ServersV294TestFqdnHostnames-2059713138 tempest-ServersV294TestFqdnHostnames-2059713138-project-member] [instance: 87462fbe-d62d-4b40-880c-a1785c9ed5d4] Terminating instance [ 558.717669] env[62070]: DEBUG oslo_concurrency.lockutils [None req-abc3cfd8-5b88-48b6-95da-0d8d4239d4c5 tempest-ServersV294TestFqdnHostnames-2059713138 tempest-ServersV294TestFqdnHostnames-2059713138-project-member] Acquiring lock "refresh_cache-87462fbe-d62d-4b40-880c-a1785c9ed5d4" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 558.717892] env[62070]: DEBUG oslo_concurrency.lockutils [None req-abc3cfd8-5b88-48b6-95da-0d8d4239d4c5 tempest-ServersV294TestFqdnHostnames-2059713138 tempest-ServersV294TestFqdnHostnames-2059713138-project-member] Acquired lock "refresh_cache-87462fbe-d62d-4b40-880c-a1785c9ed5d4" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 558.718160] env[62070]: DEBUG nova.network.neutron [None req-abc3cfd8-5b88-48b6-95da-0d8d4239d4c5 tempest-ServersV294TestFqdnHostnames-2059713138 tempest-ServersV294TestFqdnHostnames-2059713138-project-member] [instance: 87462fbe-d62d-4b40-880c-a1785c9ed5d4] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 558.814555] env[62070]: DEBUG oslo_concurrency.lockutils [None req-ff380498-571f-460a-ac0d-214b8f51e56f tempest-ServersAdminTestJSON-1165817432 tempest-ServersAdminTestJSON-1165817432-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 558.841678] env[62070]: DEBUG oslo_concurrency.lockutils [None req-1f16b43e-5915-487b-ba0a-4bf16e720213 tempest-ImagesNegativeTestJSON-1253036663 tempest-ImagesNegativeTestJSON-1253036663-project-member] Acquiring lock "88251634-8add-4216-b789-dfee77a1ae09" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 558.842024] env[62070]: DEBUG oslo_concurrency.lockutils [None req-1f16b43e-5915-487b-ba0a-4bf16e720213 tempest-ImagesNegativeTestJSON-1253036663 tempest-ImagesNegativeTestJSON-1253036663-project-member] Lock "88251634-8add-4216-b789-dfee77a1ae09" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62070) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 558.877986] env[62070]: DEBUG nova.network.neutron [req-2d644bb5-850b-48c3-8ff7-9902edd75ed7 req-91d1163c-cd6b-49aa-a380-3a1cc9484e47 service nova] [instance: 7221a720-8ab9-44fd-abe2-8f8fc19b6433] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 558.890787] env[62070]: DEBUG nova.compute.manager [None req-7301dc1c-6dd9-44f9-a50e-255658085cb3 tempest-ServerDiagnosticsTest-24419400 tempest-ServerDiagnosticsTest-24419400-project-member] [instance: 317f20e9-6ba1-4b41-b298-5dd844f323ac] Starting instance... {{(pid=62070) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 558.994440] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a9424af-a132-49ba-b381-b5d80cfcc4ee {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 559.005678] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d614b31-b915-41b7-9f40-993fedabde04 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 559.044947] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d3aba5c-dfb6-45bf-8638-7f10a1d5780a {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 559.056025] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a09b64d-403a-4ac3-ad30-146f99eb32f6 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 559.071424] env[62070]: DEBUG nova.compute.provider_tree [None req-e375be83-77df-4817-8661-3151cdf18f15 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 559.188417] env[62070]: DEBUG nova.network.neutron [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] [instance: 283e7488-1240-475f-a74d-809251950774] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 559.208416] env[62070]: INFO nova.compute.manager [-] [instance: 7221a720-8ab9-44fd-abe2-8f8fc19b6433] Took 1.04 seconds to deallocate network for instance. 
[ 559.212174] env[62070]: DEBUG nova.compute.claims [None req-0ad29952-ef64-49f7-9793-5bdd8a8330c4 tempest-ServerExternalEventsTest-1235707480 tempest-ServerExternalEventsTest-1235707480-project-member] [instance: 7221a720-8ab9-44fd-abe2-8f8fc19b6433] Aborting claim: {{(pid=62070) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 559.215018] env[62070]: DEBUG oslo_concurrency.lockutils [None req-0ad29952-ef64-49f7-9793-5bdd8a8330c4 tempest-ServerExternalEventsTest-1235707480 tempest-ServerExternalEventsTest-1235707480-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 559.243945] env[62070]: DEBUG nova.network.neutron [None req-abc3cfd8-5b88-48b6-95da-0d8d4239d4c5 tempest-ServersV294TestFqdnHostnames-2059713138 tempest-ServersV294TestFqdnHostnames-2059713138-project-member] [instance: 87462fbe-d62d-4b40-880c-a1785c9ed5d4] Instance cache missing network info. {{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 559.361677] env[62070]: DEBUG nova.network.neutron [None req-26488158-5329-4c5b-9a82-fbfcc788dbaa tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 9b95dfcb-718e-478d-85bc-7479be9b67de] Successfully created port: e3164438-df26-48c5-84eb-9925989f48e6 {{(pid=62070) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 559.382244] env[62070]: DEBUG oslo_concurrency.lockutils [req-2d644bb5-850b-48c3-8ff7-9902edd75ed7 req-91d1163c-cd6b-49aa-a380-3a1cc9484e47 service nova] Releasing lock "refresh_cache-7221a720-8ab9-44fd-abe2-8f8fc19b6433" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 559.496342] env[62070]: DEBUG oslo_concurrency.lockutils [None req-7301dc1c-6dd9-44f9-a50e-255658085cb3 tempest-ServerDiagnosticsTest-24419400 tempest-ServerDiagnosticsTest-24419400-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 559.571400] env[62070]: DEBUG nova.network.neutron [None req-abc3cfd8-5b88-48b6-95da-0d8d4239d4c5 tempest-ServersV294TestFqdnHostnames-2059713138 tempest-ServersV294TestFqdnHostnames-2059713138-project-member] [instance: 87462fbe-d62d-4b40-880c-a1785c9ed5d4] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 559.576074] env[62070]: DEBUG nova.scheduler.client.report [None req-e375be83-77df-4817-8661-3151cdf18f15 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 559.629151] env[62070]: DEBUG nova.compute.manager [None req-26488158-5329-4c5b-9a82-fbfcc788dbaa tempest-ImagesTestJSON-1351627684 
tempest-ImagesTestJSON-1351627684-project-member] [instance: 9b95dfcb-718e-478d-85bc-7479be9b67de] Start spawning the instance on the hypervisor. {{(pid=62070) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 559.632690] env[62070]: DEBUG nova.compute.manager [None req-82e48fec-c72e-4944-bc17-68bdd5c58839 tempest-ServerDiagnosticsV248Test-296067008 tempest-ServerDiagnosticsV248Test-296067008-project-admin] [instance: 283e7488-1240-475f-a74d-809251950774] Checking state {{(pid=62070) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 559.634281] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38fd5db8-2eba-4032-97ce-b3df5370234f {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 559.642485] env[62070]: INFO nova.compute.manager [None req-82e48fec-c72e-4944-bc17-68bdd5c58839 tempest-ServerDiagnosticsV248Test-296067008 tempest-ServerDiagnosticsV248Test-296067008-project-admin] [instance: 283e7488-1240-475f-a74d-809251950774] Retrieving diagnostics [ 559.643582] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c2a8687-9fb7-47e9-a42e-cbae15e088c3 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 559.689972] env[62070]: DEBUG nova.virt.hardware [None req-26488158-5329-4c5b-9a82-fbfcc788dbaa tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T09:21:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T09:21:17Z,direct_url=,disk_format='vmdk',id=43ea607c-7ece-4601-9b11-75c6a16aa7dd,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9d42cb2bbadf40d6b35f237f71234611',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T09:21:18Z,virtual_size=,visibility=), allow threads: False {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 559.689972] env[62070]: DEBUG nova.virt.hardware [None req-26488158-5329-4c5b-9a82-fbfcc788dbaa tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Flavor limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 559.689972] env[62070]: DEBUG nova.virt.hardware [None req-26488158-5329-4c5b-9a82-fbfcc788dbaa tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Image limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 559.689972] env[62070]: DEBUG nova.virt.hardware [None req-26488158-5329-4c5b-9a82-fbfcc788dbaa tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Flavor pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 559.690179] env[62070]: DEBUG nova.virt.hardware [None req-26488158-5329-4c5b-9a82-fbfcc788dbaa tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Image pref 0:0:0 {{(pid=62070) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 559.690669] env[62070]: DEBUG nova.virt.hardware [None req-26488158-5329-4c5b-9a82-fbfcc788dbaa tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 559.691031] env[62070]: DEBUG nova.virt.hardware [None req-26488158-5329-4c5b-9a82-fbfcc788dbaa tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 559.691328] env[62070]: DEBUG nova.virt.hardware [None req-26488158-5329-4c5b-9a82-fbfcc788dbaa tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 559.691618] env[62070]: DEBUG nova.virt.hardware [None req-26488158-5329-4c5b-9a82-fbfcc788dbaa tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Got 1 possible topologies {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 559.691900] env[62070]: DEBUG nova.virt.hardware [None req-26488158-5329-4c5b-9a82-fbfcc788dbaa tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 559.692484] env[62070]: DEBUG nova.virt.hardware [None req-26488158-5329-4c5b-9a82-fbfcc788dbaa tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 559.693109] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Releasing lock "refresh_cache-283e7488-1240-475f-a74d-809251950774" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 559.693109] env[62070]: DEBUG nova.compute.manager [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] [instance: 283e7488-1240-475f-a74d-809251950774] Updated the network info_cache for instance {{(pid=62070) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9999}} [ 559.694027] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e290fdde-8102-4963-89f7-8cfc6c8bc8fd {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 559.697134] env[62070]: DEBUG oslo_service.periodic_task [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62070) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 559.697459] env[62070]: DEBUG oslo_service.periodic_task [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62070) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 559.698070] env[62070]: DEBUG oslo_service.periodic_task [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62070) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 559.698298] env[62070]: DEBUG oslo_service.periodic_task [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62070) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 559.698484] env[62070]: DEBUG oslo_service.periodic_task [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62070) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 559.698661] env[62070]: DEBUG oslo_service.periodic_task [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62070) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 559.698822] env[62070]: DEBUG nova.compute.manager [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62070) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}} [ 559.699989] env[62070]: DEBUG oslo_service.periodic_task [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62070) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 559.704605] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f05a9f33-5c26-4dfb-9110-09422c39dd03 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 560.081228] env[62070]: DEBUG oslo_concurrency.lockutils [None req-abc3cfd8-5b88-48b6-95da-0d8d4239d4c5 tempest-ServersV294TestFqdnHostnames-2059713138 tempest-ServersV294TestFqdnHostnames-2059713138-project-member] Releasing lock "refresh_cache-87462fbe-d62d-4b40-880c-a1785c9ed5d4" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 560.082042] env[62070]: DEBUG nova.compute.manager [None req-abc3cfd8-5b88-48b6-95da-0d8d4239d4c5 tempest-ServersV294TestFqdnHostnames-2059713138 tempest-ServersV294TestFqdnHostnames-2059713138-project-member] [instance: 87462fbe-d62d-4b40-880c-a1785c9ed5d4] Start destroying the instance on the hypervisor. 
{{(pid=62070) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 560.082042] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-abc3cfd8-5b88-48b6-95da-0d8d4239d4c5 tempest-ServersV294TestFqdnHostnames-2059713138 tempest-ServersV294TestFqdnHostnames-2059713138-project-member] [instance: 87462fbe-d62d-4b40-880c-a1785c9ed5d4] Destroying instance {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 560.082751] env[62070]: DEBUG oslo_concurrency.lockutils [None req-e375be83-77df-4817-8661-3151cdf18f15 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.502s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 560.083257] env[62070]: DEBUG nova.compute.manager [None req-e375be83-77df-4817-8661-3151cdf18f15 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] [instance: 9fd18ad4-7c72-4a13-8c29-da660a060020] Start building networks asynchronously for instance. {{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 560.087916] env[62070]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-101b496c-cf89-4952-96c7-6687a85058cb {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 560.091221] env[62070]: DEBUG oslo_concurrency.lockutils [None req-ef89e838-db76-4acf-b7ea-6f620e71b226 tempest-DeleteServersAdminTestJSON-428050376 tempest-DeleteServersAdminTestJSON-428050376-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.407s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 560.094156] env[62070]: INFO nova.compute.claims [None req-ef89e838-db76-4acf-b7ea-6f620e71b226 tempest-DeleteServersAdminTestJSON-428050376 tempest-DeleteServersAdminTestJSON-428050376-project-member] [instance: 768b0b5f-7d20-4bc6-87f6-b66adcce42c6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 560.105221] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-258ab1a9-ac39-4c69-b12a-1465b13dda0a {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 560.130542] env[62070]: WARNING nova.virt.vmwareapi.vmops [None req-abc3cfd8-5b88-48b6-95da-0d8d4239d4c5 tempest-ServersV294TestFqdnHostnames-2059713138 tempest-ServersV294TestFqdnHostnames-2059713138-project-member] [instance: 87462fbe-d62d-4b40-880c-a1785c9ed5d4] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 87462fbe-d62d-4b40-880c-a1785c9ed5d4 could not be found. 
[ 560.131074] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-abc3cfd8-5b88-48b6-95da-0d8d4239d4c5 tempest-ServersV294TestFqdnHostnames-2059713138 tempest-ServersV294TestFqdnHostnames-2059713138-project-member] [instance: 87462fbe-d62d-4b40-880c-a1785c9ed5d4] Instance destroyed {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 560.131074] env[62070]: INFO nova.compute.manager [None req-abc3cfd8-5b88-48b6-95da-0d8d4239d4c5 tempest-ServersV294TestFqdnHostnames-2059713138 tempest-ServersV294TestFqdnHostnames-2059713138-project-member] [instance: 87462fbe-d62d-4b40-880c-a1785c9ed5d4] Took 0.05 seconds to destroy the instance on the hypervisor. [ 560.131074] env[62070]: DEBUG oslo.service.loopingcall [None req-abc3cfd8-5b88-48b6-95da-0d8d4239d4c5 tempest-ServersV294TestFqdnHostnames-2059713138 tempest-ServersV294TestFqdnHostnames-2059713138-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 560.131248] env[62070]: DEBUG nova.compute.manager [-] [instance: 87462fbe-d62d-4b40-880c-a1785c9ed5d4] Deallocating network for instance {{(pid=62070) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 560.134604] env[62070]: DEBUG nova.network.neutron [-] [instance: 87462fbe-d62d-4b40-880c-a1785c9ed5d4] deallocate_for_instance() {{(pid=62070) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 560.174542] env[62070]: DEBUG nova.network.neutron [-] [instance: 87462fbe-d62d-4b40-880c-a1785c9ed5d4] Instance cache missing network info. {{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 560.205031] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 560.595363] env[62070]: DEBUG nova.compute.utils [None req-e375be83-77df-4817-8661-3151cdf18f15 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Using /dev/sd instead of None {{(pid=62070) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 560.599919] env[62070]: DEBUG nova.compute.manager [None req-e375be83-77df-4817-8661-3151cdf18f15 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] [instance: 9fd18ad4-7c72-4a13-8c29-da660a060020] Allocating IP information in the background. 
{{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 560.600156] env[62070]: DEBUG nova.network.neutron [None req-e375be83-77df-4817-8661-3151cdf18f15 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] [instance: 9fd18ad4-7c72-4a13-8c29-da660a060020] allocate_for_instance() {{(pid=62070) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 560.679443] env[62070]: DEBUG nova.network.neutron [-] [instance: 87462fbe-d62d-4b40-880c-a1785c9ed5d4] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 560.815506] env[62070]: ERROR nova.compute.manager [None req-cb6fd1fb-d5f4-4d43-abd6-fc98c9bb8564 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port fb4707e9-7728-45c0-9830-a99c0cf681b3, please check neutron logs for more information. [ 560.815506] env[62070]: ERROR nova.compute.manager Traceback (most recent call last): [ 560.815506] env[62070]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 560.815506] env[62070]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 560.815506] env[62070]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 560.815506] env[62070]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 560.815506] env[62070]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 560.815506] env[62070]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 560.815506] env[62070]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 560.815506] env[62070]: ERROR nova.compute.manager self.force_reraise() [ 560.815506] env[62070]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 560.815506] env[62070]: ERROR nova.compute.manager raise self.value [ 560.815506] env[62070]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 560.815506] env[62070]: ERROR nova.compute.manager updated_port = self._update_port( [ 560.815506] env[62070]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 560.815506] env[62070]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 560.816830] env[62070]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 560.816830] env[62070]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 560.816830] env[62070]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port fb4707e9-7728-45c0-9830-a99c0cf681b3, please check neutron logs for more information. 
[ 560.816830] env[62070]: ERROR nova.compute.manager [ 560.818956] env[62070]: Traceback (most recent call last): [ 560.818956] env[62070]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 560.818956] env[62070]: listener.cb(fileno) [ 560.818956] env[62070]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 560.818956] env[62070]: result = function(*args, **kwargs) [ 560.818956] env[62070]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 560.818956] env[62070]: return func(*args, **kwargs) [ 560.818956] env[62070]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 560.818956] env[62070]: raise e [ 560.818956] env[62070]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 560.818956] env[62070]: nwinfo = self.network_api.allocate_for_instance( [ 560.818956] env[62070]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 560.818956] env[62070]: created_port_ids = self._update_ports_for_instance( [ 560.818956] env[62070]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 560.818956] env[62070]: with excutils.save_and_reraise_exception(): [ 560.818956] env[62070]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 560.818956] env[62070]: self.force_reraise() [ 560.818956] env[62070]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 560.818956] env[62070]: raise self.value [ 560.818956] env[62070]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 560.818956] env[62070]: updated_port = self._update_port( [ 560.818956] env[62070]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 560.818956] env[62070]: _ensure_no_port_binding_failure(port) [ 560.818956] env[62070]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 560.818956] env[62070]: raise exception.PortBindingFailed(port_id=port['id']) [ 560.818956] env[62070]: nova.exception.PortBindingFailed: Binding failed for port fb4707e9-7728-45c0-9830-a99c0cf681b3, please check neutron logs for more information. [ 560.818956] env[62070]: Removing descriptor: 19 [ 560.819729] env[62070]: ERROR nova.compute.manager [None req-cb6fd1fb-d5f4-4d43-abd6-fc98c9bb8564 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] [instance: deee86eb-365b-4104-8687-72abdbf3807f] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port fb4707e9-7728-45c0-9830-a99c0cf681b3, please check neutron logs for more information. 
[ 560.819729] env[62070]: ERROR nova.compute.manager [instance: deee86eb-365b-4104-8687-72abdbf3807f] Traceback (most recent call last): [ 560.819729] env[62070]: ERROR nova.compute.manager [instance: deee86eb-365b-4104-8687-72abdbf3807f] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 560.819729] env[62070]: ERROR nova.compute.manager [instance: deee86eb-365b-4104-8687-72abdbf3807f] yield resources [ 560.819729] env[62070]: ERROR nova.compute.manager [instance: deee86eb-365b-4104-8687-72abdbf3807f] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 560.819729] env[62070]: ERROR nova.compute.manager [instance: deee86eb-365b-4104-8687-72abdbf3807f] self.driver.spawn(context, instance, image_meta, [ 560.819729] env[62070]: ERROR nova.compute.manager [instance: deee86eb-365b-4104-8687-72abdbf3807f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 560.819729] env[62070]: ERROR nova.compute.manager [instance: deee86eb-365b-4104-8687-72abdbf3807f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 560.819729] env[62070]: ERROR nova.compute.manager [instance: deee86eb-365b-4104-8687-72abdbf3807f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 560.819729] env[62070]: ERROR nova.compute.manager [instance: deee86eb-365b-4104-8687-72abdbf3807f] vm_ref = self.build_virtual_machine(instance, [ 560.819729] env[62070]: ERROR nova.compute.manager [instance: deee86eb-365b-4104-8687-72abdbf3807f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 560.820044] env[62070]: ERROR nova.compute.manager [instance: deee86eb-365b-4104-8687-72abdbf3807f] vif_infos = vmwarevif.get_vif_info(self._session, [ 560.820044] env[62070]: ERROR nova.compute.manager [instance: deee86eb-365b-4104-8687-72abdbf3807f] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 560.820044] env[62070]: ERROR nova.compute.manager [instance: deee86eb-365b-4104-8687-72abdbf3807f] for vif in network_info: [ 560.820044] env[62070]: ERROR nova.compute.manager [instance: deee86eb-365b-4104-8687-72abdbf3807f] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 560.820044] env[62070]: ERROR nova.compute.manager [instance: deee86eb-365b-4104-8687-72abdbf3807f] return self._sync_wrapper(fn, *args, **kwargs) [ 560.820044] env[62070]: ERROR nova.compute.manager [instance: deee86eb-365b-4104-8687-72abdbf3807f] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 560.820044] env[62070]: ERROR nova.compute.manager [instance: deee86eb-365b-4104-8687-72abdbf3807f] self.wait() [ 560.820044] env[62070]: ERROR nova.compute.manager [instance: deee86eb-365b-4104-8687-72abdbf3807f] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 560.820044] env[62070]: ERROR nova.compute.manager [instance: deee86eb-365b-4104-8687-72abdbf3807f] self[:] = self._gt.wait() [ 560.820044] env[62070]: ERROR nova.compute.manager [instance: deee86eb-365b-4104-8687-72abdbf3807f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 560.820044] env[62070]: ERROR nova.compute.manager [instance: deee86eb-365b-4104-8687-72abdbf3807f] return self._exit_event.wait() [ 560.820044] env[62070]: ERROR nova.compute.manager [instance: deee86eb-365b-4104-8687-72abdbf3807f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 560.820044] env[62070]: ERROR 
nova.compute.manager [instance: deee86eb-365b-4104-8687-72abdbf3807f] result = hub.switch() [ 560.820363] env[62070]: ERROR nova.compute.manager [instance: deee86eb-365b-4104-8687-72abdbf3807f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 560.820363] env[62070]: ERROR nova.compute.manager [instance: deee86eb-365b-4104-8687-72abdbf3807f] return self.greenlet.switch() [ 560.820363] env[62070]: ERROR nova.compute.manager [instance: deee86eb-365b-4104-8687-72abdbf3807f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 560.820363] env[62070]: ERROR nova.compute.manager [instance: deee86eb-365b-4104-8687-72abdbf3807f] result = function(*args, **kwargs) [ 560.820363] env[62070]: ERROR nova.compute.manager [instance: deee86eb-365b-4104-8687-72abdbf3807f] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 560.820363] env[62070]: ERROR nova.compute.manager [instance: deee86eb-365b-4104-8687-72abdbf3807f] return func(*args, **kwargs) [ 560.820363] env[62070]: ERROR nova.compute.manager [instance: deee86eb-365b-4104-8687-72abdbf3807f] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 560.820363] env[62070]: ERROR nova.compute.manager [instance: deee86eb-365b-4104-8687-72abdbf3807f] raise e [ 560.820363] env[62070]: ERROR nova.compute.manager [instance: deee86eb-365b-4104-8687-72abdbf3807f] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 560.820363] env[62070]: ERROR nova.compute.manager [instance: deee86eb-365b-4104-8687-72abdbf3807f] nwinfo = self.network_api.allocate_for_instance( [ 560.820363] env[62070]: ERROR nova.compute.manager [instance: deee86eb-365b-4104-8687-72abdbf3807f] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 560.820363] env[62070]: ERROR nova.compute.manager [instance: deee86eb-365b-4104-8687-72abdbf3807f] created_port_ids = self._update_ports_for_instance( [ 560.820363] env[62070]: ERROR nova.compute.manager [instance: deee86eb-365b-4104-8687-72abdbf3807f] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 560.820679] env[62070]: ERROR nova.compute.manager [instance: deee86eb-365b-4104-8687-72abdbf3807f] with excutils.save_and_reraise_exception(): [ 560.820679] env[62070]: ERROR nova.compute.manager [instance: deee86eb-365b-4104-8687-72abdbf3807f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 560.820679] env[62070]: ERROR nova.compute.manager [instance: deee86eb-365b-4104-8687-72abdbf3807f] self.force_reraise() [ 560.820679] env[62070]: ERROR nova.compute.manager [instance: deee86eb-365b-4104-8687-72abdbf3807f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 560.820679] env[62070]: ERROR nova.compute.manager [instance: deee86eb-365b-4104-8687-72abdbf3807f] raise self.value [ 560.820679] env[62070]: ERROR nova.compute.manager [instance: deee86eb-365b-4104-8687-72abdbf3807f] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 560.820679] env[62070]: ERROR nova.compute.manager [instance: deee86eb-365b-4104-8687-72abdbf3807f] updated_port = self._update_port( [ 560.820679] env[62070]: ERROR nova.compute.manager [instance: deee86eb-365b-4104-8687-72abdbf3807f] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 560.820679] 
env[62070]: ERROR nova.compute.manager [instance: deee86eb-365b-4104-8687-72abdbf3807f] _ensure_no_port_binding_failure(port) [ 560.820679] env[62070]: ERROR nova.compute.manager [instance: deee86eb-365b-4104-8687-72abdbf3807f] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 560.820679] env[62070]: ERROR nova.compute.manager [instance: deee86eb-365b-4104-8687-72abdbf3807f] raise exception.PortBindingFailed(port_id=port['id']) [ 560.820679] env[62070]: ERROR nova.compute.manager [instance: deee86eb-365b-4104-8687-72abdbf3807f] nova.exception.PortBindingFailed: Binding failed for port fb4707e9-7728-45c0-9830-a99c0cf681b3, please check neutron logs for more information. [ 560.820679] env[62070]: ERROR nova.compute.manager [instance: deee86eb-365b-4104-8687-72abdbf3807f] [ 560.821022] env[62070]: INFO nova.compute.manager [None req-cb6fd1fb-d5f4-4d43-abd6-fc98c9bb8564 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] [instance: deee86eb-365b-4104-8687-72abdbf3807f] Terminating instance [ 560.824771] env[62070]: DEBUG oslo_concurrency.lockutils [None req-cb6fd1fb-d5f4-4d43-abd6-fc98c9bb8564 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Acquiring lock "refresh_cache-deee86eb-365b-4104-8687-72abdbf3807f" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 560.824771] env[62070]: DEBUG oslo_concurrency.lockutils [None req-cb6fd1fb-d5f4-4d43-abd6-fc98c9bb8564 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Acquired lock "refresh_cache-deee86eb-365b-4104-8687-72abdbf3807f" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 560.824771] env[62070]: DEBUG nova.network.neutron [None req-cb6fd1fb-d5f4-4d43-abd6-fc98c9bb8564 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] [instance: deee86eb-365b-4104-8687-72abdbf3807f] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 560.904275] env[62070]: DEBUG nova.policy [None req-e375be83-77df-4817-8661-3151cdf18f15 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7534320dee8f486e90f5174aa94d00bd', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '925dff51764c4b56ae7ea05fbde2ecdd', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62070) authorize /opt/stack/nova/nova/policy.py:201}} [ 561.104079] env[62070]: DEBUG nova.compute.manager [None req-e375be83-77df-4817-8661-3151cdf18f15 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] [instance: 9fd18ad4-7c72-4a13-8c29-da660a060020] Start building block device mappings for instance. {{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 561.185592] env[62070]: INFO nova.compute.manager [-] [instance: 87462fbe-d62d-4b40-880c-a1785c9ed5d4] Took 1.05 seconds to deallocate network for instance. 
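Annotation on the PortBindingFailed tracebacks above: after Neutron creates or updates a port, Nova inspects the returned port's binding:vif_type; when Neutron's mechanism drivers could not bind the port, that field comes back as 'binding_failed' and Nova aborts the build by raising PortBindingFailed from _ensure_no_port_binding_failure (nova/network/neutron.py, as shown in the stack frames). The following is a standalone sketch of that check, not the Nova source; the port dict is a hypothetical payload shaped like the failure logged above.

# Sketch only: how a 'binding_failed' vif_type on a Neutron port turns into
# the PortBindingFailed error seen in the traceback.
VIF_TYPE_BINDING_FAILED = 'binding_failed'  # value reported when no mechanism driver bound the port

class PortBindingFailed(Exception):
    def __init__(self, port_id):
        super().__init__(
            "Binding failed for port %s, please check neutron logs for more "
            "information." % port_id)

def ensure_no_port_binding_failure(port):
    # 'port' is the dict returned by a Neutron port create/update call.
    if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
        raise PortBindingFailed(port_id=port['id'])

# Hypothetical port payload resembling the failure above.
port = {'id': 'fb4707e9-7728-45c0-9830-a99c0cf681b3',
        'binding:vif_type': VIF_TYPE_BINDING_FAILED}
try:
    ensure_no_port_binding_failure(port)
except PortBindingFailed as exc:
    print(exc)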
[ 561.191836] env[62070]: DEBUG nova.compute.claims [None req-abc3cfd8-5b88-48b6-95da-0d8d4239d4c5 tempest-ServersV294TestFqdnHostnames-2059713138 tempest-ServersV294TestFqdnHostnames-2059713138-project-member] [instance: 87462fbe-d62d-4b40-880c-a1785c9ed5d4] Aborting claim: {{(pid=62070) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 561.191836] env[62070]: DEBUG oslo_concurrency.lockutils [None req-abc3cfd8-5b88-48b6-95da-0d8d4239d4c5 tempest-ServersV294TestFqdnHostnames-2059713138 tempest-ServersV294TestFqdnHostnames-2059713138-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 561.252293] env[62070]: DEBUG nova.compute.manager [req-73602a73-3fe4-4bd3-9321-523a62f590f3 req-e7829635-ac94-404c-a194-01433dc8508e service nova] [instance: 7221a720-8ab9-44fd-abe2-8f8fc19b6433] Received event network-vif-deleted-38930f3e-c522-4232-9100-7512b09dda78 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 561.252293] env[62070]: DEBUG nova.compute.manager [req-73602a73-3fe4-4bd3-9321-523a62f590f3 req-e7829635-ac94-404c-a194-01433dc8508e service nova] [instance: 87462fbe-d62d-4b40-880c-a1785c9ed5d4] Received event network-changed-af01c0ca-546d-4b4d-a8cf-007e5f080e9c {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 561.252293] env[62070]: DEBUG nova.compute.manager [req-73602a73-3fe4-4bd3-9321-523a62f590f3 req-e7829635-ac94-404c-a194-01433dc8508e service nova] [instance: 87462fbe-d62d-4b40-880c-a1785c9ed5d4] Refreshing instance network info cache due to event network-changed-af01c0ca-546d-4b4d-a8cf-007e5f080e9c. {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 561.252293] env[62070]: DEBUG oslo_concurrency.lockutils [req-73602a73-3fe4-4bd3-9321-523a62f590f3 req-e7829635-ac94-404c-a194-01433dc8508e service nova] Acquiring lock "refresh_cache-87462fbe-d62d-4b40-880c-a1785c9ed5d4" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 561.252293] env[62070]: DEBUG oslo_concurrency.lockutils [req-73602a73-3fe4-4bd3-9321-523a62f590f3 req-e7829635-ac94-404c-a194-01433dc8508e service nova] Acquired lock "refresh_cache-87462fbe-d62d-4b40-880c-a1785c9ed5d4" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 561.252715] env[62070]: DEBUG nova.network.neutron [req-73602a73-3fe4-4bd3-9321-523a62f590f3 req-e7829635-ac94-404c-a194-01433dc8508e service nova] [instance: 87462fbe-d62d-4b40-880c-a1785c9ed5d4] Refreshing network info cache for port af01c0ca-546d-4b4d-a8cf-007e5f080e9c {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 561.368676] env[62070]: ERROR nova.compute.manager [None req-26488158-5329-4c5b-9a82-fbfcc788dbaa tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port e3164438-df26-48c5-84eb-9925989f48e6, please check neutron logs for more information. 
[ 561.368676] env[62070]: ERROR nova.compute.manager Traceback (most recent call last): [ 561.368676] env[62070]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 561.368676] env[62070]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 561.368676] env[62070]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 561.368676] env[62070]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 561.368676] env[62070]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 561.368676] env[62070]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 561.368676] env[62070]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 561.368676] env[62070]: ERROR nova.compute.manager self.force_reraise() [ 561.368676] env[62070]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 561.368676] env[62070]: ERROR nova.compute.manager raise self.value [ 561.368676] env[62070]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 561.368676] env[62070]: ERROR nova.compute.manager updated_port = self._update_port( [ 561.368676] env[62070]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 561.368676] env[62070]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 561.371661] env[62070]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 561.371661] env[62070]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 561.371661] env[62070]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port e3164438-df26-48c5-84eb-9925989f48e6, please check neutron logs for more information. 
[ 561.371661] env[62070]: ERROR nova.compute.manager [ 561.371661] env[62070]: Traceback (most recent call last): [ 561.371661] env[62070]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 561.371661] env[62070]: listener.cb(fileno) [ 561.371661] env[62070]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 561.371661] env[62070]: result = function(*args, **kwargs) [ 561.371661] env[62070]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 561.371661] env[62070]: return func(*args, **kwargs) [ 561.371661] env[62070]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 561.371661] env[62070]: raise e [ 561.371661] env[62070]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 561.371661] env[62070]: nwinfo = self.network_api.allocate_for_instance( [ 561.371661] env[62070]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 561.371661] env[62070]: created_port_ids = self._update_ports_for_instance( [ 561.371661] env[62070]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 561.371661] env[62070]: with excutils.save_and_reraise_exception(): [ 561.371661] env[62070]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 561.371661] env[62070]: self.force_reraise() [ 561.371661] env[62070]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 561.371661] env[62070]: raise self.value [ 561.371661] env[62070]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 561.371661] env[62070]: updated_port = self._update_port( [ 561.371661] env[62070]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 561.371661] env[62070]: _ensure_no_port_binding_failure(port) [ 561.371661] env[62070]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 561.371661] env[62070]: raise exception.PortBindingFailed(port_id=port['id']) [ 561.372608] env[62070]: nova.exception.PortBindingFailed: Binding failed for port e3164438-df26-48c5-84eb-9925989f48e6, please check neutron logs for more information. [ 561.372608] env[62070]: Removing descriptor: 16 [ 561.372608] env[62070]: ERROR nova.compute.manager [None req-26488158-5329-4c5b-9a82-fbfcc788dbaa tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 9b95dfcb-718e-478d-85bc-7479be9b67de] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port e3164438-df26-48c5-84eb-9925989f48e6, please check neutron logs for more information. 
[ 561.372608] env[62070]: ERROR nova.compute.manager [instance: 9b95dfcb-718e-478d-85bc-7479be9b67de] Traceback (most recent call last): [ 561.372608] env[62070]: ERROR nova.compute.manager [instance: 9b95dfcb-718e-478d-85bc-7479be9b67de] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 561.372608] env[62070]: ERROR nova.compute.manager [instance: 9b95dfcb-718e-478d-85bc-7479be9b67de] yield resources [ 561.372608] env[62070]: ERROR nova.compute.manager [instance: 9b95dfcb-718e-478d-85bc-7479be9b67de] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 561.372608] env[62070]: ERROR nova.compute.manager [instance: 9b95dfcb-718e-478d-85bc-7479be9b67de] self.driver.spawn(context, instance, image_meta, [ 561.372608] env[62070]: ERROR nova.compute.manager [instance: 9b95dfcb-718e-478d-85bc-7479be9b67de] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 561.372608] env[62070]: ERROR nova.compute.manager [instance: 9b95dfcb-718e-478d-85bc-7479be9b67de] self._vmops.spawn(context, instance, image_meta, injected_files, [ 561.372608] env[62070]: ERROR nova.compute.manager [instance: 9b95dfcb-718e-478d-85bc-7479be9b67de] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 561.372608] env[62070]: ERROR nova.compute.manager [instance: 9b95dfcb-718e-478d-85bc-7479be9b67de] vm_ref = self.build_virtual_machine(instance, [ 561.373075] env[62070]: ERROR nova.compute.manager [instance: 9b95dfcb-718e-478d-85bc-7479be9b67de] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 561.373075] env[62070]: ERROR nova.compute.manager [instance: 9b95dfcb-718e-478d-85bc-7479be9b67de] vif_infos = vmwarevif.get_vif_info(self._session, [ 561.373075] env[62070]: ERROR nova.compute.manager [instance: 9b95dfcb-718e-478d-85bc-7479be9b67de] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 561.373075] env[62070]: ERROR nova.compute.manager [instance: 9b95dfcb-718e-478d-85bc-7479be9b67de] for vif in network_info: [ 561.373075] env[62070]: ERROR nova.compute.manager [instance: 9b95dfcb-718e-478d-85bc-7479be9b67de] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 561.373075] env[62070]: ERROR nova.compute.manager [instance: 9b95dfcb-718e-478d-85bc-7479be9b67de] return self._sync_wrapper(fn, *args, **kwargs) [ 561.373075] env[62070]: ERROR nova.compute.manager [instance: 9b95dfcb-718e-478d-85bc-7479be9b67de] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 561.373075] env[62070]: ERROR nova.compute.manager [instance: 9b95dfcb-718e-478d-85bc-7479be9b67de] self.wait() [ 561.373075] env[62070]: ERROR nova.compute.manager [instance: 9b95dfcb-718e-478d-85bc-7479be9b67de] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 561.373075] env[62070]: ERROR nova.compute.manager [instance: 9b95dfcb-718e-478d-85bc-7479be9b67de] self[:] = self._gt.wait() [ 561.373075] env[62070]: ERROR nova.compute.manager [instance: 9b95dfcb-718e-478d-85bc-7479be9b67de] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 561.373075] env[62070]: ERROR nova.compute.manager [instance: 9b95dfcb-718e-478d-85bc-7479be9b67de] return self._exit_event.wait() [ 561.373075] env[62070]: ERROR nova.compute.manager [instance: 9b95dfcb-718e-478d-85bc-7479be9b67de] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 561.373393] env[62070]: ERROR 
nova.compute.manager [instance: 9b95dfcb-718e-478d-85bc-7479be9b67de] result = hub.switch() [ 561.373393] env[62070]: ERROR nova.compute.manager [instance: 9b95dfcb-718e-478d-85bc-7479be9b67de] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 561.373393] env[62070]: ERROR nova.compute.manager [instance: 9b95dfcb-718e-478d-85bc-7479be9b67de] return self.greenlet.switch() [ 561.373393] env[62070]: ERROR nova.compute.manager [instance: 9b95dfcb-718e-478d-85bc-7479be9b67de] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 561.373393] env[62070]: ERROR nova.compute.manager [instance: 9b95dfcb-718e-478d-85bc-7479be9b67de] result = function(*args, **kwargs) [ 561.373393] env[62070]: ERROR nova.compute.manager [instance: 9b95dfcb-718e-478d-85bc-7479be9b67de] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 561.373393] env[62070]: ERROR nova.compute.manager [instance: 9b95dfcb-718e-478d-85bc-7479be9b67de] return func(*args, **kwargs) [ 561.373393] env[62070]: ERROR nova.compute.manager [instance: 9b95dfcb-718e-478d-85bc-7479be9b67de] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 561.373393] env[62070]: ERROR nova.compute.manager [instance: 9b95dfcb-718e-478d-85bc-7479be9b67de] raise e [ 561.373393] env[62070]: ERROR nova.compute.manager [instance: 9b95dfcb-718e-478d-85bc-7479be9b67de] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 561.373393] env[62070]: ERROR nova.compute.manager [instance: 9b95dfcb-718e-478d-85bc-7479be9b67de] nwinfo = self.network_api.allocate_for_instance( [ 561.373393] env[62070]: ERROR nova.compute.manager [instance: 9b95dfcb-718e-478d-85bc-7479be9b67de] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 561.373393] env[62070]: ERROR nova.compute.manager [instance: 9b95dfcb-718e-478d-85bc-7479be9b67de] created_port_ids = self._update_ports_for_instance( [ 561.373711] env[62070]: ERROR nova.compute.manager [instance: 9b95dfcb-718e-478d-85bc-7479be9b67de] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 561.373711] env[62070]: ERROR nova.compute.manager [instance: 9b95dfcb-718e-478d-85bc-7479be9b67de] with excutils.save_and_reraise_exception(): [ 561.373711] env[62070]: ERROR nova.compute.manager [instance: 9b95dfcb-718e-478d-85bc-7479be9b67de] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 561.373711] env[62070]: ERROR nova.compute.manager [instance: 9b95dfcb-718e-478d-85bc-7479be9b67de] self.force_reraise() [ 561.373711] env[62070]: ERROR nova.compute.manager [instance: 9b95dfcb-718e-478d-85bc-7479be9b67de] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 561.373711] env[62070]: ERROR nova.compute.manager [instance: 9b95dfcb-718e-478d-85bc-7479be9b67de] raise self.value [ 561.373711] env[62070]: ERROR nova.compute.manager [instance: 9b95dfcb-718e-478d-85bc-7479be9b67de] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 561.373711] env[62070]: ERROR nova.compute.manager [instance: 9b95dfcb-718e-478d-85bc-7479be9b67de] updated_port = self._update_port( [ 561.373711] env[62070]: ERROR nova.compute.manager [instance: 9b95dfcb-718e-478d-85bc-7479be9b67de] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 561.373711] 
env[62070]: ERROR nova.compute.manager [instance: 9b95dfcb-718e-478d-85bc-7479be9b67de] _ensure_no_port_binding_failure(port) [ 561.373711] env[62070]: ERROR nova.compute.manager [instance: 9b95dfcb-718e-478d-85bc-7479be9b67de] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 561.373711] env[62070]: ERROR nova.compute.manager [instance: 9b95dfcb-718e-478d-85bc-7479be9b67de] raise exception.PortBindingFailed(port_id=port['id']) [ 561.374155] env[62070]: ERROR nova.compute.manager [instance: 9b95dfcb-718e-478d-85bc-7479be9b67de] nova.exception.PortBindingFailed: Binding failed for port e3164438-df26-48c5-84eb-9925989f48e6, please check neutron logs for more information. [ 561.374155] env[62070]: ERROR nova.compute.manager [instance: 9b95dfcb-718e-478d-85bc-7479be9b67de] [ 561.374155] env[62070]: INFO nova.compute.manager [None req-26488158-5329-4c5b-9a82-fbfcc788dbaa tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 9b95dfcb-718e-478d-85bc-7479be9b67de] Terminating instance [ 561.379030] env[62070]: DEBUG oslo_concurrency.lockutils [None req-26488158-5329-4c5b-9a82-fbfcc788dbaa tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Acquiring lock "refresh_cache-9b95dfcb-718e-478d-85bc-7479be9b67de" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 561.379198] env[62070]: DEBUG oslo_concurrency.lockutils [None req-26488158-5329-4c5b-9a82-fbfcc788dbaa tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Acquired lock "refresh_cache-9b95dfcb-718e-478d-85bc-7479be9b67de" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 561.379354] env[62070]: DEBUG nova.network.neutron [None req-26488158-5329-4c5b-9a82-fbfcc788dbaa tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 9b95dfcb-718e-478d-85bc-7479be9b67de] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 561.380942] env[62070]: DEBUG nova.network.neutron [None req-cb6fd1fb-d5f4-4d43-abd6-fc98c9bb8564 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] [instance: deee86eb-365b-4104-8687-72abdbf3807f] Instance cache missing network info. 
{{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 561.392504] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ebb62dd-563c-434f-a817-dd40d84ab2c3 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 561.405859] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e5de435-c343-4111-b22d-d65330889e87 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 561.438372] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbd8889d-3085-4fe9-89a2-9b8746b15c5a {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 561.446019] env[62070]: DEBUG nova.network.neutron [None req-cb6fd1fb-d5f4-4d43-abd6-fc98c9bb8564 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] [instance: deee86eb-365b-4104-8687-72abdbf3807f] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 561.448549] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3dac0809-e7a1-40cb-8661-dfceacd3e51c {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 561.463944] env[62070]: DEBUG nova.compute.provider_tree [None req-ef89e838-db76-4acf-b7ea-6f620e71b226 tempest-DeleteServersAdminTestJSON-428050376 tempest-DeleteServersAdminTestJSON-428050376-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 561.650226] env[62070]: DEBUG nova.network.neutron [None req-e375be83-77df-4817-8661-3151cdf18f15 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] [instance: 9fd18ad4-7c72-4a13-8c29-da660a060020] Successfully created port: 2b87ed7e-d1f9-4892-bf38-4d0846594037 {{(pid=62070) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 561.813259] env[62070]: DEBUG nova.network.neutron [req-73602a73-3fe4-4bd3-9321-523a62f590f3 req-e7829635-ac94-404c-a194-01433dc8508e service nova] [instance: 87462fbe-d62d-4b40-880c-a1785c9ed5d4] Instance cache missing network info. 
{{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 561.827923] env[62070]: DEBUG oslo_concurrency.lockutils [None req-3399fbf7-c899-4cd9-8ca2-2d4fdce15ab9 tempest-InstanceActionsNegativeTestJSON-1068104952 tempest-InstanceActionsNegativeTestJSON-1068104952-project-member] Acquiring lock "495a15b2-20bd-44d2-8020-816031e89832" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 561.828285] env[62070]: DEBUG oslo_concurrency.lockutils [None req-3399fbf7-c899-4cd9-8ca2-2d4fdce15ab9 tempest-InstanceActionsNegativeTestJSON-1068104952 tempest-InstanceActionsNegativeTestJSON-1068104952-project-member] Lock "495a15b2-20bd-44d2-8020-816031e89832" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 561.947452] env[62070]: DEBUG oslo_concurrency.lockutils [None req-cb6fd1fb-d5f4-4d43-abd6-fc98c9bb8564 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Releasing lock "refresh_cache-deee86eb-365b-4104-8687-72abdbf3807f" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 561.947452] env[62070]: DEBUG nova.compute.manager [None req-cb6fd1fb-d5f4-4d43-abd6-fc98c9bb8564 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] [instance: deee86eb-365b-4104-8687-72abdbf3807f] Start destroying the instance on the hypervisor. {{(pid=62070) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 561.947452] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-cb6fd1fb-d5f4-4d43-abd6-fc98c9bb8564 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] [instance: deee86eb-365b-4104-8687-72abdbf3807f] Destroying instance {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 561.948185] env[62070]: DEBUG nova.network.neutron [None req-26488158-5329-4c5b-9a82-fbfcc788dbaa tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 9b95dfcb-718e-478d-85bc-7479be9b67de] Instance cache missing network info. 
{{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 561.953328] env[62070]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-daa145d7-f060-454a-aca1-267cc7b010a4 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 561.963115] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc9b5342-f547-4918-ab74-e9d10df1fe97 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 561.974570] env[62070]: DEBUG nova.scheduler.client.report [None req-ef89e838-db76-4acf-b7ea-6f620e71b226 tempest-DeleteServersAdminTestJSON-428050376 tempest-DeleteServersAdminTestJSON-428050376-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 561.989348] env[62070]: WARNING nova.virt.vmwareapi.vmops [None req-cb6fd1fb-d5f4-4d43-abd6-fc98c9bb8564 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] [instance: deee86eb-365b-4104-8687-72abdbf3807f] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance deee86eb-365b-4104-8687-72abdbf3807f could not be found. [ 561.991011] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-cb6fd1fb-d5f4-4d43-abd6-fc98c9bb8564 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] [instance: deee86eb-365b-4104-8687-72abdbf3807f] Instance destroyed {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 561.991011] env[62070]: INFO nova.compute.manager [None req-cb6fd1fb-d5f4-4d43-abd6-fc98c9bb8564 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] [instance: deee86eb-365b-4104-8687-72abdbf3807f] Took 0.04 seconds to destroy the instance on the hypervisor. [ 561.991011] env[62070]: DEBUG oslo.service.loopingcall [None req-cb6fd1fb-d5f4-4d43-abd6-fc98c9bb8564 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 561.991011] env[62070]: DEBUG nova.compute.manager [-] [instance: deee86eb-365b-4104-8687-72abdbf3807f] Deallocating network for instance {{(pid=62070) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 561.991011] env[62070]: DEBUG nova.network.neutron [-] [instance: deee86eb-365b-4104-8687-72abdbf3807f] deallocate_for_instance() {{(pid=62070) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 562.086211] env[62070]: DEBUG nova.network.neutron [-] [instance: deee86eb-365b-4104-8687-72abdbf3807f] Instance cache missing network info. 
{{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 562.131774] env[62070]: DEBUG nova.compute.manager [None req-e375be83-77df-4817-8661-3151cdf18f15 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] [instance: 9fd18ad4-7c72-4a13-8c29-da660a060020] Start spawning the instance on the hypervisor. {{(pid=62070) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 562.162828] env[62070]: DEBUG nova.virt.hardware [None req-e375be83-77df-4817-8661-3151cdf18f15 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T09:21:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T09:21:17Z,direct_url=,disk_format='vmdk',id=43ea607c-7ece-4601-9b11-75c6a16aa7dd,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9d42cb2bbadf40d6b35f237f71234611',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T09:21:18Z,virtual_size=,visibility=), allow threads: False {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 562.163171] env[62070]: DEBUG nova.virt.hardware [None req-e375be83-77df-4817-8661-3151cdf18f15 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Flavor limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 562.163244] env[62070]: DEBUG nova.virt.hardware [None req-e375be83-77df-4817-8661-3151cdf18f15 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Image limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 562.163670] env[62070]: DEBUG nova.virt.hardware [None req-e375be83-77df-4817-8661-3151cdf18f15 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Flavor pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 562.163996] env[62070]: DEBUG nova.virt.hardware [None req-e375be83-77df-4817-8661-3151cdf18f15 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Image pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 562.164078] env[62070]: DEBUG nova.virt.hardware [None req-e375be83-77df-4817-8661-3151cdf18f15 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 562.164352] env[62070]: DEBUG nova.virt.hardware [None req-e375be83-77df-4817-8661-3151cdf18f15 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62070) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:569}} [ 562.164745] env[62070]: DEBUG nova.virt.hardware [None req-e375be83-77df-4817-8661-3151cdf18f15 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 562.165009] env[62070]: DEBUG nova.virt.hardware [None req-e375be83-77df-4817-8661-3151cdf18f15 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Got 1 possible topologies {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 562.165248] env[62070]: DEBUG nova.virt.hardware [None req-e375be83-77df-4817-8661-3151cdf18f15 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 562.165506] env[62070]: DEBUG nova.virt.hardware [None req-e375be83-77df-4817-8661-3151cdf18f15 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 562.166628] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2372f72-fd6d-4286-beda-aea7ea052149 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 562.176996] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32af6764-3309-4786-88ea-d7a17b7c1a47 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 562.196499] env[62070]: DEBUG nova.network.neutron [req-73602a73-3fe4-4bd3-9321-523a62f590f3 req-e7829635-ac94-404c-a194-01433dc8508e service nova] [instance: 87462fbe-d62d-4b40-880c-a1785c9ed5d4] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 562.325686] env[62070]: DEBUG nova.network.neutron [None req-26488158-5329-4c5b-9a82-fbfcc788dbaa tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 9b95dfcb-718e-478d-85bc-7479be9b67de] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 562.479947] env[62070]: DEBUG oslo_concurrency.lockutils [None req-ef89e838-db76-4acf-b7ea-6f620e71b226 tempest-DeleteServersAdminTestJSON-428050376 tempest-DeleteServersAdminTestJSON-428050376-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.389s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 562.480498] env[62070]: DEBUG nova.compute.manager [None req-ef89e838-db76-4acf-b7ea-6f620e71b226 tempest-DeleteServersAdminTestJSON-428050376 tempest-DeleteServersAdminTestJSON-428050376-project-member] [instance: 768b0b5f-7d20-4bc6-87f6-b66adcce42c6] Start building networks asynchronously for instance. 
{{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 562.483522] env[62070]: DEBUG oslo_concurrency.lockutils [None req-0bc5a0f5-5f77-4994-b513-243b307e9e97 tempest-ServersAdminNegativeTestJSON-1932509401 tempest-ServersAdminNegativeTestJSON-1932509401-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 6.344s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 562.589483] env[62070]: DEBUG nova.network.neutron [-] [instance: deee86eb-365b-4104-8687-72abdbf3807f] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 562.699015] env[62070]: DEBUG oslo_concurrency.lockutils [req-73602a73-3fe4-4bd3-9321-523a62f590f3 req-e7829635-ac94-404c-a194-01433dc8508e service nova] Releasing lock "refresh_cache-87462fbe-d62d-4b40-880c-a1785c9ed5d4" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 562.699371] env[62070]: DEBUG nova.compute.manager [req-73602a73-3fe4-4bd3-9321-523a62f590f3 req-e7829635-ac94-404c-a194-01433dc8508e service nova] [instance: 87462fbe-d62d-4b40-880c-a1785c9ed5d4] Received event network-vif-deleted-af01c0ca-546d-4b4d-a8cf-007e5f080e9c {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 562.828366] env[62070]: DEBUG oslo_concurrency.lockutils [None req-26488158-5329-4c5b-9a82-fbfcc788dbaa tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Releasing lock "refresh_cache-9b95dfcb-718e-478d-85bc-7479be9b67de" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 562.829510] env[62070]: DEBUG nova.compute.manager [None req-26488158-5329-4c5b-9a82-fbfcc788dbaa tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 9b95dfcb-718e-478d-85bc-7479be9b67de] Start destroying the instance on the hypervisor. {{(pid=62070) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 562.829510] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-26488158-5329-4c5b-9a82-fbfcc788dbaa tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 9b95dfcb-718e-478d-85bc-7479be9b67de] Destroying instance {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 562.829510] env[62070]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-af46d048-056e-4fee-9aff-fe5246cd0458 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 562.842352] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1aad7ce-cb63-4f38-aaaa-670ff173097c {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 562.866798] env[62070]: WARNING nova.virt.vmwareapi.vmops [None req-26488158-5329-4c5b-9a82-fbfcc788dbaa tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 9b95dfcb-718e-478d-85bc-7479be9b67de] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 9b95dfcb-718e-478d-85bc-7479be9b67de could not be found. 
[ 562.867649] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-26488158-5329-4c5b-9a82-fbfcc788dbaa tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 9b95dfcb-718e-478d-85bc-7479be9b67de] Instance destroyed {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 562.867649] env[62070]: INFO nova.compute.manager [None req-26488158-5329-4c5b-9a82-fbfcc788dbaa tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 9b95dfcb-718e-478d-85bc-7479be9b67de] Took 0.04 seconds to destroy the instance on the hypervisor. [ 562.867649] env[62070]: DEBUG oslo.service.loopingcall [None req-26488158-5329-4c5b-9a82-fbfcc788dbaa tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 562.867766] env[62070]: DEBUG nova.compute.manager [-] [instance: 9b95dfcb-718e-478d-85bc-7479be9b67de] Deallocating network for instance {{(pid=62070) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 562.867877] env[62070]: DEBUG nova.network.neutron [-] [instance: 9b95dfcb-718e-478d-85bc-7479be9b67de] deallocate_for_instance() {{(pid=62070) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 562.897526] env[62070]: DEBUG nova.network.neutron [-] [instance: 9b95dfcb-718e-478d-85bc-7479be9b67de] Instance cache missing network info. {{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 562.989291] env[62070]: DEBUG nova.compute.utils [None req-ef89e838-db76-4acf-b7ea-6f620e71b226 tempest-DeleteServersAdminTestJSON-428050376 tempest-DeleteServersAdminTestJSON-428050376-project-member] Using /dev/sd instead of None {{(pid=62070) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 562.995771] env[62070]: DEBUG nova.compute.manager [None req-ef89e838-db76-4acf-b7ea-6f620e71b226 tempest-DeleteServersAdminTestJSON-428050376 tempest-DeleteServersAdminTestJSON-428050376-project-member] [instance: 768b0b5f-7d20-4bc6-87f6-b66adcce42c6] Allocating IP information in the background. {{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 562.996245] env[62070]: DEBUG nova.network.neutron [None req-ef89e838-db76-4acf-b7ea-6f620e71b226 tempest-DeleteServersAdminTestJSON-428050376 tempest-DeleteServersAdminTestJSON-428050376-project-member] [instance: 768b0b5f-7d20-4bc6-87f6-b66adcce42c6] allocate_for_instance() {{(pid=62070) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 563.095117] env[62070]: INFO nova.compute.manager [-] [instance: deee86eb-365b-4104-8687-72abdbf3807f] Took 1.10 seconds to deallocate network for instance. 
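The "Waiting for function ... _deallocate_network_with_retries to return" entry above comes from oslo.service's looping-call machinery, which nova wraps around network deallocation so transient failures talking to neutron can be retried instead of aborting after one attempt. A generic example of that module's polling pattern, using FixedIntervalLoopingCall; nova's actual wrapper and its retry parameters differ, this only shows how the loop is driven and stopped:

    from oslo_service import loopingcall

    attempts = {"count": 0}

    def deallocate_with_retries():
        # Illustrative body: pretend the first attempt fails and the second works.
        attempts["count"] += 1
        if attempts["count"] < 2:
            return  # keep looping
        # Raising LoopingCallDone stops the loop and hands back a return value.
        raise loopingcall.LoopingCallDone(retvalue="deallocated")

    timer = loopingcall.FixedIntervalLoopingCall(deallocate_with_retries)
    result = timer.start(interval=0.5).wait()   # blocks until LoopingCallDone
    print(result)                               # -> deallocated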
[ 563.096074] env[62070]: DEBUG nova.policy [None req-ef89e838-db76-4acf-b7ea-6f620e71b226 tempest-DeleteServersAdminTestJSON-428050376 tempest-DeleteServersAdminTestJSON-428050376-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5594d22d582140578bccd3581fd610b6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6915fe8f9a5a407b92aa3f69ce007be1', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62070) authorize /opt/stack/nova/nova/policy.py:201}} [ 563.103761] env[62070]: DEBUG nova.compute.claims [None req-cb6fd1fb-d5f4-4d43-abd6-fc98c9bb8564 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] [instance: deee86eb-365b-4104-8687-72abdbf3807f] Aborting claim: {{(pid=62070) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 563.103998] env[62070]: DEBUG oslo_concurrency.lockutils [None req-cb6fd1fb-d5f4-4d43-abd6-fc98c9bb8564 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 563.233105] env[62070]: DEBUG oslo_concurrency.lockutils [None req-d154b157-ec97-49c4-b004-fc679da21516 tempest-ServersWithSpecificFlavorTestJSON-1618728433 tempest-ServersWithSpecificFlavorTestJSON-1618728433-project-member] Acquiring lock "7d8d4cdc-2b25-46f3-8bc0-1de22fdd005a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 563.234079] env[62070]: DEBUG oslo_concurrency.lockutils [None req-d154b157-ec97-49c4-b004-fc679da21516 tempest-ServersWithSpecificFlavorTestJSON-1618728433 tempest-ServersWithSpecificFlavorTestJSON-1618728433-project-member] Lock "7d8d4cdc-2b25-46f3-8bc0-1de22fdd005a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 563.270970] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0693217b-d7ce-4ccf-8174-7245513025c2 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 563.279343] env[62070]: ERROR nova.compute.manager [None req-e375be83-77df-4817-8661-3151cdf18f15 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 2b87ed7e-d1f9-4892-bf38-4d0846594037, please check neutron logs for more information. 
[ 563.279343] env[62070]: ERROR nova.compute.manager Traceback (most recent call last): [ 563.279343] env[62070]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 563.279343] env[62070]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 563.279343] env[62070]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 563.279343] env[62070]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 563.279343] env[62070]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 563.279343] env[62070]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 563.279343] env[62070]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 563.279343] env[62070]: ERROR nova.compute.manager self.force_reraise() [ 563.279343] env[62070]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 563.279343] env[62070]: ERROR nova.compute.manager raise self.value [ 563.279343] env[62070]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 563.279343] env[62070]: ERROR nova.compute.manager updated_port = self._update_port( [ 563.279343] env[62070]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 563.279343] env[62070]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 563.279908] env[62070]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 563.279908] env[62070]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 563.279908] env[62070]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 2b87ed7e-d1f9-4892-bf38-4d0846594037, please check neutron logs for more information. 
[ 563.279908] env[62070]: ERROR nova.compute.manager [ 563.279908] env[62070]: Traceback (most recent call last): [ 563.279908] env[62070]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 563.279908] env[62070]: listener.cb(fileno) [ 563.279908] env[62070]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 563.279908] env[62070]: result = function(*args, **kwargs) [ 563.279908] env[62070]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 563.279908] env[62070]: return func(*args, **kwargs) [ 563.279908] env[62070]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 563.279908] env[62070]: raise e [ 563.279908] env[62070]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 563.279908] env[62070]: nwinfo = self.network_api.allocate_for_instance( [ 563.279908] env[62070]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 563.279908] env[62070]: created_port_ids = self._update_ports_for_instance( [ 563.279908] env[62070]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 563.279908] env[62070]: with excutils.save_and_reraise_exception(): [ 563.279908] env[62070]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 563.279908] env[62070]: self.force_reraise() [ 563.279908] env[62070]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 563.279908] env[62070]: raise self.value [ 563.279908] env[62070]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 563.279908] env[62070]: updated_port = self._update_port( [ 563.279908] env[62070]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 563.279908] env[62070]: _ensure_no_port_binding_failure(port) [ 563.279908] env[62070]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 563.279908] env[62070]: raise exception.PortBindingFailed(port_id=port['id']) [ 563.280746] env[62070]: nova.exception.PortBindingFailed: Binding failed for port 2b87ed7e-d1f9-4892-bf38-4d0846594037, please check neutron logs for more information. [ 563.280746] env[62070]: Removing descriptor: 14 [ 563.280746] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-614feea6-07d6-4b8b-80e3-2a1ff562c8fa {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 563.287069] env[62070]: ERROR nova.compute.manager [None req-e375be83-77df-4817-8661-3151cdf18f15 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] [instance: 9fd18ad4-7c72-4a13-8c29-da660a060020] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 2b87ed7e-d1f9-4892-bf38-4d0846594037, please check neutron logs for more information. 
[ 563.287069] env[62070]: ERROR nova.compute.manager [instance: 9fd18ad4-7c72-4a13-8c29-da660a060020] Traceback (most recent call last): [ 563.287069] env[62070]: ERROR nova.compute.manager [instance: 9fd18ad4-7c72-4a13-8c29-da660a060020] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 563.287069] env[62070]: ERROR nova.compute.manager [instance: 9fd18ad4-7c72-4a13-8c29-da660a060020] yield resources [ 563.287069] env[62070]: ERROR nova.compute.manager [instance: 9fd18ad4-7c72-4a13-8c29-da660a060020] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 563.287069] env[62070]: ERROR nova.compute.manager [instance: 9fd18ad4-7c72-4a13-8c29-da660a060020] self.driver.spawn(context, instance, image_meta, [ 563.287069] env[62070]: ERROR nova.compute.manager [instance: 9fd18ad4-7c72-4a13-8c29-da660a060020] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 563.287069] env[62070]: ERROR nova.compute.manager [instance: 9fd18ad4-7c72-4a13-8c29-da660a060020] self._vmops.spawn(context, instance, image_meta, injected_files, [ 563.287069] env[62070]: ERROR nova.compute.manager [instance: 9fd18ad4-7c72-4a13-8c29-da660a060020] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 563.287069] env[62070]: ERROR nova.compute.manager [instance: 9fd18ad4-7c72-4a13-8c29-da660a060020] vm_ref = self.build_virtual_machine(instance, [ 563.287069] env[62070]: ERROR nova.compute.manager [instance: 9fd18ad4-7c72-4a13-8c29-da660a060020] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 563.287698] env[62070]: ERROR nova.compute.manager [instance: 9fd18ad4-7c72-4a13-8c29-da660a060020] vif_infos = vmwarevif.get_vif_info(self._session, [ 563.287698] env[62070]: ERROR nova.compute.manager [instance: 9fd18ad4-7c72-4a13-8c29-da660a060020] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 563.287698] env[62070]: ERROR nova.compute.manager [instance: 9fd18ad4-7c72-4a13-8c29-da660a060020] for vif in network_info: [ 563.287698] env[62070]: ERROR nova.compute.manager [instance: 9fd18ad4-7c72-4a13-8c29-da660a060020] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 563.287698] env[62070]: ERROR nova.compute.manager [instance: 9fd18ad4-7c72-4a13-8c29-da660a060020] return self._sync_wrapper(fn, *args, **kwargs) [ 563.287698] env[62070]: ERROR nova.compute.manager [instance: 9fd18ad4-7c72-4a13-8c29-da660a060020] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 563.287698] env[62070]: ERROR nova.compute.manager [instance: 9fd18ad4-7c72-4a13-8c29-da660a060020] self.wait() [ 563.287698] env[62070]: ERROR nova.compute.manager [instance: 9fd18ad4-7c72-4a13-8c29-da660a060020] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 563.287698] env[62070]: ERROR nova.compute.manager [instance: 9fd18ad4-7c72-4a13-8c29-da660a060020] self[:] = self._gt.wait() [ 563.287698] env[62070]: ERROR nova.compute.manager [instance: 9fd18ad4-7c72-4a13-8c29-da660a060020] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 563.287698] env[62070]: ERROR nova.compute.manager [instance: 9fd18ad4-7c72-4a13-8c29-da660a060020] return self._exit_event.wait() [ 563.287698] env[62070]: ERROR nova.compute.manager [instance: 9fd18ad4-7c72-4a13-8c29-da660a060020] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 563.287698] env[62070]: ERROR 
nova.compute.manager [instance: 9fd18ad4-7c72-4a13-8c29-da660a060020] result = hub.switch() [ 563.288033] env[62070]: ERROR nova.compute.manager [instance: 9fd18ad4-7c72-4a13-8c29-da660a060020] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 563.288033] env[62070]: ERROR nova.compute.manager [instance: 9fd18ad4-7c72-4a13-8c29-da660a060020] return self.greenlet.switch() [ 563.288033] env[62070]: ERROR nova.compute.manager [instance: 9fd18ad4-7c72-4a13-8c29-da660a060020] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 563.288033] env[62070]: ERROR nova.compute.manager [instance: 9fd18ad4-7c72-4a13-8c29-da660a060020] result = function(*args, **kwargs) [ 563.288033] env[62070]: ERROR nova.compute.manager [instance: 9fd18ad4-7c72-4a13-8c29-da660a060020] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 563.288033] env[62070]: ERROR nova.compute.manager [instance: 9fd18ad4-7c72-4a13-8c29-da660a060020] return func(*args, **kwargs) [ 563.288033] env[62070]: ERROR nova.compute.manager [instance: 9fd18ad4-7c72-4a13-8c29-da660a060020] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 563.288033] env[62070]: ERROR nova.compute.manager [instance: 9fd18ad4-7c72-4a13-8c29-da660a060020] raise e [ 563.288033] env[62070]: ERROR nova.compute.manager [instance: 9fd18ad4-7c72-4a13-8c29-da660a060020] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 563.288033] env[62070]: ERROR nova.compute.manager [instance: 9fd18ad4-7c72-4a13-8c29-da660a060020] nwinfo = self.network_api.allocate_for_instance( [ 563.288033] env[62070]: ERROR nova.compute.manager [instance: 9fd18ad4-7c72-4a13-8c29-da660a060020] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 563.288033] env[62070]: ERROR nova.compute.manager [instance: 9fd18ad4-7c72-4a13-8c29-da660a060020] created_port_ids = self._update_ports_for_instance( [ 563.288033] env[62070]: ERROR nova.compute.manager [instance: 9fd18ad4-7c72-4a13-8c29-da660a060020] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 563.288376] env[62070]: ERROR nova.compute.manager [instance: 9fd18ad4-7c72-4a13-8c29-da660a060020] with excutils.save_and_reraise_exception(): [ 563.288376] env[62070]: ERROR nova.compute.manager [instance: 9fd18ad4-7c72-4a13-8c29-da660a060020] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 563.288376] env[62070]: ERROR nova.compute.manager [instance: 9fd18ad4-7c72-4a13-8c29-da660a060020] self.force_reraise() [ 563.288376] env[62070]: ERROR nova.compute.manager [instance: 9fd18ad4-7c72-4a13-8c29-da660a060020] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 563.288376] env[62070]: ERROR nova.compute.manager [instance: 9fd18ad4-7c72-4a13-8c29-da660a060020] raise self.value [ 563.288376] env[62070]: ERROR nova.compute.manager [instance: 9fd18ad4-7c72-4a13-8c29-da660a060020] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 563.288376] env[62070]: ERROR nova.compute.manager [instance: 9fd18ad4-7c72-4a13-8c29-da660a060020] updated_port = self._update_port( [ 563.288376] env[62070]: ERROR nova.compute.manager [instance: 9fd18ad4-7c72-4a13-8c29-da660a060020] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 563.288376] 
env[62070]: ERROR nova.compute.manager [instance: 9fd18ad4-7c72-4a13-8c29-da660a060020] _ensure_no_port_binding_failure(port) [ 563.288376] env[62070]: ERROR nova.compute.manager [instance: 9fd18ad4-7c72-4a13-8c29-da660a060020] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 563.288376] env[62070]: ERROR nova.compute.manager [instance: 9fd18ad4-7c72-4a13-8c29-da660a060020] raise exception.PortBindingFailed(port_id=port['id']) [ 563.288376] env[62070]: ERROR nova.compute.manager [instance: 9fd18ad4-7c72-4a13-8c29-da660a060020] nova.exception.PortBindingFailed: Binding failed for port 2b87ed7e-d1f9-4892-bf38-4d0846594037, please check neutron logs for more information. [ 563.288376] env[62070]: ERROR nova.compute.manager [instance: 9fd18ad4-7c72-4a13-8c29-da660a060020] [ 563.288758] env[62070]: INFO nova.compute.manager [None req-e375be83-77df-4817-8661-3151cdf18f15 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] [instance: 9fd18ad4-7c72-4a13-8c29-da660a060020] Terminating instance [ 563.290583] env[62070]: DEBUG oslo_concurrency.lockutils [None req-e375be83-77df-4817-8661-3151cdf18f15 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Acquiring lock "refresh_cache-9fd18ad4-7c72-4a13-8c29-da660a060020" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 563.290740] env[62070]: DEBUG oslo_concurrency.lockutils [None req-e375be83-77df-4817-8661-3151cdf18f15 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Acquired lock "refresh_cache-9fd18ad4-7c72-4a13-8c29-da660a060020" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 563.290904] env[62070]: DEBUG nova.network.neutron [None req-e375be83-77df-4817-8661-3151cdf18f15 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] [instance: 9fd18ad4-7c72-4a13-8c29-da660a060020] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 563.326420] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41299f0f-4ac8-400c-a84e-0692c0045b4c {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 563.335058] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f28cc59-41f2-4aa0-b1b1-83866d3602f5 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 563.356035] env[62070]: DEBUG nova.compute.provider_tree [None req-0bc5a0f5-5f77-4994-b513-243b307e9e97 tempest-ServersAdminNegativeTestJSON-1932509401 tempest-ServersAdminNegativeTestJSON-1932509401-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 563.405308] env[62070]: DEBUG nova.network.neutron [-] [instance: 9b95dfcb-718e-478d-85bc-7479be9b67de] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 563.470271] env[62070]: DEBUG oslo_concurrency.lockutils [None req-235f2fca-0298-44f0-b867-2cd232992478 
tempest-TenantUsagesTestJSON-193686055 tempest-TenantUsagesTestJSON-193686055-project-member] Acquiring lock "2226072d-16f2-4ea1-a56c-d866554c7379" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 563.470834] env[62070]: DEBUG oslo_concurrency.lockutils [None req-235f2fca-0298-44f0-b867-2cd232992478 tempest-TenantUsagesTestJSON-193686055 tempest-TenantUsagesTestJSON-193686055-project-member] Lock "2226072d-16f2-4ea1-a56c-d866554c7379" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 563.496813] env[62070]: DEBUG nova.compute.manager [None req-ef89e838-db76-4acf-b7ea-6f620e71b226 tempest-DeleteServersAdminTestJSON-428050376 tempest-DeleteServersAdminTestJSON-428050376-project-member] [instance: 768b0b5f-7d20-4bc6-87f6-b66adcce42c6] Start building block device mappings for instance. {{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 563.859622] env[62070]: DEBUG nova.scheduler.client.report [None req-0bc5a0f5-5f77-4994-b513-243b307e9e97 tempest-ServersAdminNegativeTestJSON-1932509401 tempest-ServersAdminNegativeTestJSON-1932509401-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 563.865844] env[62070]: DEBUG nova.network.neutron [None req-e375be83-77df-4817-8661-3151cdf18f15 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] [instance: 9fd18ad4-7c72-4a13-8c29-da660a060020] Instance cache missing network info. {{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 563.906715] env[62070]: INFO nova.compute.manager [-] [instance: 9b95dfcb-718e-478d-85bc-7479be9b67de] Took 1.04 seconds to deallocate network for instance. 
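The inventory dict above is what the resource tracker reports to Placement for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6. Schedulable capacity for each resource class works out to (total - reserved) * allocation_ratio, with min_unit/max_unit/step_size constraining individual allocations. A small worked example using the logged numbers (arithmetic only, not Placement's code):

    # Values copied from the reported inventory above.
    inventory = {
        "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0},
        "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
        "DISK_GB":   {"total": 400,    "reserved": 0,   "allocation_ratio": 1.0},
    }

    for rc, inv in inventory.items():
        capacity = (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
        print(f"{rc}: {capacity:g} consumable units")

    # VCPU: 192       (48 host vCPUs oversubscribed 4x)
    # MEMORY_MB: 196078  (512 MB held back for the host)
    # DISK_GB: 400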
[ 563.911430] env[62070]: DEBUG nova.compute.claims [None req-26488158-5329-4c5b-9a82-fbfcc788dbaa tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 9b95dfcb-718e-478d-85bc-7479be9b67de] Aborting claim: {{(pid=62070) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 563.911614] env[62070]: DEBUG oslo_concurrency.lockutils [None req-26488158-5329-4c5b-9a82-fbfcc788dbaa tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 563.999127] env[62070]: DEBUG nova.network.neutron [None req-ef89e838-db76-4acf-b7ea-6f620e71b226 tempest-DeleteServersAdminTestJSON-428050376 tempest-DeleteServersAdminTestJSON-428050376-project-member] [instance: 768b0b5f-7d20-4bc6-87f6-b66adcce42c6] Successfully created port: e3ce78d3-0012-4892-8bad-29f3337f38db {{(pid=62070) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 564.080551] env[62070]: DEBUG nova.compute.manager [req-72ce475a-0ef9-40b3-8c53-c9bd51895587 req-29560161-b56b-421f-915c-3def2020c9f7 service nova] [instance: 9fd18ad4-7c72-4a13-8c29-da660a060020] Received event network-changed-2b87ed7e-d1f9-4892-bf38-4d0846594037 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 564.080800] env[62070]: DEBUG nova.compute.manager [req-72ce475a-0ef9-40b3-8c53-c9bd51895587 req-29560161-b56b-421f-915c-3def2020c9f7 service nova] [instance: 9fd18ad4-7c72-4a13-8c29-da660a060020] Refreshing instance network info cache due to event network-changed-2b87ed7e-d1f9-4892-bf38-4d0846594037. {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 564.081139] env[62070]: DEBUG oslo_concurrency.lockutils [req-72ce475a-0ef9-40b3-8c53-c9bd51895587 req-29560161-b56b-421f-915c-3def2020c9f7 service nova] Acquiring lock "refresh_cache-9fd18ad4-7c72-4a13-8c29-da660a060020" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 564.124516] env[62070]: DEBUG nova.compute.manager [req-00472304-8133-4f61-bc27-dad30a318517 req-234ab464-e11a-4075-b4c9-5cdeb9763c05 service nova] [instance: deee86eb-365b-4104-8687-72abdbf3807f] Received event network-changed-fb4707e9-7728-45c0-9830-a99c0cf681b3 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 564.124712] env[62070]: DEBUG nova.compute.manager [req-00472304-8133-4f61-bc27-dad30a318517 req-234ab464-e11a-4075-b4c9-5cdeb9763c05 service nova] [instance: deee86eb-365b-4104-8687-72abdbf3807f] Refreshing instance network info cache due to event network-changed-fb4707e9-7728-45c0-9830-a99c0cf681b3. 
{{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 564.124954] env[62070]: DEBUG oslo_concurrency.lockutils [req-00472304-8133-4f61-bc27-dad30a318517 req-234ab464-e11a-4075-b4c9-5cdeb9763c05 service nova] Acquiring lock "refresh_cache-deee86eb-365b-4104-8687-72abdbf3807f" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 564.125214] env[62070]: DEBUG oslo_concurrency.lockutils [req-00472304-8133-4f61-bc27-dad30a318517 req-234ab464-e11a-4075-b4c9-5cdeb9763c05 service nova] Acquired lock "refresh_cache-deee86eb-365b-4104-8687-72abdbf3807f" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 564.125375] env[62070]: DEBUG nova.network.neutron [req-00472304-8133-4f61-bc27-dad30a318517 req-234ab464-e11a-4075-b4c9-5cdeb9763c05 service nova] [instance: deee86eb-365b-4104-8687-72abdbf3807f] Refreshing network info cache for port fb4707e9-7728-45c0-9830-a99c0cf681b3 {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 564.141115] env[62070]: DEBUG nova.network.neutron [None req-e375be83-77df-4817-8661-3151cdf18f15 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] [instance: 9fd18ad4-7c72-4a13-8c29-da660a060020] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 564.369487] env[62070]: DEBUG oslo_concurrency.lockutils [None req-0bc5a0f5-5f77-4994-b513-243b307e9e97 tempest-ServersAdminNegativeTestJSON-1932509401 tempest-ServersAdminNegativeTestJSON-1932509401-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.886s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 564.371196] env[62070]: ERROR nova.compute.manager [None req-0bc5a0f5-5f77-4994-b513-243b307e9e97 tempest-ServersAdminNegativeTestJSON-1932509401 tempest-ServersAdminNegativeTestJSON-1932509401-project-member] [instance: 9688a493-7046-49e1-b5ab-0db9cfbf37aa] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 38acdc96-ca4e-4eff-b4ff-8a9bbb18fb3f, please check neutron logs for more information. 
[ 564.371196] env[62070]: ERROR nova.compute.manager [instance: 9688a493-7046-49e1-b5ab-0db9cfbf37aa] Traceback (most recent call last): [ 564.371196] env[62070]: ERROR nova.compute.manager [instance: 9688a493-7046-49e1-b5ab-0db9cfbf37aa] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 564.371196] env[62070]: ERROR nova.compute.manager [instance: 9688a493-7046-49e1-b5ab-0db9cfbf37aa] self.driver.spawn(context, instance, image_meta, [ 564.371196] env[62070]: ERROR nova.compute.manager [instance: 9688a493-7046-49e1-b5ab-0db9cfbf37aa] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 564.371196] env[62070]: ERROR nova.compute.manager [instance: 9688a493-7046-49e1-b5ab-0db9cfbf37aa] self._vmops.spawn(context, instance, image_meta, injected_files, [ 564.371196] env[62070]: ERROR nova.compute.manager [instance: 9688a493-7046-49e1-b5ab-0db9cfbf37aa] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 564.371196] env[62070]: ERROR nova.compute.manager [instance: 9688a493-7046-49e1-b5ab-0db9cfbf37aa] vm_ref = self.build_virtual_machine(instance, [ 564.371196] env[62070]: ERROR nova.compute.manager [instance: 9688a493-7046-49e1-b5ab-0db9cfbf37aa] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 564.371196] env[62070]: ERROR nova.compute.manager [instance: 9688a493-7046-49e1-b5ab-0db9cfbf37aa] vif_infos = vmwarevif.get_vif_info(self._session, [ 564.371196] env[62070]: ERROR nova.compute.manager [instance: 9688a493-7046-49e1-b5ab-0db9cfbf37aa] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 564.371537] env[62070]: ERROR nova.compute.manager [instance: 9688a493-7046-49e1-b5ab-0db9cfbf37aa] for vif in network_info: [ 564.371537] env[62070]: ERROR nova.compute.manager [instance: 9688a493-7046-49e1-b5ab-0db9cfbf37aa] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 564.371537] env[62070]: ERROR nova.compute.manager [instance: 9688a493-7046-49e1-b5ab-0db9cfbf37aa] return self._sync_wrapper(fn, *args, **kwargs) [ 564.371537] env[62070]: ERROR nova.compute.manager [instance: 9688a493-7046-49e1-b5ab-0db9cfbf37aa] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 564.371537] env[62070]: ERROR nova.compute.manager [instance: 9688a493-7046-49e1-b5ab-0db9cfbf37aa] self.wait() [ 564.371537] env[62070]: ERROR nova.compute.manager [instance: 9688a493-7046-49e1-b5ab-0db9cfbf37aa] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 564.371537] env[62070]: ERROR nova.compute.manager [instance: 9688a493-7046-49e1-b5ab-0db9cfbf37aa] self[:] = self._gt.wait() [ 564.371537] env[62070]: ERROR nova.compute.manager [instance: 9688a493-7046-49e1-b5ab-0db9cfbf37aa] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 564.371537] env[62070]: ERROR nova.compute.manager [instance: 9688a493-7046-49e1-b5ab-0db9cfbf37aa] return self._exit_event.wait() [ 564.371537] env[62070]: ERROR nova.compute.manager [instance: 9688a493-7046-49e1-b5ab-0db9cfbf37aa] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 564.371537] env[62070]: ERROR nova.compute.manager [instance: 9688a493-7046-49e1-b5ab-0db9cfbf37aa] result = hub.switch() [ 564.371537] env[62070]: ERROR nova.compute.manager [instance: 9688a493-7046-49e1-b5ab-0db9cfbf37aa] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
564.371537] env[62070]: ERROR nova.compute.manager [instance: 9688a493-7046-49e1-b5ab-0db9cfbf37aa] return self.greenlet.switch() [ 564.372781] env[62070]: ERROR nova.compute.manager [instance: 9688a493-7046-49e1-b5ab-0db9cfbf37aa] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 564.372781] env[62070]: ERROR nova.compute.manager [instance: 9688a493-7046-49e1-b5ab-0db9cfbf37aa] result = function(*args, **kwargs) [ 564.372781] env[62070]: ERROR nova.compute.manager [instance: 9688a493-7046-49e1-b5ab-0db9cfbf37aa] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 564.372781] env[62070]: ERROR nova.compute.manager [instance: 9688a493-7046-49e1-b5ab-0db9cfbf37aa] return func(*args, **kwargs) [ 564.372781] env[62070]: ERROR nova.compute.manager [instance: 9688a493-7046-49e1-b5ab-0db9cfbf37aa] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 564.372781] env[62070]: ERROR nova.compute.manager [instance: 9688a493-7046-49e1-b5ab-0db9cfbf37aa] raise e [ 564.372781] env[62070]: ERROR nova.compute.manager [instance: 9688a493-7046-49e1-b5ab-0db9cfbf37aa] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 564.372781] env[62070]: ERROR nova.compute.manager [instance: 9688a493-7046-49e1-b5ab-0db9cfbf37aa] nwinfo = self.network_api.allocate_for_instance( [ 564.372781] env[62070]: ERROR nova.compute.manager [instance: 9688a493-7046-49e1-b5ab-0db9cfbf37aa] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 564.372781] env[62070]: ERROR nova.compute.manager [instance: 9688a493-7046-49e1-b5ab-0db9cfbf37aa] created_port_ids = self._update_ports_for_instance( [ 564.372781] env[62070]: ERROR nova.compute.manager [instance: 9688a493-7046-49e1-b5ab-0db9cfbf37aa] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 564.372781] env[62070]: ERROR nova.compute.manager [instance: 9688a493-7046-49e1-b5ab-0db9cfbf37aa] with excutils.save_and_reraise_exception(): [ 564.372781] env[62070]: ERROR nova.compute.manager [instance: 9688a493-7046-49e1-b5ab-0db9cfbf37aa] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 564.373313] env[62070]: ERROR nova.compute.manager [instance: 9688a493-7046-49e1-b5ab-0db9cfbf37aa] self.force_reraise() [ 564.373313] env[62070]: ERROR nova.compute.manager [instance: 9688a493-7046-49e1-b5ab-0db9cfbf37aa] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 564.373313] env[62070]: ERROR nova.compute.manager [instance: 9688a493-7046-49e1-b5ab-0db9cfbf37aa] raise self.value [ 564.373313] env[62070]: ERROR nova.compute.manager [instance: 9688a493-7046-49e1-b5ab-0db9cfbf37aa] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 564.373313] env[62070]: ERROR nova.compute.manager [instance: 9688a493-7046-49e1-b5ab-0db9cfbf37aa] updated_port = self._update_port( [ 564.373313] env[62070]: ERROR nova.compute.manager [instance: 9688a493-7046-49e1-b5ab-0db9cfbf37aa] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 564.373313] env[62070]: ERROR nova.compute.manager [instance: 9688a493-7046-49e1-b5ab-0db9cfbf37aa] _ensure_no_port_binding_failure(port) [ 564.373313] env[62070]: ERROR nova.compute.manager [instance: 9688a493-7046-49e1-b5ab-0db9cfbf37aa] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 564.373313] env[62070]: ERROR nova.compute.manager [instance: 9688a493-7046-49e1-b5ab-0db9cfbf37aa] raise exception.PortBindingFailed(port_id=port['id']) [ 564.373313] env[62070]: ERROR nova.compute.manager [instance: 9688a493-7046-49e1-b5ab-0db9cfbf37aa] nova.exception.PortBindingFailed: Binding failed for port 38acdc96-ca4e-4eff-b4ff-8a9bbb18fb3f, please check neutron logs for more information. [ 564.373313] env[62070]: ERROR nova.compute.manager [instance: 9688a493-7046-49e1-b5ab-0db9cfbf37aa] [ 564.376759] env[62070]: DEBUG nova.compute.utils [None req-0bc5a0f5-5f77-4994-b513-243b307e9e97 tempest-ServersAdminNegativeTestJSON-1932509401 tempest-ServersAdminNegativeTestJSON-1932509401-project-member] [instance: 9688a493-7046-49e1-b5ab-0db9cfbf37aa] Binding failed for port 38acdc96-ca4e-4eff-b4ff-8a9bbb18fb3f, please check neutron logs for more information. {{(pid=62070) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 564.376759] env[62070]: DEBUG oslo_concurrency.lockutils [None req-1b5d4941-f26d-40d2-8fb4-3b7b7d57038b tempest-ServersAdminTestJSON-1165817432 tempest-ServersAdminTestJSON-1165817432-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.822s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 564.376759] env[62070]: INFO nova.compute.claims [None req-1b5d4941-f26d-40d2-8fb4-3b7b7d57038b tempest-ServersAdminTestJSON-1165817432 tempest-ServersAdminTestJSON-1165817432-project-member] [instance: e3306a4a-b1cf-4d13-b4de-bd6a6a2e2619] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 564.386788] env[62070]: DEBUG nova.compute.manager [None req-0bc5a0f5-5f77-4994-b513-243b307e9e97 tempest-ServersAdminNegativeTestJSON-1932509401 tempest-ServersAdminNegativeTestJSON-1932509401-project-member] [instance: 9688a493-7046-49e1-b5ab-0db9cfbf37aa] Build of instance 9688a493-7046-49e1-b5ab-0db9cfbf37aa was re-scheduled: Binding failed for port 38acdc96-ca4e-4eff-b4ff-8a9bbb18fb3f, please check neutron logs for more information. 
{{(pid=62070) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 564.391650] env[62070]: DEBUG nova.compute.manager [None req-0bc5a0f5-5f77-4994-b513-243b307e9e97 tempest-ServersAdminNegativeTestJSON-1932509401 tempest-ServersAdminNegativeTestJSON-1932509401-project-member] [instance: 9688a493-7046-49e1-b5ab-0db9cfbf37aa] Unplugging VIFs for instance {{(pid=62070) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 564.391650] env[62070]: DEBUG oslo_concurrency.lockutils [None req-0bc5a0f5-5f77-4994-b513-243b307e9e97 tempest-ServersAdminNegativeTestJSON-1932509401 tempest-ServersAdminNegativeTestJSON-1932509401-project-member] Acquiring lock "refresh_cache-9688a493-7046-49e1-b5ab-0db9cfbf37aa" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 564.391650] env[62070]: DEBUG oslo_concurrency.lockutils [None req-0bc5a0f5-5f77-4994-b513-243b307e9e97 tempest-ServersAdminNegativeTestJSON-1932509401 tempest-ServersAdminNegativeTestJSON-1932509401-project-member] Acquired lock "refresh_cache-9688a493-7046-49e1-b5ab-0db9cfbf37aa" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 564.391650] env[62070]: DEBUG nova.network.neutron [None req-0bc5a0f5-5f77-4994-b513-243b307e9e97 tempest-ServersAdminNegativeTestJSON-1932509401 tempest-ServersAdminNegativeTestJSON-1932509401-project-member] [instance: 9688a493-7046-49e1-b5ab-0db9cfbf37aa] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 564.515019] env[62070]: DEBUG nova.compute.manager [None req-ef89e838-db76-4acf-b7ea-6f620e71b226 tempest-DeleteServersAdminTestJSON-428050376 tempest-DeleteServersAdminTestJSON-428050376-project-member] [instance: 768b0b5f-7d20-4bc6-87f6-b66adcce42c6] Start spawning the instance on the hypervisor. 
{{(pid=62070) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 564.553454] env[62070]: DEBUG nova.virt.hardware [None req-ef89e838-db76-4acf-b7ea-6f620e71b226 tempest-DeleteServersAdminTestJSON-428050376 tempest-DeleteServersAdminTestJSON-428050376-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T09:21:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T09:21:17Z,direct_url=,disk_format='vmdk',id=43ea607c-7ece-4601-9b11-75c6a16aa7dd,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9d42cb2bbadf40d6b35f237f71234611',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T09:21:18Z,virtual_size=,visibility=), allow threads: False {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 564.553454] env[62070]: DEBUG nova.virt.hardware [None req-ef89e838-db76-4acf-b7ea-6f620e71b226 tempest-DeleteServersAdminTestJSON-428050376 tempest-DeleteServersAdminTestJSON-428050376-project-member] Flavor limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 564.553454] env[62070]: DEBUG nova.virt.hardware [None req-ef89e838-db76-4acf-b7ea-6f620e71b226 tempest-DeleteServersAdminTestJSON-428050376 tempest-DeleteServersAdminTestJSON-428050376-project-member] Image limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 564.553880] env[62070]: DEBUG nova.virt.hardware [None req-ef89e838-db76-4acf-b7ea-6f620e71b226 tempest-DeleteServersAdminTestJSON-428050376 tempest-DeleteServersAdminTestJSON-428050376-project-member] Flavor pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 564.553880] env[62070]: DEBUG nova.virt.hardware [None req-ef89e838-db76-4acf-b7ea-6f620e71b226 tempest-DeleteServersAdminTestJSON-428050376 tempest-DeleteServersAdminTestJSON-428050376-project-member] Image pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 564.553880] env[62070]: DEBUG nova.virt.hardware [None req-ef89e838-db76-4acf-b7ea-6f620e71b226 tempest-DeleteServersAdminTestJSON-428050376 tempest-DeleteServersAdminTestJSON-428050376-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 564.553880] env[62070]: DEBUG nova.virt.hardware [None req-ef89e838-db76-4acf-b7ea-6f620e71b226 tempest-DeleteServersAdminTestJSON-428050376 tempest-DeleteServersAdminTestJSON-428050376-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 564.553880] env[62070]: DEBUG nova.virt.hardware [None req-ef89e838-db76-4acf-b7ea-6f620e71b226 tempest-DeleteServersAdminTestJSON-428050376 tempest-DeleteServersAdminTestJSON-428050376-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 
564.554073] env[62070]: DEBUG nova.virt.hardware [None req-ef89e838-db76-4acf-b7ea-6f620e71b226 tempest-DeleteServersAdminTestJSON-428050376 tempest-DeleteServersAdminTestJSON-428050376-project-member] Got 1 possible topologies {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 564.554073] env[62070]: DEBUG nova.virt.hardware [None req-ef89e838-db76-4acf-b7ea-6f620e71b226 tempest-DeleteServersAdminTestJSON-428050376 tempest-DeleteServersAdminTestJSON-428050376-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 564.554073] env[62070]: DEBUG nova.virt.hardware [None req-ef89e838-db76-4acf-b7ea-6f620e71b226 tempest-DeleteServersAdminTestJSON-428050376 tempest-DeleteServersAdminTestJSON-428050376-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 564.555061] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cef551df-6a9b-472f-8fc4-42892c3618a7 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 564.562927] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-315a648f-4010-41b5-b2bd-305c85ed3c86 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 564.647142] env[62070]: DEBUG oslo_concurrency.lockutils [None req-e375be83-77df-4817-8661-3151cdf18f15 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Releasing lock "refresh_cache-9fd18ad4-7c72-4a13-8c29-da660a060020" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 564.647610] env[62070]: DEBUG nova.compute.manager [None req-e375be83-77df-4817-8661-3151cdf18f15 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] [instance: 9fd18ad4-7c72-4a13-8c29-da660a060020] Start destroying the instance on the hypervisor. 
{{(pid=62070) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 564.647803] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-e375be83-77df-4817-8661-3151cdf18f15 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] [instance: 9fd18ad4-7c72-4a13-8c29-da660a060020] Destroying instance {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 564.648473] env[62070]: DEBUG oslo_concurrency.lockutils [req-72ce475a-0ef9-40b3-8c53-c9bd51895587 req-29560161-b56b-421f-915c-3def2020c9f7 service nova] Acquired lock "refresh_cache-9fd18ad4-7c72-4a13-8c29-da660a060020" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 564.648654] env[62070]: DEBUG nova.network.neutron [req-72ce475a-0ef9-40b3-8c53-c9bd51895587 req-29560161-b56b-421f-915c-3def2020c9f7 service nova] [instance: 9fd18ad4-7c72-4a13-8c29-da660a060020] Refreshing network info cache for port 2b87ed7e-d1f9-4892-bf38-4d0846594037 {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 564.649809] env[62070]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f9976ab2-ccbf-4666-9cf7-a636e28ab1ce {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 564.654217] env[62070]: DEBUG nova.network.neutron [req-00472304-8133-4f61-bc27-dad30a318517 req-234ab464-e11a-4075-b4c9-5cdeb9763c05 service nova] [instance: deee86eb-365b-4104-8687-72abdbf3807f] Instance cache missing network info. {{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 564.661331] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1fc9c1d-a1fb-4f1a-803e-2531ba8fae39 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 564.686159] env[62070]: WARNING nova.virt.vmwareapi.vmops [None req-e375be83-77df-4817-8661-3151cdf18f15 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] [instance: 9fd18ad4-7c72-4a13-8c29-da660a060020] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 9fd18ad4-7c72-4a13-8c29-da660a060020 could not be found. [ 564.686448] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-e375be83-77df-4817-8661-3151cdf18f15 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] [instance: 9fd18ad4-7c72-4a13-8c29-da660a060020] Instance destroyed {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 564.686676] env[62070]: INFO nova.compute.manager [None req-e375be83-77df-4817-8661-3151cdf18f15 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] [instance: 9fd18ad4-7c72-4a13-8c29-da660a060020] Took 0.04 seconds to destroy the instance on the hypervisor. [ 564.686971] env[62070]: DEBUG oslo.service.loopingcall [None req-e375be83-77df-4817-8661-3151cdf18f15 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 564.689437] env[62070]: DEBUG nova.compute.manager [-] [instance: 9fd18ad4-7c72-4a13-8c29-da660a060020] Deallocating network for instance {{(pid=62070) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 564.689510] env[62070]: DEBUG nova.network.neutron [-] [instance: 9fd18ad4-7c72-4a13-8c29-da660a060020] deallocate_for_instance() {{(pid=62070) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 564.716766] env[62070]: DEBUG nova.network.neutron [-] [instance: 9fd18ad4-7c72-4a13-8c29-da660a060020] Instance cache missing network info. {{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 564.793167] env[62070]: DEBUG nova.network.neutron [req-00472304-8133-4f61-bc27-dad30a318517 req-234ab464-e11a-4075-b4c9-5cdeb9763c05 service nova] [instance: deee86eb-365b-4104-8687-72abdbf3807f] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 564.920687] env[62070]: DEBUG nova.network.neutron [None req-0bc5a0f5-5f77-4994-b513-243b307e9e97 tempest-ServersAdminNegativeTestJSON-1932509401 tempest-ServersAdminNegativeTestJSON-1932509401-project-member] [instance: 9688a493-7046-49e1-b5ab-0db9cfbf37aa] Instance cache missing network info. {{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 565.033359] env[62070]: DEBUG nova.network.neutron [None req-0bc5a0f5-5f77-4994-b513-243b307e9e97 tempest-ServersAdminNegativeTestJSON-1932509401 tempest-ServersAdminNegativeTestJSON-1932509401-project-member] [instance: 9688a493-7046-49e1-b5ab-0db9cfbf37aa] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 565.173994] env[62070]: DEBUG nova.network.neutron [req-72ce475a-0ef9-40b3-8c53-c9bd51895587 req-29560161-b56b-421f-915c-3def2020c9f7 service nova] [instance: 9fd18ad4-7c72-4a13-8c29-da660a060020] Instance cache missing network info. 
{{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 565.219904] env[62070]: DEBUG nova.network.neutron [-] [instance: 9fd18ad4-7c72-4a13-8c29-da660a060020] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 565.295429] env[62070]: DEBUG oslo_concurrency.lockutils [req-00472304-8133-4f61-bc27-dad30a318517 req-234ab464-e11a-4075-b4c9-5cdeb9763c05 service nova] Releasing lock "refresh_cache-deee86eb-365b-4104-8687-72abdbf3807f" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 565.296033] env[62070]: DEBUG nova.compute.manager [req-00472304-8133-4f61-bc27-dad30a318517 req-234ab464-e11a-4075-b4c9-5cdeb9763c05 service nova] [instance: deee86eb-365b-4104-8687-72abdbf3807f] Received event network-vif-deleted-fb4707e9-7728-45c0-9830-a99c0cf681b3 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 565.296287] env[62070]: DEBUG nova.compute.manager [req-00472304-8133-4f61-bc27-dad30a318517 req-234ab464-e11a-4075-b4c9-5cdeb9763c05 service nova] [instance: 9b95dfcb-718e-478d-85bc-7479be9b67de] Received event network-changed-e3164438-df26-48c5-84eb-9925989f48e6 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 565.296392] env[62070]: DEBUG nova.compute.manager [req-00472304-8133-4f61-bc27-dad30a318517 req-234ab464-e11a-4075-b4c9-5cdeb9763c05 service nova] [instance: 9b95dfcb-718e-478d-85bc-7479be9b67de] Refreshing instance network info cache due to event network-changed-e3164438-df26-48c5-84eb-9925989f48e6. {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 565.296609] env[62070]: DEBUG oslo_concurrency.lockutils [req-00472304-8133-4f61-bc27-dad30a318517 req-234ab464-e11a-4075-b4c9-5cdeb9763c05 service nova] Acquiring lock "refresh_cache-9b95dfcb-718e-478d-85bc-7479be9b67de" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 565.296778] env[62070]: DEBUG oslo_concurrency.lockutils [req-00472304-8133-4f61-bc27-dad30a318517 req-234ab464-e11a-4075-b4c9-5cdeb9763c05 service nova] Acquired lock "refresh_cache-9b95dfcb-718e-478d-85bc-7479be9b67de" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 565.296951] env[62070]: DEBUG nova.network.neutron [req-00472304-8133-4f61-bc27-dad30a318517 req-234ab464-e11a-4075-b4c9-5cdeb9763c05 service nova] [instance: 9b95dfcb-718e-478d-85bc-7479be9b67de] Refreshing network info cache for port e3164438-df26-48c5-84eb-9925989f48e6 {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 565.499029] env[62070]: DEBUG nova.network.neutron [req-72ce475a-0ef9-40b3-8c53-c9bd51895587 req-29560161-b56b-421f-915c-3def2020c9f7 service nova] [instance: 9fd18ad4-7c72-4a13-8c29-da660a060020] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 565.538234] env[62070]: DEBUG oslo_concurrency.lockutils [None req-0bc5a0f5-5f77-4994-b513-243b307e9e97 tempest-ServersAdminNegativeTestJSON-1932509401 tempest-ServersAdminNegativeTestJSON-1932509401-project-member] Releasing lock "refresh_cache-9688a493-7046-49e1-b5ab-0db9cfbf37aa" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 565.538405] env[62070]: DEBUG 
nova.compute.manager [None req-0bc5a0f5-5f77-4994-b513-243b307e9e97 tempest-ServersAdminNegativeTestJSON-1932509401 tempest-ServersAdminNegativeTestJSON-1932509401-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62070) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 565.539867] env[62070]: DEBUG nova.compute.manager [None req-0bc5a0f5-5f77-4994-b513-243b307e9e97 tempest-ServersAdminNegativeTestJSON-1932509401 tempest-ServersAdminNegativeTestJSON-1932509401-project-member] [instance: 9688a493-7046-49e1-b5ab-0db9cfbf37aa] Deallocating network for instance {{(pid=62070) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 565.539867] env[62070]: DEBUG nova.network.neutron [None req-0bc5a0f5-5f77-4994-b513-243b307e9e97 tempest-ServersAdminNegativeTestJSON-1932509401 tempest-ServersAdminNegativeTestJSON-1932509401-project-member] [instance: 9688a493-7046-49e1-b5ab-0db9cfbf37aa] deallocate_for_instance() {{(pid=62070) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 565.557782] env[62070]: DEBUG nova.network.neutron [None req-0bc5a0f5-5f77-4994-b513-243b307e9e97 tempest-ServersAdminNegativeTestJSON-1932509401 tempest-ServersAdminNegativeTestJSON-1932509401-project-member] [instance: 9688a493-7046-49e1-b5ab-0db9cfbf37aa] Instance cache missing network info. {{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 565.667772] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71034ea6-8201-49e4-8a32-f1e56e3d9b20 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 565.677436] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-339a5158-076f-415b-ab75-e11996511fa6 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 565.713292] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b52e6cb5-8841-490d-9fe2-2bb78668d0ae {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 565.721736] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d29b77fb-5685-47b9-b8cf-c91a9b25216a {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 565.725912] env[62070]: INFO nova.compute.manager [-] [instance: 9fd18ad4-7c72-4a13-8c29-da660a060020] Took 1.04 seconds to deallocate network for instance. 
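The Acquiring/Acquired/Releasing lines and the '... acquired by "<function>" :: waited/held' lines that run through this whole section are oslo.concurrency locks: nova serializes resource-tracker claims on a "compute_resources" lock and per-instance cache refreshes on "refresh_cache-<uuid>" locks. A minimal example of both forms of the primitive, with lock names taken from the log:

    from oslo_concurrency import lockutils

    # Context-manager form: emits the plain Acquiring/Acquired/Releasing DEBUG
    # lines seen above when oslo.concurrency debug logging is enabled.
    with lockutils.lock("compute_resources"):
        pass  # e.g. claim or abort resources for an instance

    # Decorator form: emits the '... acquired by "<function>" :: waited/held'
    # lines, since it logs the name of the function holding the lock.
    @lockutils.synchronized("refresh_cache-9fd18ad4-7c72-4a13-8c29-da660a060020")
    def refresh_network_info_cache():
        pass  # e.g. rebuild the instance_info_cache from neutron

    refresh_network_info_cache()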
[ 565.729331] env[62070]: DEBUG nova.compute.claims [None req-e375be83-77df-4817-8661-3151cdf18f15 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] [instance: 9fd18ad4-7c72-4a13-8c29-da660a060020] Aborting claim: {{(pid=62070) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 565.729573] env[62070]: DEBUG oslo_concurrency.lockutils [None req-e375be83-77df-4817-8661-3151cdf18f15 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 565.739387] env[62070]: DEBUG nova.compute.provider_tree [None req-1b5d4941-f26d-40d2-8fb4-3b7b7d57038b tempest-ServersAdminTestJSON-1165817432 tempest-ServersAdminTestJSON-1165817432-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 565.819568] env[62070]: DEBUG nova.network.neutron [req-00472304-8133-4f61-bc27-dad30a318517 req-234ab464-e11a-4075-b4c9-5cdeb9763c05 service nova] [instance: 9b95dfcb-718e-478d-85bc-7479be9b67de] Instance cache missing network info. {{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 565.911219] env[62070]: DEBUG nova.network.neutron [req-00472304-8133-4f61-bc27-dad30a318517 req-234ab464-e11a-4075-b4c9-5cdeb9763c05 service nova] [instance: 9b95dfcb-718e-478d-85bc-7479be9b67de] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 566.003138] env[62070]: DEBUG oslo_concurrency.lockutils [req-72ce475a-0ef9-40b3-8c53-c9bd51895587 req-29560161-b56b-421f-915c-3def2020c9f7 service nova] Releasing lock "refresh_cache-9fd18ad4-7c72-4a13-8c29-da660a060020" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 566.062457] env[62070]: DEBUG nova.network.neutron [None req-0bc5a0f5-5f77-4994-b513-243b307e9e97 tempest-ServersAdminNegativeTestJSON-1932509401 tempest-ServersAdminNegativeTestJSON-1932509401-project-member] [instance: 9688a493-7046-49e1-b5ab-0db9cfbf37aa] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 566.093562] env[62070]: ERROR nova.compute.manager [None req-ef89e838-db76-4acf-b7ea-6f620e71b226 tempest-DeleteServersAdminTestJSON-428050376 tempest-DeleteServersAdminTestJSON-428050376-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port e3ce78d3-0012-4892-8bad-29f3337f38db, please check neutron logs for more information. 
[ 566.093562] env[62070]: ERROR nova.compute.manager Traceback (most recent call last): [ 566.093562] env[62070]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 566.093562] env[62070]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 566.093562] env[62070]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 566.093562] env[62070]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 566.093562] env[62070]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 566.093562] env[62070]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 566.093562] env[62070]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 566.093562] env[62070]: ERROR nova.compute.manager self.force_reraise() [ 566.093562] env[62070]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 566.093562] env[62070]: ERROR nova.compute.manager raise self.value [ 566.093562] env[62070]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 566.093562] env[62070]: ERROR nova.compute.manager updated_port = self._update_port( [ 566.093562] env[62070]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 566.093562] env[62070]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 566.094086] env[62070]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 566.094086] env[62070]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 566.094086] env[62070]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port e3ce78d3-0012-4892-8bad-29f3337f38db, please check neutron logs for more information. 
[ 566.094086] env[62070]: ERROR nova.compute.manager [ 566.094191] env[62070]: Traceback (most recent call last): [ 566.094191] env[62070]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 566.094191] env[62070]: listener.cb(fileno) [ 566.094191] env[62070]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 566.094191] env[62070]: result = function(*args, **kwargs) [ 566.094191] env[62070]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 566.094191] env[62070]: return func(*args, **kwargs) [ 566.094191] env[62070]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 566.094191] env[62070]: raise e [ 566.094191] env[62070]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 566.094191] env[62070]: nwinfo = self.network_api.allocate_for_instance( [ 566.094191] env[62070]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 566.094191] env[62070]: created_port_ids = self._update_ports_for_instance( [ 566.094512] env[62070]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 566.094512] env[62070]: with excutils.save_and_reraise_exception(): [ 566.094512] env[62070]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 566.094512] env[62070]: self.force_reraise() [ 566.094512] env[62070]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 566.094512] env[62070]: raise self.value [ 566.094512] env[62070]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 566.094512] env[62070]: updated_port = self._update_port( [ 566.094512] env[62070]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 566.094512] env[62070]: _ensure_no_port_binding_failure(port) [ 566.094512] env[62070]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 566.094512] env[62070]: raise exception.PortBindingFailed(port_id=port['id']) [ 566.094512] env[62070]: nova.exception.PortBindingFailed: Binding failed for port e3ce78d3-0012-4892-8bad-29f3337f38db, please check neutron logs for more information. [ 566.094512] env[62070]: Removing descriptor: 16 [ 566.095452] env[62070]: ERROR nova.compute.manager [None req-ef89e838-db76-4acf-b7ea-6f620e71b226 tempest-DeleteServersAdminTestJSON-428050376 tempest-DeleteServersAdminTestJSON-428050376-project-member] [instance: 768b0b5f-7d20-4bc6-87f6-b66adcce42c6] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port e3ce78d3-0012-4892-8bad-29f3337f38db, please check neutron logs for more information. 
[ 566.095452] env[62070]: ERROR nova.compute.manager [instance: 768b0b5f-7d20-4bc6-87f6-b66adcce42c6] Traceback (most recent call last): [ 566.095452] env[62070]: ERROR nova.compute.manager [instance: 768b0b5f-7d20-4bc6-87f6-b66adcce42c6] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 566.095452] env[62070]: ERROR nova.compute.manager [instance: 768b0b5f-7d20-4bc6-87f6-b66adcce42c6] yield resources [ 566.095452] env[62070]: ERROR nova.compute.manager [instance: 768b0b5f-7d20-4bc6-87f6-b66adcce42c6] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 566.095452] env[62070]: ERROR nova.compute.manager [instance: 768b0b5f-7d20-4bc6-87f6-b66adcce42c6] self.driver.spawn(context, instance, image_meta, [ 566.095452] env[62070]: ERROR nova.compute.manager [instance: 768b0b5f-7d20-4bc6-87f6-b66adcce42c6] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 566.095452] env[62070]: ERROR nova.compute.manager [instance: 768b0b5f-7d20-4bc6-87f6-b66adcce42c6] self._vmops.spawn(context, instance, image_meta, injected_files, [ 566.095452] env[62070]: ERROR nova.compute.manager [instance: 768b0b5f-7d20-4bc6-87f6-b66adcce42c6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 566.095452] env[62070]: ERROR nova.compute.manager [instance: 768b0b5f-7d20-4bc6-87f6-b66adcce42c6] vm_ref = self.build_virtual_machine(instance, [ 566.095452] env[62070]: ERROR nova.compute.manager [instance: 768b0b5f-7d20-4bc6-87f6-b66adcce42c6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 566.095752] env[62070]: ERROR nova.compute.manager [instance: 768b0b5f-7d20-4bc6-87f6-b66adcce42c6] vif_infos = vmwarevif.get_vif_info(self._session, [ 566.095752] env[62070]: ERROR nova.compute.manager [instance: 768b0b5f-7d20-4bc6-87f6-b66adcce42c6] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 566.095752] env[62070]: ERROR nova.compute.manager [instance: 768b0b5f-7d20-4bc6-87f6-b66adcce42c6] for vif in network_info: [ 566.095752] env[62070]: ERROR nova.compute.manager [instance: 768b0b5f-7d20-4bc6-87f6-b66adcce42c6] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 566.095752] env[62070]: ERROR nova.compute.manager [instance: 768b0b5f-7d20-4bc6-87f6-b66adcce42c6] return self._sync_wrapper(fn, *args, **kwargs) [ 566.095752] env[62070]: ERROR nova.compute.manager [instance: 768b0b5f-7d20-4bc6-87f6-b66adcce42c6] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 566.095752] env[62070]: ERROR nova.compute.manager [instance: 768b0b5f-7d20-4bc6-87f6-b66adcce42c6] self.wait() [ 566.095752] env[62070]: ERROR nova.compute.manager [instance: 768b0b5f-7d20-4bc6-87f6-b66adcce42c6] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 566.095752] env[62070]: ERROR nova.compute.manager [instance: 768b0b5f-7d20-4bc6-87f6-b66adcce42c6] self[:] = self._gt.wait() [ 566.095752] env[62070]: ERROR nova.compute.manager [instance: 768b0b5f-7d20-4bc6-87f6-b66adcce42c6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 566.095752] env[62070]: ERROR nova.compute.manager [instance: 768b0b5f-7d20-4bc6-87f6-b66adcce42c6] return self._exit_event.wait() [ 566.095752] env[62070]: ERROR nova.compute.manager [instance: 768b0b5f-7d20-4bc6-87f6-b66adcce42c6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 566.095752] env[62070]: ERROR 
nova.compute.manager [instance: 768b0b5f-7d20-4bc6-87f6-b66adcce42c6] result = hub.switch() [ 566.096096] env[62070]: ERROR nova.compute.manager [instance: 768b0b5f-7d20-4bc6-87f6-b66adcce42c6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 566.096096] env[62070]: ERROR nova.compute.manager [instance: 768b0b5f-7d20-4bc6-87f6-b66adcce42c6] return self.greenlet.switch() [ 566.096096] env[62070]: ERROR nova.compute.manager [instance: 768b0b5f-7d20-4bc6-87f6-b66adcce42c6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 566.096096] env[62070]: ERROR nova.compute.manager [instance: 768b0b5f-7d20-4bc6-87f6-b66adcce42c6] result = function(*args, **kwargs) [ 566.096096] env[62070]: ERROR nova.compute.manager [instance: 768b0b5f-7d20-4bc6-87f6-b66adcce42c6] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 566.096096] env[62070]: ERROR nova.compute.manager [instance: 768b0b5f-7d20-4bc6-87f6-b66adcce42c6] return func(*args, **kwargs) [ 566.096096] env[62070]: ERROR nova.compute.manager [instance: 768b0b5f-7d20-4bc6-87f6-b66adcce42c6] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 566.096096] env[62070]: ERROR nova.compute.manager [instance: 768b0b5f-7d20-4bc6-87f6-b66adcce42c6] raise e [ 566.096096] env[62070]: ERROR nova.compute.manager [instance: 768b0b5f-7d20-4bc6-87f6-b66adcce42c6] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 566.096096] env[62070]: ERROR nova.compute.manager [instance: 768b0b5f-7d20-4bc6-87f6-b66adcce42c6] nwinfo = self.network_api.allocate_for_instance( [ 566.096096] env[62070]: ERROR nova.compute.manager [instance: 768b0b5f-7d20-4bc6-87f6-b66adcce42c6] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 566.096096] env[62070]: ERROR nova.compute.manager [instance: 768b0b5f-7d20-4bc6-87f6-b66adcce42c6] created_port_ids = self._update_ports_for_instance( [ 566.096096] env[62070]: ERROR nova.compute.manager [instance: 768b0b5f-7d20-4bc6-87f6-b66adcce42c6] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 566.096431] env[62070]: ERROR nova.compute.manager [instance: 768b0b5f-7d20-4bc6-87f6-b66adcce42c6] with excutils.save_and_reraise_exception(): [ 566.096431] env[62070]: ERROR nova.compute.manager [instance: 768b0b5f-7d20-4bc6-87f6-b66adcce42c6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 566.096431] env[62070]: ERROR nova.compute.manager [instance: 768b0b5f-7d20-4bc6-87f6-b66adcce42c6] self.force_reraise() [ 566.096431] env[62070]: ERROR nova.compute.manager [instance: 768b0b5f-7d20-4bc6-87f6-b66adcce42c6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 566.096431] env[62070]: ERROR nova.compute.manager [instance: 768b0b5f-7d20-4bc6-87f6-b66adcce42c6] raise self.value [ 566.096431] env[62070]: ERROR nova.compute.manager [instance: 768b0b5f-7d20-4bc6-87f6-b66adcce42c6] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 566.096431] env[62070]: ERROR nova.compute.manager [instance: 768b0b5f-7d20-4bc6-87f6-b66adcce42c6] updated_port = self._update_port( [ 566.096431] env[62070]: ERROR nova.compute.manager [instance: 768b0b5f-7d20-4bc6-87f6-b66adcce42c6] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 566.096431] 
env[62070]: ERROR nova.compute.manager [instance: 768b0b5f-7d20-4bc6-87f6-b66adcce42c6] _ensure_no_port_binding_failure(port) [ 566.096431] env[62070]: ERROR nova.compute.manager [instance: 768b0b5f-7d20-4bc6-87f6-b66adcce42c6] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 566.096431] env[62070]: ERROR nova.compute.manager [instance: 768b0b5f-7d20-4bc6-87f6-b66adcce42c6] raise exception.PortBindingFailed(port_id=port['id']) [ 566.096431] env[62070]: ERROR nova.compute.manager [instance: 768b0b5f-7d20-4bc6-87f6-b66adcce42c6] nova.exception.PortBindingFailed: Binding failed for port e3ce78d3-0012-4892-8bad-29f3337f38db, please check neutron logs for more information. [ 566.096431] env[62070]: ERROR nova.compute.manager [instance: 768b0b5f-7d20-4bc6-87f6-b66adcce42c6] [ 566.096834] env[62070]: INFO nova.compute.manager [None req-ef89e838-db76-4acf-b7ea-6f620e71b226 tempest-DeleteServersAdminTestJSON-428050376 tempest-DeleteServersAdminTestJSON-428050376-project-member] [instance: 768b0b5f-7d20-4bc6-87f6-b66adcce42c6] Terminating instance [ 566.100613] env[62070]: DEBUG oslo_concurrency.lockutils [None req-ef89e838-db76-4acf-b7ea-6f620e71b226 tempest-DeleteServersAdminTestJSON-428050376 tempest-DeleteServersAdminTestJSON-428050376-project-member] Acquiring lock "refresh_cache-768b0b5f-7d20-4bc6-87f6-b66adcce42c6" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 566.101976] env[62070]: DEBUG oslo_concurrency.lockutils [None req-ef89e838-db76-4acf-b7ea-6f620e71b226 tempest-DeleteServersAdminTestJSON-428050376 tempest-DeleteServersAdminTestJSON-428050376-project-member] Acquired lock "refresh_cache-768b0b5f-7d20-4bc6-87f6-b66adcce42c6" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 566.101976] env[62070]: DEBUG nova.network.neutron [None req-ef89e838-db76-4acf-b7ea-6f620e71b226 tempest-DeleteServersAdminTestJSON-428050376 tempest-DeleteServersAdminTestJSON-428050376-project-member] [instance: 768b0b5f-7d20-4bc6-87f6-b66adcce42c6] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 566.223250] env[62070]: DEBUG oslo_concurrency.lockutils [None req-67d8c644-2517-46bc-9d7f-c0ef47d3d2eb tempest-ImagesOneServerNegativeTestJSON-1867967444 tempest-ImagesOneServerNegativeTestJSON-1867967444-project-member] Acquiring lock "d8478b63-3a62-4afa-950b-edf9774e8ea8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 566.223548] env[62070]: DEBUG oslo_concurrency.lockutils [None req-67d8c644-2517-46bc-9d7f-c0ef47d3d2eb tempest-ImagesOneServerNegativeTestJSON-1867967444 tempest-ImagesOneServerNegativeTestJSON-1867967444-project-member] Lock "d8478b63-3a62-4afa-950b-edf9774e8ea8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 566.243213] env[62070]: DEBUG nova.scheduler.client.report [None req-1b5d4941-f26d-40d2-8fb4-3b7b7d57038b tempest-ServersAdminTestJSON-1165817432 tempest-ServersAdminTestJSON-1165817432-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 
48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 566.418935] env[62070]: DEBUG oslo_concurrency.lockutils [req-00472304-8133-4f61-bc27-dad30a318517 req-234ab464-e11a-4075-b4c9-5cdeb9763c05 service nova] Releasing lock "refresh_cache-9b95dfcb-718e-478d-85bc-7479be9b67de" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 566.418935] env[62070]: DEBUG nova.compute.manager [req-00472304-8133-4f61-bc27-dad30a318517 req-234ab464-e11a-4075-b4c9-5cdeb9763c05 service nova] [instance: 9b95dfcb-718e-478d-85bc-7479be9b67de] Received event network-vif-deleted-e3164438-df26-48c5-84eb-9925989f48e6 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 566.568973] env[62070]: INFO nova.compute.manager [None req-0bc5a0f5-5f77-4994-b513-243b307e9e97 tempest-ServersAdminNegativeTestJSON-1932509401 tempest-ServersAdminNegativeTestJSON-1932509401-project-member] [instance: 9688a493-7046-49e1-b5ab-0db9cfbf37aa] Took 1.03 seconds to deallocate network for instance. [ 566.639869] env[62070]: DEBUG nova.network.neutron [None req-ef89e838-db76-4acf-b7ea-6f620e71b226 tempest-DeleteServersAdminTestJSON-428050376 tempest-DeleteServersAdminTestJSON-428050376-project-member] [instance: 768b0b5f-7d20-4bc6-87f6-b66adcce42c6] Instance cache missing network info. {{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 566.752480] env[62070]: DEBUG oslo_concurrency.lockutils [None req-1b5d4941-f26d-40d2-8fb4-3b7b7d57038b tempest-ServersAdminTestJSON-1165817432 tempest-ServersAdminTestJSON-1165817432-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.378s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 566.752986] env[62070]: DEBUG nova.compute.manager [None req-1b5d4941-f26d-40d2-8fb4-3b7b7d57038b tempest-ServersAdminTestJSON-1165817432 tempest-ServersAdminTestJSON-1165817432-project-member] [instance: e3306a4a-b1cf-4d13-b4de-bd6a6a2e2619] Start building networks asynchronously for instance. 
{{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 566.756600] env[62070]: DEBUG oslo_concurrency.lockutils [None req-ff380498-571f-460a-ac0d-214b8f51e56f tempest-ServersAdminTestJSON-1165817432 tempest-ServersAdminTestJSON-1165817432-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.942s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 566.760738] env[62070]: INFO nova.compute.claims [None req-ff380498-571f-460a-ac0d-214b8f51e56f tempest-ServersAdminTestJSON-1165817432 tempest-ServersAdminTestJSON-1165817432-project-member] [instance: 5936aded-90fc-4f77-8103-8c9e1912379c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 566.792213] env[62070]: DEBUG nova.network.neutron [None req-ef89e838-db76-4acf-b7ea-6f620e71b226 tempest-DeleteServersAdminTestJSON-428050376 tempest-DeleteServersAdminTestJSON-428050376-project-member] [instance: 768b0b5f-7d20-4bc6-87f6-b66adcce42c6] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 567.026077] env[62070]: DEBUG nova.compute.manager [req-8187c359-1749-4172-8912-9ce7c7ae3329 req-566e46f2-3c5b-4e36-9b99-8d6a18a9670a service nova] [instance: 9fd18ad4-7c72-4a13-8c29-da660a060020] Received event network-vif-deleted-2b87ed7e-d1f9-4892-bf38-4d0846594037 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 567.045989] env[62070]: DEBUG nova.compute.manager [req-c6cdd588-5aeb-4044-9f81-1b8cd382a841 req-6dc77e6f-114b-44dc-8fda-32ee8253c737 service nova] [instance: 768b0b5f-7d20-4bc6-87f6-b66adcce42c6] Received event network-changed-e3ce78d3-0012-4892-8bad-29f3337f38db {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 567.046569] env[62070]: DEBUG nova.compute.manager [req-c6cdd588-5aeb-4044-9f81-1b8cd382a841 req-6dc77e6f-114b-44dc-8fda-32ee8253c737 service nova] [instance: 768b0b5f-7d20-4bc6-87f6-b66adcce42c6] Refreshing instance network info cache due to event network-changed-e3ce78d3-0012-4892-8bad-29f3337f38db. {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 567.046569] env[62070]: DEBUG oslo_concurrency.lockutils [req-c6cdd588-5aeb-4044-9f81-1b8cd382a841 req-6dc77e6f-114b-44dc-8fda-32ee8253c737 service nova] Acquiring lock "refresh_cache-768b0b5f-7d20-4bc6-87f6-b66adcce42c6" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 567.263078] env[62070]: DEBUG nova.compute.utils [None req-1b5d4941-f26d-40d2-8fb4-3b7b7d57038b tempest-ServersAdminTestJSON-1165817432 tempest-ServersAdminTestJSON-1165817432-project-member] Using /dev/sd instead of None {{(pid=62070) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 567.267103] env[62070]: DEBUG nova.compute.manager [None req-1b5d4941-f26d-40d2-8fb4-3b7b7d57038b tempest-ServersAdminTestJSON-1165817432 tempest-ServersAdminTestJSON-1165817432-project-member] [instance: e3306a4a-b1cf-4d13-b4de-bd6a6a2e2619] Allocating IP information in the background. 
{{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 567.267307] env[62070]: DEBUG nova.network.neutron [None req-1b5d4941-f26d-40d2-8fb4-3b7b7d57038b tempest-ServersAdminTestJSON-1165817432 tempest-ServersAdminTestJSON-1165817432-project-member] [instance: e3306a4a-b1cf-4d13-b4de-bd6a6a2e2619] allocate_for_instance() {{(pid=62070) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 567.295212] env[62070]: DEBUG oslo_concurrency.lockutils [None req-ef89e838-db76-4acf-b7ea-6f620e71b226 tempest-DeleteServersAdminTestJSON-428050376 tempest-DeleteServersAdminTestJSON-428050376-project-member] Releasing lock "refresh_cache-768b0b5f-7d20-4bc6-87f6-b66adcce42c6" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 567.295212] env[62070]: DEBUG nova.compute.manager [None req-ef89e838-db76-4acf-b7ea-6f620e71b226 tempest-DeleteServersAdminTestJSON-428050376 tempest-DeleteServersAdminTestJSON-428050376-project-member] [instance: 768b0b5f-7d20-4bc6-87f6-b66adcce42c6] Start destroying the instance on the hypervisor. {{(pid=62070) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 567.295323] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-ef89e838-db76-4acf-b7ea-6f620e71b226 tempest-DeleteServersAdminTestJSON-428050376 tempest-DeleteServersAdminTestJSON-428050376-project-member] [instance: 768b0b5f-7d20-4bc6-87f6-b66adcce42c6] Destroying instance {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 567.295629] env[62070]: DEBUG oslo_concurrency.lockutils [req-c6cdd588-5aeb-4044-9f81-1b8cd382a841 req-6dc77e6f-114b-44dc-8fda-32ee8253c737 service nova] Acquired lock "refresh_cache-768b0b5f-7d20-4bc6-87f6-b66adcce42c6" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 567.296078] env[62070]: DEBUG nova.network.neutron [req-c6cdd588-5aeb-4044-9f81-1b8cd382a841 req-6dc77e6f-114b-44dc-8fda-32ee8253c737 service nova] [instance: 768b0b5f-7d20-4bc6-87f6-b66adcce42c6] Refreshing network info cache for port e3ce78d3-0012-4892-8bad-29f3337f38db {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 567.299868] env[62070]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-84c77ba7-9e64-40ec-883f-2ebcfa607250 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 567.309244] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7180c587-49c9-4820-8a48-2cfbe511a736 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 567.326288] env[62070]: DEBUG nova.policy [None req-1b5d4941-f26d-40d2-8fb4-3b7b7d57038b tempest-ServersAdminTestJSON-1165817432 tempest-ServersAdminTestJSON-1165817432-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '293b5493dba141d6bdf93c1e8d00abf0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1fc015059e894364ae55cc21204693d3', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62070) authorize /opt/stack/nova/nova/policy.py:201}} [ 567.334385] env[62070]: WARNING 
nova.virt.vmwareapi.vmops [None req-ef89e838-db76-4acf-b7ea-6f620e71b226 tempest-DeleteServersAdminTestJSON-428050376 tempest-DeleteServersAdminTestJSON-428050376-project-member] [instance: 768b0b5f-7d20-4bc6-87f6-b66adcce42c6] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 768b0b5f-7d20-4bc6-87f6-b66adcce42c6 could not be found. [ 567.335032] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-ef89e838-db76-4acf-b7ea-6f620e71b226 tempest-DeleteServersAdminTestJSON-428050376 tempest-DeleteServersAdminTestJSON-428050376-project-member] [instance: 768b0b5f-7d20-4bc6-87f6-b66adcce42c6] Instance destroyed {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 567.335032] env[62070]: INFO nova.compute.manager [None req-ef89e838-db76-4acf-b7ea-6f620e71b226 tempest-DeleteServersAdminTestJSON-428050376 tempest-DeleteServersAdminTestJSON-428050376-project-member] [instance: 768b0b5f-7d20-4bc6-87f6-b66adcce42c6] Took 0.04 seconds to destroy the instance on the hypervisor. [ 567.335151] env[62070]: DEBUG oslo.service.loopingcall [None req-ef89e838-db76-4acf-b7ea-6f620e71b226 tempest-DeleteServersAdminTestJSON-428050376 tempest-DeleteServersAdminTestJSON-428050376-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 567.335285] env[62070]: DEBUG nova.compute.manager [-] [instance: 768b0b5f-7d20-4bc6-87f6-b66adcce42c6] Deallocating network for instance {{(pid=62070) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 567.335381] env[62070]: DEBUG nova.network.neutron [-] [instance: 768b0b5f-7d20-4bc6-87f6-b66adcce42c6] deallocate_for_instance() {{(pid=62070) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 567.367123] env[62070]: DEBUG nova.network.neutron [-] [instance: 768b0b5f-7d20-4bc6-87f6-b66adcce42c6] Instance cache missing network info. 
{{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 567.612156] env[62070]: INFO nova.scheduler.client.report [None req-0bc5a0f5-5f77-4994-b513-243b307e9e97 tempest-ServersAdminNegativeTestJSON-1932509401 tempest-ServersAdminNegativeTestJSON-1932509401-project-member] Deleted allocations for instance 9688a493-7046-49e1-b5ab-0db9cfbf37aa [ 567.742555] env[62070]: DEBUG oslo_concurrency.lockutils [None req-fceeb460-143a-4ad7-bde1-61f1f32cf593 tempest-VolumesAssistedSnapshotsTest-120562102 tempest-VolumesAssistedSnapshotsTest-120562102-project-member] Acquiring lock "d7a90be3-d3d6-4626-944b-b907cf7fb64d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 567.742791] env[62070]: DEBUG oslo_concurrency.lockutils [None req-fceeb460-143a-4ad7-bde1-61f1f32cf593 tempest-VolumesAssistedSnapshotsTest-120562102 tempest-VolumesAssistedSnapshotsTest-120562102-project-member] Lock "d7a90be3-d3d6-4626-944b-b907cf7fb64d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 567.745827] env[62070]: DEBUG nova.network.neutron [None req-1b5d4941-f26d-40d2-8fb4-3b7b7d57038b tempest-ServersAdminTestJSON-1165817432 tempest-ServersAdminTestJSON-1165817432-project-member] [instance: e3306a4a-b1cf-4d13-b4de-bd6a6a2e2619] Successfully created port: bd48685f-7c51-4b6b-8b0b-6ac758a2eb4d {{(pid=62070) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 567.775120] env[62070]: DEBUG nova.compute.manager [None req-1b5d4941-f26d-40d2-8fb4-3b7b7d57038b tempest-ServersAdminTestJSON-1165817432 tempest-ServersAdminTestJSON-1165817432-project-member] [instance: e3306a4a-b1cf-4d13-b4de-bd6a6a2e2619] Start building block device mappings for instance. {{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 567.832648] env[62070]: DEBUG nova.network.neutron [req-c6cdd588-5aeb-4044-9f81-1b8cd382a841 req-6dc77e6f-114b-44dc-8fda-32ee8253c737 service nova] [instance: 768b0b5f-7d20-4bc6-87f6-b66adcce42c6] Instance cache missing network info. 
{{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 567.875604] env[62070]: DEBUG nova.network.neutron [-] [instance: 768b0b5f-7d20-4bc6-87f6-b66adcce42c6] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 568.021265] env[62070]: DEBUG nova.network.neutron [req-c6cdd588-5aeb-4044-9f81-1b8cd382a841 req-6dc77e6f-114b-44dc-8fda-32ee8253c737 service nova] [instance: 768b0b5f-7d20-4bc6-87f6-b66adcce42c6] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 568.122894] env[62070]: DEBUG oslo_concurrency.lockutils [None req-0bc5a0f5-5f77-4994-b513-243b307e9e97 tempest-ServersAdminNegativeTestJSON-1932509401 tempest-ServersAdminNegativeTestJSON-1932509401-project-member] Lock "9688a493-7046-49e1-b5ab-0db9cfbf37aa" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 23.041s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 568.134993] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b116002a-84c4-43fb-b6a9-d1864b4823bc {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 568.143975] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8bd5032-ddce-48f2-8642-a4af422bbb51 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 568.182634] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e78027c2-81c9-4d9a-925e-4aa60949c563 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 568.192328] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3171f931-f87e-48ad-9dd9-105b1f4918f8 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 568.208649] env[62070]: DEBUG nova.compute.provider_tree [None req-ff380498-571f-460a-ac0d-214b8f51e56f tempest-ServersAdminTestJSON-1165817432 tempest-ServersAdminTestJSON-1165817432-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 568.381653] env[62070]: INFO nova.compute.manager [-] [instance: 768b0b5f-7d20-4bc6-87f6-b66adcce42c6] Took 1.05 seconds to deallocate network for instance. 
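Each PortBindingFailed traceback above bottoms out in nova.network.neutron._ensure_no_port_binding_failure. A simplified, self-contained sketch of that check is below; the exception class is a stand-in for nova.exception.PortBindingFailed, and the port dict is hand-built from the port ID seen in the traceback:

class PortBindingFailed(Exception):  # stand-in for nova.exception.PortBindingFailed
    def __init__(self, port_id):
        super().__init__(f"Binding failed for port {port_id}, "
                         "please check neutron logs for more information.")

def ensure_no_port_binding_failure(port):
    # Neutron marks a port whose binding failed by setting binding:vif_type
    # to 'binding_failed'; Nova converts that into PortBindingFailed, which
    # then propagates up through _update_port and _update_ports_for_instance
    # as shown in the tracebacks above.
    if port.get('binding:vif_type') == 'binding_failed':
        raise PortBindingFailed(port_id=port['id'])

# Hand-built example using the port ID from the log:
failed_port = {'id': 'e3ce78d3-0012-4892-8bad-29f3337f38db',
               'binding:vif_type': 'binding_failed'}
try:
    ensure_no_port_binding_failure(failed_port)
except PortBindingFailed as exc:
    print(exc)  # Binding failed for port e3ce78d3-0012-4892-8bad-29f3337f38db, ...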
[ 568.387668] env[62070]: DEBUG nova.compute.claims [None req-ef89e838-db76-4acf-b7ea-6f620e71b226 tempest-DeleteServersAdminTestJSON-428050376 tempest-DeleteServersAdminTestJSON-428050376-project-member] [instance: 768b0b5f-7d20-4bc6-87f6-b66adcce42c6] Aborting claim: {{(pid=62070) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 568.388476] env[62070]: DEBUG oslo_concurrency.lockutils [None req-ef89e838-db76-4acf-b7ea-6f620e71b226 tempest-DeleteServersAdminTestJSON-428050376 tempest-DeleteServersAdminTestJSON-428050376-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 568.524443] env[62070]: DEBUG oslo_concurrency.lockutils [req-c6cdd588-5aeb-4044-9f81-1b8cd382a841 req-6dc77e6f-114b-44dc-8fda-32ee8253c737 service nova] Releasing lock "refresh_cache-768b0b5f-7d20-4bc6-87f6-b66adcce42c6" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 568.524727] env[62070]: DEBUG nova.compute.manager [req-c6cdd588-5aeb-4044-9f81-1b8cd382a841 req-6dc77e6f-114b-44dc-8fda-32ee8253c737 service nova] [instance: 768b0b5f-7d20-4bc6-87f6-b66adcce42c6] Received event network-vif-deleted-e3ce78d3-0012-4892-8bad-29f3337f38db {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 568.627934] env[62070]: DEBUG nova.compute.manager [None req-1f16b43e-5915-487b-ba0a-4bf16e720213 tempest-ImagesNegativeTestJSON-1253036663 tempest-ImagesNegativeTestJSON-1253036663-project-member] [instance: 88251634-8add-4216-b789-dfee77a1ae09] Starting instance... {{(pid=62070) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 568.717059] env[62070]: DEBUG nova.scheduler.client.report [None req-ff380498-571f-460a-ac0d-214b8f51e56f tempest-ServersAdminTestJSON-1165817432 tempest-ServersAdminTestJSON-1165817432-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 568.787797] env[62070]: DEBUG nova.compute.manager [None req-1b5d4941-f26d-40d2-8fb4-3b7b7d57038b tempest-ServersAdminTestJSON-1165817432 tempest-ServersAdminTestJSON-1165817432-project-member] [instance: e3306a4a-b1cf-4d13-b4de-bd6a6a2e2619] Start spawning the instance on the hypervisor. 
{{(pid=62070) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 568.825165] env[62070]: DEBUG nova.virt.hardware [None req-1b5d4941-f26d-40d2-8fb4-3b7b7d57038b tempest-ServersAdminTestJSON-1165817432 tempest-ServersAdminTestJSON-1165817432-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T09:21:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T09:21:17Z,direct_url=,disk_format='vmdk',id=43ea607c-7ece-4601-9b11-75c6a16aa7dd,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9d42cb2bbadf40d6b35f237f71234611',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T09:21:18Z,virtual_size=,visibility=), allow threads: False {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 568.825422] env[62070]: DEBUG nova.virt.hardware [None req-1b5d4941-f26d-40d2-8fb4-3b7b7d57038b tempest-ServersAdminTestJSON-1165817432 tempest-ServersAdminTestJSON-1165817432-project-member] Flavor limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 568.825575] env[62070]: DEBUG nova.virt.hardware [None req-1b5d4941-f26d-40d2-8fb4-3b7b7d57038b tempest-ServersAdminTestJSON-1165817432 tempest-ServersAdminTestJSON-1165817432-project-member] Image limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 568.825764] env[62070]: DEBUG nova.virt.hardware [None req-1b5d4941-f26d-40d2-8fb4-3b7b7d57038b tempest-ServersAdminTestJSON-1165817432 tempest-ServersAdminTestJSON-1165817432-project-member] Flavor pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 568.825911] env[62070]: DEBUG nova.virt.hardware [None req-1b5d4941-f26d-40d2-8fb4-3b7b7d57038b tempest-ServersAdminTestJSON-1165817432 tempest-ServersAdminTestJSON-1165817432-project-member] Image pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 568.826065] env[62070]: DEBUG nova.virt.hardware [None req-1b5d4941-f26d-40d2-8fb4-3b7b7d57038b tempest-ServersAdminTestJSON-1165817432 tempest-ServersAdminTestJSON-1165817432-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 568.826298] env[62070]: DEBUG nova.virt.hardware [None req-1b5d4941-f26d-40d2-8fb4-3b7b7d57038b tempest-ServersAdminTestJSON-1165817432 tempest-ServersAdminTestJSON-1165817432-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 568.826471] env[62070]: DEBUG nova.virt.hardware [None req-1b5d4941-f26d-40d2-8fb4-3b7b7d57038b tempest-ServersAdminTestJSON-1165817432 tempest-ServersAdminTestJSON-1165817432-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 568.826637] env[62070]: DEBUG nova.virt.hardware [None 
req-1b5d4941-f26d-40d2-8fb4-3b7b7d57038b tempest-ServersAdminTestJSON-1165817432 tempest-ServersAdminTestJSON-1165817432-project-member] Got 1 possible topologies {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 568.826795] env[62070]: DEBUG nova.virt.hardware [None req-1b5d4941-f26d-40d2-8fb4-3b7b7d57038b tempest-ServersAdminTestJSON-1165817432 tempest-ServersAdminTestJSON-1165817432-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 568.826969] env[62070]: DEBUG nova.virt.hardware [None req-1b5d4941-f26d-40d2-8fb4-3b7b7d57038b tempest-ServersAdminTestJSON-1165817432 tempest-ServersAdminTestJSON-1165817432-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 568.827880] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55c95e39-6893-45e7-b6d3-e0918042fac1 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 568.838936] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-822f1e54-59d8-40c1-a312-73f0de3e8c53 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 569.171769] env[62070]: DEBUG oslo_concurrency.lockutils [None req-1f16b43e-5915-487b-ba0a-4bf16e720213 tempest-ImagesNegativeTestJSON-1253036663 tempest-ImagesNegativeTestJSON-1253036663-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 569.228163] env[62070]: DEBUG oslo_concurrency.lockutils [None req-ff380498-571f-460a-ac0d-214b8f51e56f tempest-ServersAdminTestJSON-1165817432 tempest-ServersAdminTestJSON-1165817432-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.469s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 569.228163] env[62070]: DEBUG nova.compute.manager [None req-ff380498-571f-460a-ac0d-214b8f51e56f tempest-ServersAdminTestJSON-1165817432 tempest-ServersAdminTestJSON-1165817432-project-member] [instance: 5936aded-90fc-4f77-8103-8c9e1912379c] Start building networks asynchronously for instance. 
{{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 569.232725] env[62070]: DEBUG oslo_concurrency.lockutils [None req-0ad29952-ef64-49f7-9793-5bdd8a8330c4 tempest-ServerExternalEventsTest-1235707480 tempest-ServerExternalEventsTest-1235707480-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 10.020s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 569.739428] env[62070]: DEBUG nova.compute.utils [None req-ff380498-571f-460a-ac0d-214b8f51e56f tempest-ServersAdminTestJSON-1165817432 tempest-ServersAdminTestJSON-1165817432-project-member] Using /dev/sd instead of None {{(pid=62070) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 569.742152] env[62070]: DEBUG nova.compute.manager [None req-ff380498-571f-460a-ac0d-214b8f51e56f tempest-ServersAdminTestJSON-1165817432 tempest-ServersAdminTestJSON-1165817432-project-member] [instance: 5936aded-90fc-4f77-8103-8c9e1912379c] Allocating IP information in the background. {{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 569.742344] env[62070]: DEBUG nova.network.neutron [None req-ff380498-571f-460a-ac0d-214b8f51e56f tempest-ServersAdminTestJSON-1165817432 tempest-ServersAdminTestJSON-1165817432-project-member] [instance: 5936aded-90fc-4f77-8103-8c9e1912379c] allocate_for_instance() {{(pid=62070) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 569.848205] env[62070]: DEBUG nova.policy [None req-ff380498-571f-460a-ac0d-214b8f51e56f tempest-ServersAdminTestJSON-1165817432 tempest-ServersAdminTestJSON-1165817432-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '293b5493dba141d6bdf93c1e8d00abf0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1fc015059e894364ae55cc21204693d3', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62070) authorize /opt/stack/nova/nova/policy.py:201}} [ 570.165788] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e565ed2f-29e6-44a7-b153-f4c21292e184 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 570.175532] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61e08763-7c4c-4218-84ac-e11a5633e18e {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 570.211711] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c2cf50f-fd59-4886-a476-bed51e7fd92a {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 570.220491] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd813820-7923-4892-8b64-33435dbe410f {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 570.243288] env[62070]: DEBUG nova.compute.provider_tree [None req-0ad29952-ef64-49f7-9793-5bdd8a8330c4 tempest-ServerExternalEventsTest-1235707480 
tempest-ServerExternalEventsTest-1235707480-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 570.246217] env[62070]: DEBUG nova.compute.manager [None req-ff380498-571f-460a-ac0d-214b8f51e56f tempest-ServersAdminTestJSON-1165817432 tempest-ServersAdminTestJSON-1165817432-project-member] [instance: 5936aded-90fc-4f77-8103-8c9e1912379c] Start building block device mappings for instance. {{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 570.502529] env[62070]: DEBUG nova.compute.manager [req-ef9e73d7-86ee-4eac-85c8-7de378d2e559 req-5d96c01f-1646-42b3-8aaa-82766de8379d service nova] [instance: e3306a4a-b1cf-4d13-b4de-bd6a6a2e2619] Received event network-changed-bd48685f-7c51-4b6b-8b0b-6ac758a2eb4d {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 570.502529] env[62070]: DEBUG nova.compute.manager [req-ef9e73d7-86ee-4eac-85c8-7de378d2e559 req-5d96c01f-1646-42b3-8aaa-82766de8379d service nova] [instance: e3306a4a-b1cf-4d13-b4de-bd6a6a2e2619] Refreshing instance network info cache due to event network-changed-bd48685f-7c51-4b6b-8b0b-6ac758a2eb4d. {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 570.502529] env[62070]: DEBUG oslo_concurrency.lockutils [req-ef9e73d7-86ee-4eac-85c8-7de378d2e559 req-5d96c01f-1646-42b3-8aaa-82766de8379d service nova] Acquiring lock "refresh_cache-e3306a4a-b1cf-4d13-b4de-bd6a6a2e2619" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 570.502529] env[62070]: DEBUG oslo_concurrency.lockutils [req-ef9e73d7-86ee-4eac-85c8-7de378d2e559 req-5d96c01f-1646-42b3-8aaa-82766de8379d service nova] Acquired lock "refresh_cache-e3306a4a-b1cf-4d13-b4de-bd6a6a2e2619" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 570.502529] env[62070]: DEBUG nova.network.neutron [req-ef9e73d7-86ee-4eac-85c8-7de378d2e559 req-5d96c01f-1646-42b3-8aaa-82766de8379d service nova] [instance: e3306a4a-b1cf-4d13-b4de-bd6a6a2e2619] Refreshing network info cache for port bd48685f-7c51-4b6b-8b0b-6ac758a2eb4d {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 570.521983] env[62070]: ERROR nova.compute.manager [None req-1b5d4941-f26d-40d2-8fb4-3b7b7d57038b tempest-ServersAdminTestJSON-1165817432 tempest-ServersAdminTestJSON-1165817432-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port bd48685f-7c51-4b6b-8b0b-6ac758a2eb4d, please check neutron logs for more information. 
[ 570.521983] env[62070]: ERROR nova.compute.manager Traceback (most recent call last): [ 570.521983] env[62070]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 570.521983] env[62070]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 570.521983] env[62070]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 570.521983] env[62070]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 570.521983] env[62070]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 570.521983] env[62070]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 570.521983] env[62070]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 570.521983] env[62070]: ERROR nova.compute.manager self.force_reraise() [ 570.521983] env[62070]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 570.521983] env[62070]: ERROR nova.compute.manager raise self.value [ 570.521983] env[62070]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 570.521983] env[62070]: ERROR nova.compute.manager updated_port = self._update_port( [ 570.521983] env[62070]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 570.521983] env[62070]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 570.522833] env[62070]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 570.522833] env[62070]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 570.522833] env[62070]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port bd48685f-7c51-4b6b-8b0b-6ac758a2eb4d, please check neutron logs for more information. 
[ 570.522833] env[62070]: ERROR nova.compute.manager [ 570.522833] env[62070]: Traceback (most recent call last): [ 570.522833] env[62070]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 570.522833] env[62070]: listener.cb(fileno) [ 570.522833] env[62070]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 570.522833] env[62070]: result = function(*args, **kwargs) [ 570.522833] env[62070]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 570.522833] env[62070]: return func(*args, **kwargs) [ 570.522833] env[62070]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 570.522833] env[62070]: raise e [ 570.522833] env[62070]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 570.522833] env[62070]: nwinfo = self.network_api.allocate_for_instance( [ 570.522833] env[62070]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 570.522833] env[62070]: created_port_ids = self._update_ports_for_instance( [ 570.522833] env[62070]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 570.522833] env[62070]: with excutils.save_and_reraise_exception(): [ 570.522833] env[62070]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 570.522833] env[62070]: self.force_reraise() [ 570.522833] env[62070]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 570.522833] env[62070]: raise self.value [ 570.522833] env[62070]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 570.522833] env[62070]: updated_port = self._update_port( [ 570.522833] env[62070]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 570.522833] env[62070]: _ensure_no_port_binding_failure(port) [ 570.522833] env[62070]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 570.522833] env[62070]: raise exception.PortBindingFailed(port_id=port['id']) [ 570.523736] env[62070]: nova.exception.PortBindingFailed: Binding failed for port bd48685f-7c51-4b6b-8b0b-6ac758a2eb4d, please check neutron logs for more information. [ 570.523736] env[62070]: Removing descriptor: 16 [ 570.523736] env[62070]: ERROR nova.compute.manager [None req-1b5d4941-f26d-40d2-8fb4-3b7b7d57038b tempest-ServersAdminTestJSON-1165817432 tempest-ServersAdminTestJSON-1165817432-project-member] [instance: e3306a4a-b1cf-4d13-b4de-bd6a6a2e2619] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port bd48685f-7c51-4b6b-8b0b-6ac758a2eb4d, please check neutron logs for more information. 
[ 570.523736] env[62070]: ERROR nova.compute.manager [instance: e3306a4a-b1cf-4d13-b4de-bd6a6a2e2619] Traceback (most recent call last): [ 570.523736] env[62070]: ERROR nova.compute.manager [instance: e3306a4a-b1cf-4d13-b4de-bd6a6a2e2619] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 570.523736] env[62070]: ERROR nova.compute.manager [instance: e3306a4a-b1cf-4d13-b4de-bd6a6a2e2619] yield resources [ 570.523736] env[62070]: ERROR nova.compute.manager [instance: e3306a4a-b1cf-4d13-b4de-bd6a6a2e2619] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 570.523736] env[62070]: ERROR nova.compute.manager [instance: e3306a4a-b1cf-4d13-b4de-bd6a6a2e2619] self.driver.spawn(context, instance, image_meta, [ 570.523736] env[62070]: ERROR nova.compute.manager [instance: e3306a4a-b1cf-4d13-b4de-bd6a6a2e2619] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 570.523736] env[62070]: ERROR nova.compute.manager [instance: e3306a4a-b1cf-4d13-b4de-bd6a6a2e2619] self._vmops.spawn(context, instance, image_meta, injected_files, [ 570.523736] env[62070]: ERROR nova.compute.manager [instance: e3306a4a-b1cf-4d13-b4de-bd6a6a2e2619] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 570.523736] env[62070]: ERROR nova.compute.manager [instance: e3306a4a-b1cf-4d13-b4de-bd6a6a2e2619] vm_ref = self.build_virtual_machine(instance, [ 570.524182] env[62070]: ERROR nova.compute.manager [instance: e3306a4a-b1cf-4d13-b4de-bd6a6a2e2619] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 570.524182] env[62070]: ERROR nova.compute.manager [instance: e3306a4a-b1cf-4d13-b4de-bd6a6a2e2619] vif_infos = vmwarevif.get_vif_info(self._session, [ 570.524182] env[62070]: ERROR nova.compute.manager [instance: e3306a4a-b1cf-4d13-b4de-bd6a6a2e2619] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 570.524182] env[62070]: ERROR nova.compute.manager [instance: e3306a4a-b1cf-4d13-b4de-bd6a6a2e2619] for vif in network_info: [ 570.524182] env[62070]: ERROR nova.compute.manager [instance: e3306a4a-b1cf-4d13-b4de-bd6a6a2e2619] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 570.524182] env[62070]: ERROR nova.compute.manager [instance: e3306a4a-b1cf-4d13-b4de-bd6a6a2e2619] return self._sync_wrapper(fn, *args, **kwargs) [ 570.524182] env[62070]: ERROR nova.compute.manager [instance: e3306a4a-b1cf-4d13-b4de-bd6a6a2e2619] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 570.524182] env[62070]: ERROR nova.compute.manager [instance: e3306a4a-b1cf-4d13-b4de-bd6a6a2e2619] self.wait() [ 570.524182] env[62070]: ERROR nova.compute.manager [instance: e3306a4a-b1cf-4d13-b4de-bd6a6a2e2619] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 570.524182] env[62070]: ERROR nova.compute.manager [instance: e3306a4a-b1cf-4d13-b4de-bd6a6a2e2619] self[:] = self._gt.wait() [ 570.524182] env[62070]: ERROR nova.compute.manager [instance: e3306a4a-b1cf-4d13-b4de-bd6a6a2e2619] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 570.524182] env[62070]: ERROR nova.compute.manager [instance: e3306a4a-b1cf-4d13-b4de-bd6a6a2e2619] return self._exit_event.wait() [ 570.524182] env[62070]: ERROR nova.compute.manager [instance: e3306a4a-b1cf-4d13-b4de-bd6a6a2e2619] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 570.524586] env[62070]: ERROR 
nova.compute.manager [instance: e3306a4a-b1cf-4d13-b4de-bd6a6a2e2619] result = hub.switch() [ 570.524586] env[62070]: ERROR nova.compute.manager [instance: e3306a4a-b1cf-4d13-b4de-bd6a6a2e2619] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 570.524586] env[62070]: ERROR nova.compute.manager [instance: e3306a4a-b1cf-4d13-b4de-bd6a6a2e2619] return self.greenlet.switch() [ 570.524586] env[62070]: ERROR nova.compute.manager [instance: e3306a4a-b1cf-4d13-b4de-bd6a6a2e2619] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 570.524586] env[62070]: ERROR nova.compute.manager [instance: e3306a4a-b1cf-4d13-b4de-bd6a6a2e2619] result = function(*args, **kwargs) [ 570.524586] env[62070]: ERROR nova.compute.manager [instance: e3306a4a-b1cf-4d13-b4de-bd6a6a2e2619] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 570.524586] env[62070]: ERROR nova.compute.manager [instance: e3306a4a-b1cf-4d13-b4de-bd6a6a2e2619] return func(*args, **kwargs) [ 570.524586] env[62070]: ERROR nova.compute.manager [instance: e3306a4a-b1cf-4d13-b4de-bd6a6a2e2619] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 570.524586] env[62070]: ERROR nova.compute.manager [instance: e3306a4a-b1cf-4d13-b4de-bd6a6a2e2619] raise e [ 570.524586] env[62070]: ERROR nova.compute.manager [instance: e3306a4a-b1cf-4d13-b4de-bd6a6a2e2619] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 570.524586] env[62070]: ERROR nova.compute.manager [instance: e3306a4a-b1cf-4d13-b4de-bd6a6a2e2619] nwinfo = self.network_api.allocate_for_instance( [ 570.524586] env[62070]: ERROR nova.compute.manager [instance: e3306a4a-b1cf-4d13-b4de-bd6a6a2e2619] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 570.524586] env[62070]: ERROR nova.compute.manager [instance: e3306a4a-b1cf-4d13-b4de-bd6a6a2e2619] created_port_ids = self._update_ports_for_instance( [ 570.525449] env[62070]: ERROR nova.compute.manager [instance: e3306a4a-b1cf-4d13-b4de-bd6a6a2e2619] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 570.525449] env[62070]: ERROR nova.compute.manager [instance: e3306a4a-b1cf-4d13-b4de-bd6a6a2e2619] with excutils.save_and_reraise_exception(): [ 570.525449] env[62070]: ERROR nova.compute.manager [instance: e3306a4a-b1cf-4d13-b4de-bd6a6a2e2619] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 570.525449] env[62070]: ERROR nova.compute.manager [instance: e3306a4a-b1cf-4d13-b4de-bd6a6a2e2619] self.force_reraise() [ 570.525449] env[62070]: ERROR nova.compute.manager [instance: e3306a4a-b1cf-4d13-b4de-bd6a6a2e2619] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 570.525449] env[62070]: ERROR nova.compute.manager [instance: e3306a4a-b1cf-4d13-b4de-bd6a6a2e2619] raise self.value [ 570.525449] env[62070]: ERROR nova.compute.manager [instance: e3306a4a-b1cf-4d13-b4de-bd6a6a2e2619] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 570.525449] env[62070]: ERROR nova.compute.manager [instance: e3306a4a-b1cf-4d13-b4de-bd6a6a2e2619] updated_port = self._update_port( [ 570.525449] env[62070]: ERROR nova.compute.manager [instance: e3306a4a-b1cf-4d13-b4de-bd6a6a2e2619] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 570.525449] 
env[62070]: ERROR nova.compute.manager [instance: e3306a4a-b1cf-4d13-b4de-bd6a6a2e2619] _ensure_no_port_binding_failure(port) [ 570.525449] env[62070]: ERROR nova.compute.manager [instance: e3306a4a-b1cf-4d13-b4de-bd6a6a2e2619] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 570.525449] env[62070]: ERROR nova.compute.manager [instance: e3306a4a-b1cf-4d13-b4de-bd6a6a2e2619] raise exception.PortBindingFailed(port_id=port['id']) [ 570.525820] env[62070]: ERROR nova.compute.manager [instance: e3306a4a-b1cf-4d13-b4de-bd6a6a2e2619] nova.exception.PortBindingFailed: Binding failed for port bd48685f-7c51-4b6b-8b0b-6ac758a2eb4d, please check neutron logs for more information. [ 570.525820] env[62070]: ERROR nova.compute.manager [instance: e3306a4a-b1cf-4d13-b4de-bd6a6a2e2619] [ 570.525820] env[62070]: INFO nova.compute.manager [None req-1b5d4941-f26d-40d2-8fb4-3b7b7d57038b tempest-ServersAdminTestJSON-1165817432 tempest-ServersAdminTestJSON-1165817432-project-member] [instance: e3306a4a-b1cf-4d13-b4de-bd6a6a2e2619] Terminating instance [ 570.525820] env[62070]: DEBUG oslo_concurrency.lockutils [None req-1b5d4941-f26d-40d2-8fb4-3b7b7d57038b tempest-ServersAdminTestJSON-1165817432 tempest-ServersAdminTestJSON-1165817432-project-member] Acquiring lock "refresh_cache-e3306a4a-b1cf-4d13-b4de-bd6a6a2e2619" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 570.660077] env[62070]: DEBUG nova.network.neutron [None req-ff380498-571f-460a-ac0d-214b8f51e56f tempest-ServersAdminTestJSON-1165817432 tempest-ServersAdminTestJSON-1165817432-project-member] [instance: 5936aded-90fc-4f77-8103-8c9e1912379c] Successfully created port: 611560c7-4689-481c-9fd1-efc64eae1fd4 {{(pid=62070) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 570.747106] env[62070]: DEBUG nova.scheduler.client.report [None req-0ad29952-ef64-49f7-9793-5bdd8a8330c4 tempest-ServerExternalEventsTest-1235707480 tempest-ServerExternalEventsTest-1235707480-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 571.070188] env[62070]: DEBUG nova.network.neutron [req-ef9e73d7-86ee-4eac-85c8-7de378d2e559 req-5d96c01f-1646-42b3-8aaa-82766de8379d service nova] [instance: e3306a4a-b1cf-4d13-b4de-bd6a6a2e2619] Instance cache missing network info. 
{{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 571.105459] env[62070]: DEBUG nova.compute.manager [None req-f991f421-96f2-450b-93da-babe16b13d4d tempest-ServerDiagnosticsV248Test-296067008 tempest-ServerDiagnosticsV248Test-296067008-project-admin] [instance: 283e7488-1240-475f-a74d-809251950774] Checking state {{(pid=62070) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 571.108837] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-592e18ad-6ecd-4c07-8abf-9307182059ba {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 571.118432] env[62070]: INFO nova.compute.manager [None req-f991f421-96f2-450b-93da-babe16b13d4d tempest-ServerDiagnosticsV248Test-296067008 tempest-ServerDiagnosticsV248Test-296067008-project-admin] [instance: 283e7488-1240-475f-a74d-809251950774] Retrieving diagnostics [ 571.119678] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f76cabb7-42dd-48e8-b6d3-ecdca6e7c7ce {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 571.259484] env[62070]: DEBUG oslo_concurrency.lockutils [None req-0ad29952-ef64-49f7-9793-5bdd8a8330c4 tempest-ServerExternalEventsTest-1235707480 tempest-ServerExternalEventsTest-1235707480-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.027s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 571.260125] env[62070]: ERROR nova.compute.manager [None req-0ad29952-ef64-49f7-9793-5bdd8a8330c4 tempest-ServerExternalEventsTest-1235707480 tempest-ServerExternalEventsTest-1235707480-project-member] [instance: 7221a720-8ab9-44fd-abe2-8f8fc19b6433] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 38930f3e-c522-4232-9100-7512b09dda78, please check neutron logs for more information. 
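The same PortBindingFailed appears twice per instance in these logs because network allocation runs in a separate eventlet greenthread: the first traceback originates in that greenthread (the _allocate_network_async frames), and the exception is raised a second time, now attributed to the instance, when the spawn path iterates network_info and waits on that greenthread (the _sync_wrapper and self._gt.wait() frames in the traceback that follows). The snippet below is a generic eventlet illustration of that deferred re-raise, not Nova's code; allocate_network is a hypothetical stand-in.

    import eventlet

    def allocate_network():
        # Stand-in for the async network allocation that fails above; the
        # real coroutine asks Neutron for ports and re-raises any failure.
        raise RuntimeError("Binding failed for port ...")

    gt = eventlet.spawn(allocate_network)
    # The failure is stored on the greenthread and only resurfaces when the
    # result is consumed, which is what the self._gt.wait() frame does while
    # the driver iterates network_info during spawn.
    try:
        gt.wait()
    except RuntimeError as exc:
        print("network allocation failed:", exc)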
[ 571.260125] env[62070]: ERROR nova.compute.manager [instance: 7221a720-8ab9-44fd-abe2-8f8fc19b6433] Traceback (most recent call last): [ 571.260125] env[62070]: ERROR nova.compute.manager [instance: 7221a720-8ab9-44fd-abe2-8f8fc19b6433] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 571.260125] env[62070]: ERROR nova.compute.manager [instance: 7221a720-8ab9-44fd-abe2-8f8fc19b6433] self.driver.spawn(context, instance, image_meta, [ 571.260125] env[62070]: ERROR nova.compute.manager [instance: 7221a720-8ab9-44fd-abe2-8f8fc19b6433] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 571.260125] env[62070]: ERROR nova.compute.manager [instance: 7221a720-8ab9-44fd-abe2-8f8fc19b6433] self._vmops.spawn(context, instance, image_meta, injected_files, [ 571.260125] env[62070]: ERROR nova.compute.manager [instance: 7221a720-8ab9-44fd-abe2-8f8fc19b6433] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 571.260125] env[62070]: ERROR nova.compute.manager [instance: 7221a720-8ab9-44fd-abe2-8f8fc19b6433] vm_ref = self.build_virtual_machine(instance, [ 571.260125] env[62070]: ERROR nova.compute.manager [instance: 7221a720-8ab9-44fd-abe2-8f8fc19b6433] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 571.260125] env[62070]: ERROR nova.compute.manager [instance: 7221a720-8ab9-44fd-abe2-8f8fc19b6433] vif_infos = vmwarevif.get_vif_info(self._session, [ 571.260125] env[62070]: ERROR nova.compute.manager [instance: 7221a720-8ab9-44fd-abe2-8f8fc19b6433] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 571.260446] env[62070]: ERROR nova.compute.manager [instance: 7221a720-8ab9-44fd-abe2-8f8fc19b6433] for vif in network_info: [ 571.260446] env[62070]: ERROR nova.compute.manager [instance: 7221a720-8ab9-44fd-abe2-8f8fc19b6433] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 571.260446] env[62070]: ERROR nova.compute.manager [instance: 7221a720-8ab9-44fd-abe2-8f8fc19b6433] return self._sync_wrapper(fn, *args, **kwargs) [ 571.260446] env[62070]: ERROR nova.compute.manager [instance: 7221a720-8ab9-44fd-abe2-8f8fc19b6433] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 571.260446] env[62070]: ERROR nova.compute.manager [instance: 7221a720-8ab9-44fd-abe2-8f8fc19b6433] self.wait() [ 571.260446] env[62070]: ERROR nova.compute.manager [instance: 7221a720-8ab9-44fd-abe2-8f8fc19b6433] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 571.260446] env[62070]: ERROR nova.compute.manager [instance: 7221a720-8ab9-44fd-abe2-8f8fc19b6433] self[:] = self._gt.wait() [ 571.260446] env[62070]: ERROR nova.compute.manager [instance: 7221a720-8ab9-44fd-abe2-8f8fc19b6433] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 571.260446] env[62070]: ERROR nova.compute.manager [instance: 7221a720-8ab9-44fd-abe2-8f8fc19b6433] return self._exit_event.wait() [ 571.260446] env[62070]: ERROR nova.compute.manager [instance: 7221a720-8ab9-44fd-abe2-8f8fc19b6433] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 571.260446] env[62070]: ERROR nova.compute.manager [instance: 7221a720-8ab9-44fd-abe2-8f8fc19b6433] result = hub.switch() [ 571.260446] env[62070]: ERROR nova.compute.manager [instance: 7221a720-8ab9-44fd-abe2-8f8fc19b6433] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
571.260446] env[62070]: ERROR nova.compute.manager [instance: 7221a720-8ab9-44fd-abe2-8f8fc19b6433] return self.greenlet.switch() [ 571.260791] env[62070]: ERROR nova.compute.manager [instance: 7221a720-8ab9-44fd-abe2-8f8fc19b6433] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 571.260791] env[62070]: ERROR nova.compute.manager [instance: 7221a720-8ab9-44fd-abe2-8f8fc19b6433] result = function(*args, **kwargs) [ 571.260791] env[62070]: ERROR nova.compute.manager [instance: 7221a720-8ab9-44fd-abe2-8f8fc19b6433] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 571.260791] env[62070]: ERROR nova.compute.manager [instance: 7221a720-8ab9-44fd-abe2-8f8fc19b6433] return func(*args, **kwargs) [ 571.260791] env[62070]: ERROR nova.compute.manager [instance: 7221a720-8ab9-44fd-abe2-8f8fc19b6433] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 571.260791] env[62070]: ERROR nova.compute.manager [instance: 7221a720-8ab9-44fd-abe2-8f8fc19b6433] raise e [ 571.260791] env[62070]: ERROR nova.compute.manager [instance: 7221a720-8ab9-44fd-abe2-8f8fc19b6433] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 571.260791] env[62070]: ERROR nova.compute.manager [instance: 7221a720-8ab9-44fd-abe2-8f8fc19b6433] nwinfo = self.network_api.allocate_for_instance( [ 571.260791] env[62070]: ERROR nova.compute.manager [instance: 7221a720-8ab9-44fd-abe2-8f8fc19b6433] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 571.260791] env[62070]: ERROR nova.compute.manager [instance: 7221a720-8ab9-44fd-abe2-8f8fc19b6433] created_port_ids = self._update_ports_for_instance( [ 571.260791] env[62070]: ERROR nova.compute.manager [instance: 7221a720-8ab9-44fd-abe2-8f8fc19b6433] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 571.260791] env[62070]: ERROR nova.compute.manager [instance: 7221a720-8ab9-44fd-abe2-8f8fc19b6433] with excutils.save_and_reraise_exception(): [ 571.260791] env[62070]: ERROR nova.compute.manager [instance: 7221a720-8ab9-44fd-abe2-8f8fc19b6433] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 571.261338] env[62070]: ERROR nova.compute.manager [instance: 7221a720-8ab9-44fd-abe2-8f8fc19b6433] self.force_reraise() [ 571.261338] env[62070]: ERROR nova.compute.manager [instance: 7221a720-8ab9-44fd-abe2-8f8fc19b6433] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 571.261338] env[62070]: ERROR nova.compute.manager [instance: 7221a720-8ab9-44fd-abe2-8f8fc19b6433] raise self.value [ 571.261338] env[62070]: ERROR nova.compute.manager [instance: 7221a720-8ab9-44fd-abe2-8f8fc19b6433] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 571.261338] env[62070]: ERROR nova.compute.manager [instance: 7221a720-8ab9-44fd-abe2-8f8fc19b6433] updated_port = self._update_port( [ 571.261338] env[62070]: ERROR nova.compute.manager [instance: 7221a720-8ab9-44fd-abe2-8f8fc19b6433] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 571.261338] env[62070]: ERROR nova.compute.manager [instance: 7221a720-8ab9-44fd-abe2-8f8fc19b6433] _ensure_no_port_binding_failure(port) [ 571.261338] env[62070]: ERROR nova.compute.manager [instance: 7221a720-8ab9-44fd-abe2-8f8fc19b6433] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 571.261338] env[62070]: ERROR nova.compute.manager [instance: 7221a720-8ab9-44fd-abe2-8f8fc19b6433] raise exception.PortBindingFailed(port_id=port['id']) [ 571.261338] env[62070]: ERROR nova.compute.manager [instance: 7221a720-8ab9-44fd-abe2-8f8fc19b6433] nova.exception.PortBindingFailed: Binding failed for port 38930f3e-c522-4232-9100-7512b09dda78, please check neutron logs for more information. [ 571.261338] env[62070]: ERROR nova.compute.manager [instance: 7221a720-8ab9-44fd-abe2-8f8fc19b6433] [ 571.261734] env[62070]: DEBUG nova.compute.utils [None req-0ad29952-ef64-49f7-9793-5bdd8a8330c4 tempest-ServerExternalEventsTest-1235707480 tempest-ServerExternalEventsTest-1235707480-project-member] [instance: 7221a720-8ab9-44fd-abe2-8f8fc19b6433] Binding failed for port 38930f3e-c522-4232-9100-7512b09dda78, please check neutron logs for more information. {{(pid=62070) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 571.267616] env[62070]: DEBUG nova.compute.manager [None req-ff380498-571f-460a-ac0d-214b8f51e56f tempest-ServersAdminTestJSON-1165817432 tempest-ServersAdminTestJSON-1165817432-project-member] [instance: 5936aded-90fc-4f77-8103-8c9e1912379c] Start spawning the instance on the hypervisor. {{(pid=62070) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 571.275414] env[62070]: DEBUG nova.compute.manager [None req-0ad29952-ef64-49f7-9793-5bdd8a8330c4 tempest-ServerExternalEventsTest-1235707480 tempest-ServerExternalEventsTest-1235707480-project-member] [instance: 7221a720-8ab9-44fd-abe2-8f8fc19b6433] Build of instance 7221a720-8ab9-44fd-abe2-8f8fc19b6433 was re-scheduled: Binding failed for port 38930f3e-c522-4232-9100-7512b09dda78, please check neutron logs for more information. 
{{(pid=62070) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 571.275414] env[62070]: DEBUG nova.compute.manager [None req-0ad29952-ef64-49f7-9793-5bdd8a8330c4 tempest-ServerExternalEventsTest-1235707480 tempest-ServerExternalEventsTest-1235707480-project-member] [instance: 7221a720-8ab9-44fd-abe2-8f8fc19b6433] Unplugging VIFs for instance {{(pid=62070) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 571.275414] env[62070]: DEBUG oslo_concurrency.lockutils [None req-0ad29952-ef64-49f7-9793-5bdd8a8330c4 tempest-ServerExternalEventsTest-1235707480 tempest-ServerExternalEventsTest-1235707480-project-member] Acquiring lock "refresh_cache-7221a720-8ab9-44fd-abe2-8f8fc19b6433" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 571.275414] env[62070]: DEBUG oslo_concurrency.lockutils [None req-0ad29952-ef64-49f7-9793-5bdd8a8330c4 tempest-ServerExternalEventsTest-1235707480 tempest-ServerExternalEventsTest-1235707480-project-member] Acquired lock "refresh_cache-7221a720-8ab9-44fd-abe2-8f8fc19b6433" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 571.276048] env[62070]: DEBUG nova.network.neutron [None req-0ad29952-ef64-49f7-9793-5bdd8a8330c4 tempest-ServerExternalEventsTest-1235707480 tempest-ServerExternalEventsTest-1235707480-project-member] [instance: 7221a720-8ab9-44fd-abe2-8f8fc19b6433] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 571.278970] env[62070]: DEBUG oslo_concurrency.lockutils [None req-7301dc1c-6dd9-44f9-a50e-255658085cb3 tempest-ServerDiagnosticsTest-24419400 tempest-ServerDiagnosticsTest-24419400-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 11.783s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 571.283141] env[62070]: INFO nova.compute.claims [None req-7301dc1c-6dd9-44f9-a50e-255658085cb3 tempest-ServerDiagnosticsTest-24419400 tempest-ServerDiagnosticsTest-24419400-project-member] [instance: 317f20e9-6ba1-4b41-b298-5dd844f323ac] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 571.315541] env[62070]: DEBUG nova.virt.hardware [None req-ff380498-571f-460a-ac0d-214b8f51e56f tempest-ServersAdminTestJSON-1165817432 tempest-ServersAdminTestJSON-1165817432-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T09:21:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T09:21:17Z,direct_url=,disk_format='vmdk',id=43ea607c-7ece-4601-9b11-75c6a16aa7dd,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9d42cb2bbadf40d6b35f237f71234611',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T09:21:18Z,virtual_size=,visibility=), allow threads: False {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 571.316140] env[62070]: DEBUG nova.virt.hardware [None req-ff380498-571f-460a-ac0d-214b8f51e56f 
tempest-ServersAdminTestJSON-1165817432 tempest-ServersAdminTestJSON-1165817432-project-member] Flavor limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 571.316140] env[62070]: DEBUG nova.virt.hardware [None req-ff380498-571f-460a-ac0d-214b8f51e56f tempest-ServersAdminTestJSON-1165817432 tempest-ServersAdminTestJSON-1165817432-project-member] Image limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 571.316284] env[62070]: DEBUG nova.virt.hardware [None req-ff380498-571f-460a-ac0d-214b8f51e56f tempest-ServersAdminTestJSON-1165817432 tempest-ServersAdminTestJSON-1165817432-project-member] Flavor pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 571.316357] env[62070]: DEBUG nova.virt.hardware [None req-ff380498-571f-460a-ac0d-214b8f51e56f tempest-ServersAdminTestJSON-1165817432 tempest-ServersAdminTestJSON-1165817432-project-member] Image pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 571.316474] env[62070]: DEBUG nova.virt.hardware [None req-ff380498-571f-460a-ac0d-214b8f51e56f tempest-ServersAdminTestJSON-1165817432 tempest-ServersAdminTestJSON-1165817432-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 571.316677] env[62070]: DEBUG nova.virt.hardware [None req-ff380498-571f-460a-ac0d-214b8f51e56f tempest-ServersAdminTestJSON-1165817432 tempest-ServersAdminTestJSON-1165817432-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 571.316899] env[62070]: DEBUG nova.virt.hardware [None req-ff380498-571f-460a-ac0d-214b8f51e56f tempest-ServersAdminTestJSON-1165817432 tempest-ServersAdminTestJSON-1165817432-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 571.316993] env[62070]: DEBUG nova.virt.hardware [None req-ff380498-571f-460a-ac0d-214b8f51e56f tempest-ServersAdminTestJSON-1165817432 tempest-ServersAdminTestJSON-1165817432-project-member] Got 1 possible topologies {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 571.320675] env[62070]: DEBUG nova.virt.hardware [None req-ff380498-571f-460a-ac0d-214b8f51e56f tempest-ServersAdminTestJSON-1165817432 tempest-ServersAdminTestJSON-1165817432-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 571.320962] env[62070]: DEBUG nova.virt.hardware [None req-ff380498-571f-460a-ac0d-214b8f51e56f tempest-ServersAdminTestJSON-1165817432 tempest-ServersAdminTestJSON-1165817432-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 571.322263] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8265f402-7ff4-46f4-a503-b82ea1d2c552 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 571.333129] env[62070]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-247478d5-bdd6-4aeb-aebf-4587fcf7feba {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 571.507712] env[62070]: DEBUG nova.network.neutron [req-ef9e73d7-86ee-4eac-85c8-7de378d2e559 req-5d96c01f-1646-42b3-8aaa-82766de8379d service nova] [instance: e3306a4a-b1cf-4d13-b4de-bd6a6a2e2619] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 571.848737] env[62070]: DEBUG nova.network.neutron [None req-0ad29952-ef64-49f7-9793-5bdd8a8330c4 tempest-ServerExternalEventsTest-1235707480 tempest-ServerExternalEventsTest-1235707480-project-member] [instance: 7221a720-8ab9-44fd-abe2-8f8fc19b6433] Instance cache missing network info. {{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 572.016352] env[62070]: DEBUG oslo_concurrency.lockutils [req-ef9e73d7-86ee-4eac-85c8-7de378d2e559 req-5d96c01f-1646-42b3-8aaa-82766de8379d service nova] Releasing lock "refresh_cache-e3306a4a-b1cf-4d13-b4de-bd6a6a2e2619" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 572.017464] env[62070]: DEBUG oslo_concurrency.lockutils [None req-1b5d4941-f26d-40d2-8fb4-3b7b7d57038b tempest-ServersAdminTestJSON-1165817432 tempest-ServersAdminTestJSON-1165817432-project-member] Acquired lock "refresh_cache-e3306a4a-b1cf-4d13-b4de-bd6a6a2e2619" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 572.017464] env[62070]: DEBUG nova.network.neutron [None req-1b5d4941-f26d-40d2-8fb4-3b7b7d57038b tempest-ServersAdminTestJSON-1165817432 tempest-ServersAdminTestJSON-1165817432-project-member] [instance: e3306a4a-b1cf-4d13-b4de-bd6a6a2e2619] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 572.072354] env[62070]: DEBUG nova.network.neutron [None req-0ad29952-ef64-49f7-9793-5bdd8a8330c4 tempest-ServerExternalEventsTest-1235707480 tempest-ServerExternalEventsTest-1235707480-project-member] [instance: 7221a720-8ab9-44fd-abe2-8f8fc19b6433] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 572.586255] env[62070]: DEBUG oslo_concurrency.lockutils [None req-0ad29952-ef64-49f7-9793-5bdd8a8330c4 tempest-ServerExternalEventsTest-1235707480 tempest-ServerExternalEventsTest-1235707480-project-member] Releasing lock "refresh_cache-7221a720-8ab9-44fd-abe2-8f8fc19b6433" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 572.586565] env[62070]: DEBUG nova.compute.manager [None req-0ad29952-ef64-49f7-9793-5bdd8a8330c4 tempest-ServerExternalEventsTest-1235707480 tempest-ServerExternalEventsTest-1235707480-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62070) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 572.586765] env[62070]: DEBUG nova.compute.manager [None req-0ad29952-ef64-49f7-9793-5bdd8a8330c4 tempest-ServerExternalEventsTest-1235707480 tempest-ServerExternalEventsTest-1235707480-project-member] [instance: 7221a720-8ab9-44fd-abe2-8f8fc19b6433] Deallocating network for instance {{(pid=62070) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 572.586973] env[62070]: DEBUG nova.network.neutron [None req-0ad29952-ef64-49f7-9793-5bdd8a8330c4 tempest-ServerExternalEventsTest-1235707480 tempest-ServerExternalEventsTest-1235707480-project-member] [instance: 7221a720-8ab9-44fd-abe2-8f8fc19b6433] deallocate_for_instance() {{(pid=62070) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 572.665188] env[62070]: DEBUG nova.network.neutron [None req-1b5d4941-f26d-40d2-8fb4-3b7b7d57038b tempest-ServersAdminTestJSON-1165817432 tempest-ServersAdminTestJSON-1165817432-project-member] [instance: e3306a4a-b1cf-4d13-b4de-bd6a6a2e2619] Instance cache missing network info. {{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 572.674697] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffe563d1-170b-48f5-acd5-ccf76c593964 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 572.679491] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad64067d-a229-40df-8941-94057cab8154 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 572.714822] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5475444e-6671-4798-9f06-2312c0f658bf {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 572.723247] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76298852-41df-4a50-b443-5dd4408be165 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 572.739848] env[62070]: DEBUG nova.compute.provider_tree [None req-7301dc1c-6dd9-44f9-a50e-255658085cb3 tempest-ServerDiagnosticsTest-24419400 tempest-ServerDiagnosticsTest-24419400-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 572.900898] env[62070]: DEBUG nova.network.neutron [None req-0ad29952-ef64-49f7-9793-5bdd8a8330c4 tempest-ServerExternalEventsTest-1235707480 tempest-ServerExternalEventsTest-1235707480-project-member] [instance: 7221a720-8ab9-44fd-abe2-8f8fc19b6433] Instance cache missing network info. 
{{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 573.246183] env[62070]: DEBUG nova.scheduler.client.report [None req-7301dc1c-6dd9-44f9-a50e-255658085cb3 tempest-ServerDiagnosticsTest-24419400 tempest-ServerDiagnosticsTest-24419400-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 573.263578] env[62070]: DEBUG nova.network.neutron [None req-1b5d4941-f26d-40d2-8fb4-3b7b7d57038b tempest-ServersAdminTestJSON-1165817432 tempest-ServersAdminTestJSON-1165817432-project-member] [instance: e3306a4a-b1cf-4d13-b4de-bd6a6a2e2619] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 573.405231] env[62070]: DEBUG nova.network.neutron [None req-0ad29952-ef64-49f7-9793-5bdd8a8330c4 tempest-ServerExternalEventsTest-1235707480 tempest-ServerExternalEventsTest-1235707480-project-member] [instance: 7221a720-8ab9-44fd-abe2-8f8fc19b6433] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 573.587581] env[62070]: DEBUG oslo_concurrency.lockutils [None req-e5f1ab2d-b058-40d2-b380-7df2dee6e48f tempest-ServerDiagnosticsV248Test-1284349500 tempest-ServerDiagnosticsV248Test-1284349500-project-member] Acquiring lock "283e7488-1240-475f-a74d-809251950774" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 573.587581] env[62070]: DEBUG oslo_concurrency.lockutils [None req-e5f1ab2d-b058-40d2-b380-7df2dee6e48f tempest-ServerDiagnosticsV248Test-1284349500 tempest-ServerDiagnosticsV248Test-1284349500-project-member] Lock "283e7488-1240-475f-a74d-809251950774" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 573.587581] env[62070]: DEBUG oslo_concurrency.lockutils [None req-e5f1ab2d-b058-40d2-b380-7df2dee6e48f tempest-ServerDiagnosticsV248Test-1284349500 tempest-ServerDiagnosticsV248Test-1284349500-project-member] Acquiring lock "283e7488-1240-475f-a74d-809251950774-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 573.587581] env[62070]: DEBUG oslo_concurrency.lockutils [None req-e5f1ab2d-b058-40d2-b380-7df2dee6e48f tempest-ServerDiagnosticsV248Test-1284349500 tempest-ServerDiagnosticsV248Test-1284349500-project-member] Lock "283e7488-1240-475f-a74d-809251950774-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 573.588125] env[62070]: DEBUG oslo_concurrency.lockutils [None 
req-e5f1ab2d-b058-40d2-b380-7df2dee6e48f tempest-ServerDiagnosticsV248Test-1284349500 tempest-ServerDiagnosticsV248Test-1284349500-project-member] Lock "283e7488-1240-475f-a74d-809251950774-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 573.595025] env[62070]: INFO nova.compute.manager [None req-e5f1ab2d-b058-40d2-b380-7df2dee6e48f tempest-ServerDiagnosticsV248Test-1284349500 tempest-ServerDiagnosticsV248Test-1284349500-project-member] [instance: 283e7488-1240-475f-a74d-809251950774] Terminating instance [ 573.597293] env[62070]: DEBUG oslo_concurrency.lockutils [None req-e5f1ab2d-b058-40d2-b380-7df2dee6e48f tempest-ServerDiagnosticsV248Test-1284349500 tempest-ServerDiagnosticsV248Test-1284349500-project-member] Acquiring lock "refresh_cache-283e7488-1240-475f-a74d-809251950774" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 573.597883] env[62070]: DEBUG oslo_concurrency.lockutils [None req-e5f1ab2d-b058-40d2-b380-7df2dee6e48f tempest-ServerDiagnosticsV248Test-1284349500 tempest-ServerDiagnosticsV248Test-1284349500-project-member] Acquired lock "refresh_cache-283e7488-1240-475f-a74d-809251950774" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 573.599059] env[62070]: DEBUG nova.network.neutron [None req-e5f1ab2d-b058-40d2-b380-7df2dee6e48f tempest-ServerDiagnosticsV248Test-1284349500 tempest-ServerDiagnosticsV248Test-1284349500-project-member] [instance: 283e7488-1240-475f-a74d-809251950774] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 573.750945] env[62070]: DEBUG oslo_concurrency.lockutils [None req-7301dc1c-6dd9-44f9-a50e-255658085cb3 tempest-ServerDiagnosticsTest-24419400 tempest-ServerDiagnosticsTest-24419400-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.472s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 573.751493] env[62070]: DEBUG nova.compute.manager [None req-7301dc1c-6dd9-44f9-a50e-255658085cb3 tempest-ServerDiagnosticsTest-24419400 tempest-ServerDiagnosticsTest-24419400-project-member] [instance: 317f20e9-6ba1-4b41-b298-5dd844f323ac] Start building networks asynchronously for instance. 
{{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 573.757450] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 13.553s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 573.757639] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 573.757793] env[62070]: DEBUG nova.compute.resource_tracker [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62070) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 573.758105] env[62070]: DEBUG oslo_concurrency.lockutils [None req-abc3cfd8-5b88-48b6-95da-0d8d4239d4c5 tempest-ServersV294TestFqdnHostnames-2059713138 tempest-ServersV294TestFqdnHostnames-2059713138-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 12.566s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 573.770979] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08c73e21-1e06-4eff-98b9-04619e373532 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 573.775718] env[62070]: DEBUG oslo_concurrency.lockutils [None req-1b5d4941-f26d-40d2-8fb4-3b7b7d57038b tempest-ServersAdminTestJSON-1165817432 tempest-ServersAdminTestJSON-1165817432-project-member] Releasing lock "refresh_cache-e3306a4a-b1cf-4d13-b4de-bd6a6a2e2619" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 573.783978] env[62070]: DEBUG nova.compute.manager [None req-1b5d4941-f26d-40d2-8fb4-3b7b7d57038b tempest-ServersAdminTestJSON-1165817432 tempest-ServersAdminTestJSON-1165817432-project-member] [instance: e3306a4a-b1cf-4d13-b4de-bd6a6a2e2619] Start destroying the instance on the hypervisor. 
{{(pid=62070) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 573.783978] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-1b5d4941-f26d-40d2-8fb4-3b7b7d57038b tempest-ServersAdminTestJSON-1165817432 tempest-ServersAdminTestJSON-1165817432-project-member] [instance: e3306a4a-b1cf-4d13-b4de-bd6a6a2e2619] Destroying instance {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 573.783978] env[62070]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a60c813d-7816-4579-8a03-cd8e590e32d0 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 573.790908] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-766e6553-9a0e-4fd0-903f-6ef9935f4982 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 573.804874] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6fd1943-e1f3-4154-a107-a0b8bca4d917 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 573.840957] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e81e21be-d963-4751-b02c-bab52657fbf8 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 573.845933] env[62070]: WARNING nova.virt.vmwareapi.vmops [None req-1b5d4941-f26d-40d2-8fb4-3b7b7d57038b tempest-ServersAdminTestJSON-1165817432 tempest-ServersAdminTestJSON-1165817432-project-member] [instance: e3306a4a-b1cf-4d13-b4de-bd6a6a2e2619] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance e3306a4a-b1cf-4d13-b4de-bd6a6a2e2619 could not be found. [ 573.845933] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-1b5d4941-f26d-40d2-8fb4-3b7b7d57038b tempest-ServersAdminTestJSON-1165817432 tempest-ServersAdminTestJSON-1165817432-project-member] [instance: e3306a4a-b1cf-4d13-b4de-bd6a6a2e2619] Instance destroyed {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 573.845933] env[62070]: INFO nova.compute.manager [None req-1b5d4941-f26d-40d2-8fb4-3b7b7d57038b tempest-ServersAdminTestJSON-1165817432 tempest-ServersAdminTestJSON-1165817432-project-member] [instance: e3306a4a-b1cf-4d13-b4de-bd6a6a2e2619] Took 0.06 seconds to destroy the instance on the hypervisor. [ 573.845933] env[62070]: DEBUG oslo.service.loopingcall [None req-1b5d4941-f26d-40d2-8fb4-3b7b7d57038b tempest-ServersAdminTestJSON-1165817432 tempest-ServersAdminTestJSON-1165817432-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 573.845933] env[62070]: DEBUG nova.compute.manager [-] [instance: e3306a4a-b1cf-4d13-b4de-bd6a6a2e2619] Deallocating network for instance {{(pid=62070) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 573.845933] env[62070]: DEBUG nova.network.neutron [-] [instance: e3306a4a-b1cf-4d13-b4de-bd6a6a2e2619] deallocate_for_instance() {{(pid=62070) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 573.851767] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5dc2eba-ef35-44ff-b3d5-6cee5a27f9fa {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 573.891207] env[62070]: DEBUG nova.compute.resource_tracker [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181360MB free_disk=169GB free_vcpus=48 pci_devices=None {{(pid=62070) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 573.891207] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 573.911504] env[62070]: INFO nova.compute.manager [None req-0ad29952-ef64-49f7-9793-5bdd8a8330c4 tempest-ServerExternalEventsTest-1235707480 tempest-ServerExternalEventsTest-1235707480-project-member] [instance: 7221a720-8ab9-44fd-abe2-8f8fc19b6433] Took 1.32 seconds to deallocate network for instance. [ 573.930450] env[62070]: DEBUG nova.network.neutron [-] [instance: e3306a4a-b1cf-4d13-b4de-bd6a6a2e2619] Instance cache missing network info. {{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 574.008762] env[62070]: DEBUG nova.compute.manager [req-5c969378-a023-4cc4-aa62-72a9f84a45c2 req-6ea40fe3-926a-4c25-af61-c1e96eb4b7c7 service nova] [instance: e3306a4a-b1cf-4d13-b4de-bd6a6a2e2619] Received event network-vif-deleted-bd48685f-7c51-4b6b-8b0b-6ac758a2eb4d {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 574.170233] env[62070]: DEBUG nova.network.neutron [None req-e5f1ab2d-b058-40d2-b380-7df2dee6e48f tempest-ServerDiagnosticsV248Test-1284349500 tempest-ServerDiagnosticsV248Test-1284349500-project-member] [instance: 283e7488-1240-475f-a74d-809251950774] Instance cache missing network info. {{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 574.260250] env[62070]: DEBUG nova.compute.utils [None req-7301dc1c-6dd9-44f9-a50e-255658085cb3 tempest-ServerDiagnosticsTest-24419400 tempest-ServerDiagnosticsTest-24419400-project-member] Using /dev/sd instead of None {{(pid=62070) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 574.261318] env[62070]: DEBUG nova.compute.manager [None req-7301dc1c-6dd9-44f9-a50e-255658085cb3 tempest-ServerDiagnosticsTest-24419400 tempest-ServerDiagnosticsTest-24419400-project-member] [instance: 317f20e9-6ba1-4b41-b298-5dd844f323ac] Allocating IP information in the background. 
{{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 574.262830] env[62070]: DEBUG nova.network.neutron [None req-7301dc1c-6dd9-44f9-a50e-255658085cb3 tempest-ServerDiagnosticsTest-24419400 tempest-ServerDiagnosticsTest-24419400-project-member] [instance: 317f20e9-6ba1-4b41-b298-5dd844f323ac] allocate_for_instance() {{(pid=62070) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 574.434862] env[62070]: DEBUG nova.network.neutron [-] [instance: e3306a4a-b1cf-4d13-b4de-bd6a6a2e2619] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 574.570195] env[62070]: DEBUG nova.network.neutron [None req-e5f1ab2d-b058-40d2-b380-7df2dee6e48f tempest-ServerDiagnosticsV248Test-1284349500 tempest-ServerDiagnosticsV248Test-1284349500-project-member] [instance: 283e7488-1240-475f-a74d-809251950774] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 574.599670] env[62070]: ERROR nova.compute.manager [None req-ff380498-571f-460a-ac0d-214b8f51e56f tempest-ServersAdminTestJSON-1165817432 tempest-ServersAdminTestJSON-1165817432-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 611560c7-4689-481c-9fd1-efc64eae1fd4, please check neutron logs for more information. [ 574.599670] env[62070]: ERROR nova.compute.manager Traceback (most recent call last): [ 574.599670] env[62070]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 574.599670] env[62070]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 574.599670] env[62070]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 574.599670] env[62070]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 574.599670] env[62070]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 574.599670] env[62070]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 574.599670] env[62070]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 574.599670] env[62070]: ERROR nova.compute.manager self.force_reraise() [ 574.599670] env[62070]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 574.599670] env[62070]: ERROR nova.compute.manager raise self.value [ 574.599670] env[62070]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 574.599670] env[62070]: ERROR nova.compute.manager updated_port = self._update_port( [ 574.599670] env[62070]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 574.599670] env[62070]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 574.600397] env[62070]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 574.600397] env[62070]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 574.600397] env[62070]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for 
port 611560c7-4689-481c-9fd1-efc64eae1fd4, please check neutron logs for more information. [ 574.600397] env[62070]: ERROR nova.compute.manager [ 574.600397] env[62070]: Traceback (most recent call last): [ 574.600397] env[62070]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 574.600397] env[62070]: listener.cb(fileno) [ 574.600397] env[62070]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 574.600397] env[62070]: result = function(*args, **kwargs) [ 574.600397] env[62070]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 574.600397] env[62070]: return func(*args, **kwargs) [ 574.600397] env[62070]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 574.600397] env[62070]: raise e [ 574.600397] env[62070]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 574.600397] env[62070]: nwinfo = self.network_api.allocate_for_instance( [ 574.600397] env[62070]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 574.600397] env[62070]: created_port_ids = self._update_ports_for_instance( [ 574.600397] env[62070]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 574.600397] env[62070]: with excutils.save_and_reraise_exception(): [ 574.600397] env[62070]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 574.600397] env[62070]: self.force_reraise() [ 574.600397] env[62070]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 574.600397] env[62070]: raise self.value [ 574.600397] env[62070]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 574.600397] env[62070]: updated_port = self._update_port( [ 574.600397] env[62070]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 574.600397] env[62070]: _ensure_no_port_binding_failure(port) [ 574.600397] env[62070]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 574.600397] env[62070]: raise exception.PortBindingFailed(port_id=port['id']) [ 574.601898] env[62070]: nova.exception.PortBindingFailed: Binding failed for port 611560c7-4689-481c-9fd1-efc64eae1fd4, please check neutron logs for more information. [ 574.601898] env[62070]: Removing descriptor: 14 [ 574.601898] env[62070]: ERROR nova.compute.manager [None req-ff380498-571f-460a-ac0d-214b8f51e56f tempest-ServersAdminTestJSON-1165817432 tempest-ServersAdminTestJSON-1165817432-project-member] [instance: 5936aded-90fc-4f77-8103-8c9e1912379c] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 611560c7-4689-481c-9fd1-efc64eae1fd4, please check neutron logs for more information. 
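Each of these tracebacks also passes through excutils.save_and_reraise_exception() at nova/network/neutron.py line 1365; that oslo.utils context manager is what produces the __exit__, force_reraise() and raise self.value frames: it captures the in-flight exception, lets cleanup run, and re-raises the original afterwards. The sketch below shows the usage pattern only; _update_port, update_ports_for_instance and the list cleanup are hypothetical stand-ins, the excutils API itself is real.

    from oslo_utils import excutils

    def _update_port(port):
        # Hypothetical stand-in that fails the way the port update fails above.
        raise RuntimeError("Binding failed for port %s" % port['id'])

    def update_ports_for_instance(ports):
        created = []
        try:
            for port in ports:
                created.append(_update_port(port))
        except Exception:
            with excutils.save_and_reraise_exception():
                # Cleanup runs here without swallowing the error; when the
                # block exits, the saved exception is re-raised, which is
                # where the force_reraise()/raise self.value frames come from.
                created.clear()
        return created

    try:
        update_ports_for_instance(
            [{'id': '611560c7-4689-481c-9fd1-efc64eae1fd4'}])
    except RuntimeError as exc:
        print("original exception preserved:", exc)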
[ 574.601898] env[62070]: ERROR nova.compute.manager [instance: 5936aded-90fc-4f77-8103-8c9e1912379c] Traceback (most recent call last): [ 574.601898] env[62070]: ERROR nova.compute.manager [instance: 5936aded-90fc-4f77-8103-8c9e1912379c] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 574.601898] env[62070]: ERROR nova.compute.manager [instance: 5936aded-90fc-4f77-8103-8c9e1912379c] yield resources [ 574.601898] env[62070]: ERROR nova.compute.manager [instance: 5936aded-90fc-4f77-8103-8c9e1912379c] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 574.601898] env[62070]: ERROR nova.compute.manager [instance: 5936aded-90fc-4f77-8103-8c9e1912379c] self.driver.spawn(context, instance, image_meta, [ 574.601898] env[62070]: ERROR nova.compute.manager [instance: 5936aded-90fc-4f77-8103-8c9e1912379c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 574.601898] env[62070]: ERROR nova.compute.manager [instance: 5936aded-90fc-4f77-8103-8c9e1912379c] self._vmops.spawn(context, instance, image_meta, injected_files, [ 574.601898] env[62070]: ERROR nova.compute.manager [instance: 5936aded-90fc-4f77-8103-8c9e1912379c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 574.601898] env[62070]: ERROR nova.compute.manager [instance: 5936aded-90fc-4f77-8103-8c9e1912379c] vm_ref = self.build_virtual_machine(instance, [ 574.602486] env[62070]: ERROR nova.compute.manager [instance: 5936aded-90fc-4f77-8103-8c9e1912379c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 574.602486] env[62070]: ERROR nova.compute.manager [instance: 5936aded-90fc-4f77-8103-8c9e1912379c] vif_infos = vmwarevif.get_vif_info(self._session, [ 574.602486] env[62070]: ERROR nova.compute.manager [instance: 5936aded-90fc-4f77-8103-8c9e1912379c] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 574.602486] env[62070]: ERROR nova.compute.manager [instance: 5936aded-90fc-4f77-8103-8c9e1912379c] for vif in network_info: [ 574.602486] env[62070]: ERROR nova.compute.manager [instance: 5936aded-90fc-4f77-8103-8c9e1912379c] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 574.602486] env[62070]: ERROR nova.compute.manager [instance: 5936aded-90fc-4f77-8103-8c9e1912379c] return self._sync_wrapper(fn, *args, **kwargs) [ 574.602486] env[62070]: ERROR nova.compute.manager [instance: 5936aded-90fc-4f77-8103-8c9e1912379c] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 574.602486] env[62070]: ERROR nova.compute.manager [instance: 5936aded-90fc-4f77-8103-8c9e1912379c] self.wait() [ 574.602486] env[62070]: ERROR nova.compute.manager [instance: 5936aded-90fc-4f77-8103-8c9e1912379c] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 574.602486] env[62070]: ERROR nova.compute.manager [instance: 5936aded-90fc-4f77-8103-8c9e1912379c] self[:] = self._gt.wait() [ 574.602486] env[62070]: ERROR nova.compute.manager [instance: 5936aded-90fc-4f77-8103-8c9e1912379c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 574.602486] env[62070]: ERROR nova.compute.manager [instance: 5936aded-90fc-4f77-8103-8c9e1912379c] return self._exit_event.wait() [ 574.602486] env[62070]: ERROR nova.compute.manager [instance: 5936aded-90fc-4f77-8103-8c9e1912379c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 574.603218] env[62070]: ERROR 
nova.compute.manager [instance: 5936aded-90fc-4f77-8103-8c9e1912379c] result = hub.switch() [ 574.603218] env[62070]: ERROR nova.compute.manager [instance: 5936aded-90fc-4f77-8103-8c9e1912379c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 574.603218] env[62070]: ERROR nova.compute.manager [instance: 5936aded-90fc-4f77-8103-8c9e1912379c] return self.greenlet.switch() [ 574.603218] env[62070]: ERROR nova.compute.manager [instance: 5936aded-90fc-4f77-8103-8c9e1912379c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 574.603218] env[62070]: ERROR nova.compute.manager [instance: 5936aded-90fc-4f77-8103-8c9e1912379c] result = function(*args, **kwargs) [ 574.603218] env[62070]: ERROR nova.compute.manager [instance: 5936aded-90fc-4f77-8103-8c9e1912379c] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 574.603218] env[62070]: ERROR nova.compute.manager [instance: 5936aded-90fc-4f77-8103-8c9e1912379c] return func(*args, **kwargs) [ 574.603218] env[62070]: ERROR nova.compute.manager [instance: 5936aded-90fc-4f77-8103-8c9e1912379c] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 574.603218] env[62070]: ERROR nova.compute.manager [instance: 5936aded-90fc-4f77-8103-8c9e1912379c] raise e [ 574.603218] env[62070]: ERROR nova.compute.manager [instance: 5936aded-90fc-4f77-8103-8c9e1912379c] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 574.603218] env[62070]: ERROR nova.compute.manager [instance: 5936aded-90fc-4f77-8103-8c9e1912379c] nwinfo = self.network_api.allocate_for_instance( [ 574.603218] env[62070]: ERROR nova.compute.manager [instance: 5936aded-90fc-4f77-8103-8c9e1912379c] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 574.603218] env[62070]: ERROR nova.compute.manager [instance: 5936aded-90fc-4f77-8103-8c9e1912379c] created_port_ids = self._update_ports_for_instance( [ 574.603753] env[62070]: ERROR nova.compute.manager [instance: 5936aded-90fc-4f77-8103-8c9e1912379c] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 574.603753] env[62070]: ERROR nova.compute.manager [instance: 5936aded-90fc-4f77-8103-8c9e1912379c] with excutils.save_and_reraise_exception(): [ 574.603753] env[62070]: ERROR nova.compute.manager [instance: 5936aded-90fc-4f77-8103-8c9e1912379c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 574.603753] env[62070]: ERROR nova.compute.manager [instance: 5936aded-90fc-4f77-8103-8c9e1912379c] self.force_reraise() [ 574.603753] env[62070]: ERROR nova.compute.manager [instance: 5936aded-90fc-4f77-8103-8c9e1912379c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 574.603753] env[62070]: ERROR nova.compute.manager [instance: 5936aded-90fc-4f77-8103-8c9e1912379c] raise self.value [ 574.603753] env[62070]: ERROR nova.compute.manager [instance: 5936aded-90fc-4f77-8103-8c9e1912379c] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 574.603753] env[62070]: ERROR nova.compute.manager [instance: 5936aded-90fc-4f77-8103-8c9e1912379c] updated_port = self._update_port( [ 574.603753] env[62070]: ERROR nova.compute.manager [instance: 5936aded-90fc-4f77-8103-8c9e1912379c] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 574.603753] 
env[62070]: ERROR nova.compute.manager [instance: 5936aded-90fc-4f77-8103-8c9e1912379c] _ensure_no_port_binding_failure(port) [ 574.603753] env[62070]: ERROR nova.compute.manager [instance: 5936aded-90fc-4f77-8103-8c9e1912379c] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 574.603753] env[62070]: ERROR nova.compute.manager [instance: 5936aded-90fc-4f77-8103-8c9e1912379c] raise exception.PortBindingFailed(port_id=port['id']) [ 574.604229] env[62070]: ERROR nova.compute.manager [instance: 5936aded-90fc-4f77-8103-8c9e1912379c] nova.exception.PortBindingFailed: Binding failed for port 611560c7-4689-481c-9fd1-efc64eae1fd4, please check neutron logs for more information. [ 574.604229] env[62070]: ERROR nova.compute.manager [instance: 5936aded-90fc-4f77-8103-8c9e1912379c] [ 574.604229] env[62070]: INFO nova.compute.manager [None req-ff380498-571f-460a-ac0d-214b8f51e56f tempest-ServersAdminTestJSON-1165817432 tempest-ServersAdminTestJSON-1165817432-project-member] [instance: 5936aded-90fc-4f77-8103-8c9e1912379c] Terminating instance [ 574.604904] env[62070]: DEBUG oslo_concurrency.lockutils [None req-ff380498-571f-460a-ac0d-214b8f51e56f tempest-ServersAdminTestJSON-1165817432 tempest-ServersAdminTestJSON-1165817432-project-member] Acquiring lock "refresh_cache-5936aded-90fc-4f77-8103-8c9e1912379c" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 574.605120] env[62070]: DEBUG oslo_concurrency.lockutils [None req-ff380498-571f-460a-ac0d-214b8f51e56f tempest-ServersAdminTestJSON-1165817432 tempest-ServersAdminTestJSON-1165817432-project-member] Acquired lock "refresh_cache-5936aded-90fc-4f77-8103-8c9e1912379c" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 574.605934] env[62070]: DEBUG nova.network.neutron [None req-ff380498-571f-460a-ac0d-214b8f51e56f tempest-ServersAdminTestJSON-1165817432 tempest-ServersAdminTestJSON-1165817432-project-member] [instance: 5936aded-90fc-4f77-8103-8c9e1912379c] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 574.608704] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5e77920-5667-4b2f-af3a-70aceb9109f4 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 574.618666] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7039ad4a-acf7-45c0-91b5-76efea295919 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 574.654234] env[62070]: DEBUG nova.policy [None req-7301dc1c-6dd9-44f9-a50e-255658085cb3 tempest-ServerDiagnosticsTest-24419400 tempest-ServerDiagnosticsTest-24419400-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '36884c76620f474c83d9e55c82690c01', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c7fb2d4e53bc4d878de093b6abc8bba7', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62070) authorize /opt/stack/nova/nova/policy.py:201}} [ 574.656287] env[62070]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e46463e5-48b1-41ea-ae60-89d944ac809e {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 574.664801] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7ae67d7-6085-44bd-b3b2-f02619f2b15b {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 574.678540] env[62070]: DEBUG nova.compute.provider_tree [None req-abc3cfd8-5b88-48b6-95da-0d8d4239d4c5 tempest-ServersV294TestFqdnHostnames-2059713138 tempest-ServersV294TestFqdnHostnames-2059713138-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 574.762592] env[62070]: DEBUG nova.compute.manager [None req-7301dc1c-6dd9-44f9-a50e-255658085cb3 tempest-ServerDiagnosticsTest-24419400 tempest-ServerDiagnosticsTest-24419400-project-member] [instance: 317f20e9-6ba1-4b41-b298-5dd844f323ac] Start building block device mappings for instance. {{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 574.936686] env[62070]: INFO nova.compute.manager [-] [instance: e3306a4a-b1cf-4d13-b4de-bd6a6a2e2619] Took 1.09 seconds to deallocate network for instance. [ 574.939878] env[62070]: DEBUG nova.compute.claims [None req-1b5d4941-f26d-40d2-8fb4-3b7b7d57038b tempest-ServersAdminTestJSON-1165817432 tempest-ServersAdminTestJSON-1165817432-project-member] [instance: e3306a4a-b1cf-4d13-b4de-bd6a6a2e2619] Aborting claim: {{(pid=62070) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 574.939878] env[62070]: DEBUG oslo_concurrency.lockutils [None req-1b5d4941-f26d-40d2-8fb4-3b7b7d57038b tempest-ServersAdminTestJSON-1165817432 tempest-ServersAdminTestJSON-1165817432-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 574.949639] env[62070]: INFO nova.scheduler.client.report [None req-0ad29952-ef64-49f7-9793-5bdd8a8330c4 tempest-ServerExternalEventsTest-1235707480 tempest-ServerExternalEventsTest-1235707480-project-member] Deleted allocations for instance 7221a720-8ab9-44fd-abe2-8f8fc19b6433 [ 575.071012] env[62070]: DEBUG oslo_concurrency.lockutils [None req-e5f1ab2d-b058-40d2-b380-7df2dee6e48f tempest-ServerDiagnosticsV248Test-1284349500 tempest-ServerDiagnosticsV248Test-1284349500-project-member] Releasing lock "refresh_cache-283e7488-1240-475f-a74d-809251950774" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 575.071012] env[62070]: DEBUG nova.compute.manager [None req-e5f1ab2d-b058-40d2-b380-7df2dee6e48f tempest-ServerDiagnosticsV248Test-1284349500 tempest-ServerDiagnosticsV248Test-1284349500-project-member] [instance: 283e7488-1240-475f-a74d-809251950774] Start destroying the instance on the hypervisor. 
{{(pid=62070) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 575.071012] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-e5f1ab2d-b058-40d2-b380-7df2dee6e48f tempest-ServerDiagnosticsV248Test-1284349500 tempest-ServerDiagnosticsV248Test-1284349500-project-member] [instance: 283e7488-1240-475f-a74d-809251950774] Destroying instance {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 575.072149] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec3449a9-45af-4df7-bfd6-df5da86a3652 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 575.082258] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-e5f1ab2d-b058-40d2-b380-7df2dee6e48f tempest-ServerDiagnosticsV248Test-1284349500 tempest-ServerDiagnosticsV248Test-1284349500-project-member] [instance: 283e7488-1240-475f-a74d-809251950774] Powering off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 575.085829] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2ae64d01-a84e-460e-a706-d0b3e82f565f {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 575.093519] env[62070]: DEBUG oslo_vmware.api [None req-e5f1ab2d-b058-40d2-b380-7df2dee6e48f tempest-ServerDiagnosticsV248Test-1284349500 tempest-ServerDiagnosticsV248Test-1284349500-project-member] Waiting for the task: (returnval){ [ 575.093519] env[62070]: value = "task-1121419" [ 575.093519] env[62070]: _type = "Task" [ 575.093519] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 575.104664] env[62070]: DEBUG oslo_vmware.api [None req-e5f1ab2d-b058-40d2-b380-7df2dee6e48f tempest-ServerDiagnosticsV248Test-1284349500 tempest-ServerDiagnosticsV248Test-1284349500-project-member] Task: {'id': task-1121419, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 575.157357] env[62070]: DEBUG nova.network.neutron [None req-ff380498-571f-460a-ac0d-214b8f51e56f tempest-ServersAdminTestJSON-1165817432 tempest-ServersAdminTestJSON-1165817432-project-member] [instance: 5936aded-90fc-4f77-8103-8c9e1912379c] Instance cache missing network info. 
{{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 575.181321] env[62070]: DEBUG nova.scheduler.client.report [None req-abc3cfd8-5b88-48b6-95da-0d8d4239d4c5 tempest-ServersV294TestFqdnHostnames-2059713138 tempest-ServersV294TestFqdnHostnames-2059713138-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 575.409484] env[62070]: DEBUG nova.network.neutron [None req-ff380498-571f-460a-ac0d-214b8f51e56f tempest-ServersAdminTestJSON-1165817432 tempest-ServersAdminTestJSON-1165817432-project-member] [instance: 5936aded-90fc-4f77-8103-8c9e1912379c] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 575.459187] env[62070]: DEBUG oslo_concurrency.lockutils [None req-0ad29952-ef64-49f7-9793-5bdd8a8330c4 tempest-ServerExternalEventsTest-1235707480 tempest-ServerExternalEventsTest-1235707480-project-member] Lock "7221a720-8ab9-44fd-abe2-8f8fc19b6433" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 28.867s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 575.489792] env[62070]: DEBUG nova.compute.manager [req-41e8e2bf-f206-48b7-b626-03c3c89506d2 req-54d495b4-acac-463a-8430-ce3765365abc service nova] [instance: 5936aded-90fc-4f77-8103-8c9e1912379c] Received event network-changed-611560c7-4689-481c-9fd1-efc64eae1fd4 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 575.490172] env[62070]: DEBUG nova.compute.manager [req-41e8e2bf-f206-48b7-b626-03c3c89506d2 req-54d495b4-acac-463a-8430-ce3765365abc service nova] [instance: 5936aded-90fc-4f77-8103-8c9e1912379c] Refreshing instance network info cache due to event network-changed-611560c7-4689-481c-9fd1-efc64eae1fd4. {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 575.490511] env[62070]: DEBUG oslo_concurrency.lockutils [req-41e8e2bf-f206-48b7-b626-03c3c89506d2 req-54d495b4-acac-463a-8430-ce3765365abc service nova] Acquiring lock "refresh_cache-5936aded-90fc-4f77-8103-8c9e1912379c" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 575.607335] env[62070]: DEBUG oslo_vmware.api [None req-e5f1ab2d-b058-40d2-b380-7df2dee6e48f tempest-ServerDiagnosticsV248Test-1284349500 tempest-ServerDiagnosticsV248Test-1284349500-project-member] Task: {'id': task-1121419, 'name': PowerOffVM_Task, 'duration_secs': 0.114926} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 575.607335] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-e5f1ab2d-b058-40d2-b380-7df2dee6e48f tempest-ServerDiagnosticsV248Test-1284349500 tempest-ServerDiagnosticsV248Test-1284349500-project-member] [instance: 283e7488-1240-475f-a74d-809251950774] Powered off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 575.607335] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-e5f1ab2d-b058-40d2-b380-7df2dee6e48f tempest-ServerDiagnosticsV248Test-1284349500 tempest-ServerDiagnosticsV248Test-1284349500-project-member] [instance: 283e7488-1240-475f-a74d-809251950774] Unregistering the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 575.607335] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-14171bea-9387-4746-aa9b-dd37868aa439 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 575.633732] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-e5f1ab2d-b058-40d2-b380-7df2dee6e48f tempest-ServerDiagnosticsV248Test-1284349500 tempest-ServerDiagnosticsV248Test-1284349500-project-member] [instance: 283e7488-1240-475f-a74d-809251950774] Unregistered the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 575.633812] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-e5f1ab2d-b058-40d2-b380-7df2dee6e48f tempest-ServerDiagnosticsV248Test-1284349500 tempest-ServerDiagnosticsV248Test-1284349500-project-member] [instance: 283e7488-1240-475f-a74d-809251950774] Deleting contents of the VM from datastore datastore1 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 575.634120] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-e5f1ab2d-b058-40d2-b380-7df2dee6e48f tempest-ServerDiagnosticsV248Test-1284349500 tempest-ServerDiagnosticsV248Test-1284349500-project-member] Deleting the datastore file [datastore1] 283e7488-1240-475f-a74d-809251950774 {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 575.634637] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ae0f4383-93b6-4726-a92e-db51cfa397df {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 575.642111] env[62070]: DEBUG oslo_vmware.api [None req-e5f1ab2d-b058-40d2-b380-7df2dee6e48f tempest-ServerDiagnosticsV248Test-1284349500 tempest-ServerDiagnosticsV248Test-1284349500-project-member] Waiting for the task: (returnval){ [ 575.642111] env[62070]: value = "task-1121421" [ 575.642111] env[62070]: _type = "Task" [ 575.642111] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 575.654996] env[62070]: DEBUG oslo_vmware.api [None req-e5f1ab2d-b058-40d2-b380-7df2dee6e48f tempest-ServerDiagnosticsV248Test-1284349500 tempest-ServerDiagnosticsV248Test-1284349500-project-member] Task: {'id': task-1121421, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 575.689088] env[62070]: DEBUG oslo_concurrency.lockutils [None req-abc3cfd8-5b88-48b6-95da-0d8d4239d4c5 tempest-ServersV294TestFqdnHostnames-2059713138 tempest-ServersV294TestFqdnHostnames-2059713138-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.929s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 575.689088] env[62070]: ERROR nova.compute.manager [None req-abc3cfd8-5b88-48b6-95da-0d8d4239d4c5 tempest-ServersV294TestFqdnHostnames-2059713138 tempest-ServersV294TestFqdnHostnames-2059713138-project-member] [instance: 87462fbe-d62d-4b40-880c-a1785c9ed5d4] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port af01c0ca-546d-4b4d-a8cf-007e5f080e9c, please check neutron logs for more information. [ 575.689088] env[62070]: ERROR nova.compute.manager [instance: 87462fbe-d62d-4b40-880c-a1785c9ed5d4] Traceback (most recent call last): [ 575.689088] env[62070]: ERROR nova.compute.manager [instance: 87462fbe-d62d-4b40-880c-a1785c9ed5d4] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 575.689088] env[62070]: ERROR nova.compute.manager [instance: 87462fbe-d62d-4b40-880c-a1785c9ed5d4] self.driver.spawn(context, instance, image_meta, [ 575.689088] env[62070]: ERROR nova.compute.manager [instance: 87462fbe-d62d-4b40-880c-a1785c9ed5d4] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 575.689088] env[62070]: ERROR nova.compute.manager [instance: 87462fbe-d62d-4b40-880c-a1785c9ed5d4] self._vmops.spawn(context, instance, image_meta, injected_files, [ 575.689088] env[62070]: ERROR nova.compute.manager [instance: 87462fbe-d62d-4b40-880c-a1785c9ed5d4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 575.689088] env[62070]: ERROR nova.compute.manager [instance: 87462fbe-d62d-4b40-880c-a1785c9ed5d4] vm_ref = self.build_virtual_machine(instance, [ 575.689903] env[62070]: ERROR nova.compute.manager [instance: 87462fbe-d62d-4b40-880c-a1785c9ed5d4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 575.689903] env[62070]: ERROR nova.compute.manager [instance: 87462fbe-d62d-4b40-880c-a1785c9ed5d4] vif_infos = vmwarevif.get_vif_info(self._session, [ 575.689903] env[62070]: ERROR nova.compute.manager [instance: 87462fbe-d62d-4b40-880c-a1785c9ed5d4] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 575.689903] env[62070]: ERROR nova.compute.manager [instance: 87462fbe-d62d-4b40-880c-a1785c9ed5d4] for vif in network_info: [ 575.689903] env[62070]: ERROR nova.compute.manager [instance: 87462fbe-d62d-4b40-880c-a1785c9ed5d4] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 575.689903] env[62070]: ERROR nova.compute.manager [instance: 87462fbe-d62d-4b40-880c-a1785c9ed5d4] return self._sync_wrapper(fn, *args, **kwargs) [ 575.689903] env[62070]: ERROR nova.compute.manager [instance: 87462fbe-d62d-4b40-880c-a1785c9ed5d4] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 575.689903] env[62070]: ERROR nova.compute.manager [instance: 87462fbe-d62d-4b40-880c-a1785c9ed5d4] self.wait() [ 575.689903] env[62070]: ERROR nova.compute.manager [instance: 87462fbe-d62d-4b40-880c-a1785c9ed5d4] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 
575.689903] env[62070]: ERROR nova.compute.manager [instance: 87462fbe-d62d-4b40-880c-a1785c9ed5d4] self[:] = self._gt.wait() [ 575.689903] env[62070]: ERROR nova.compute.manager [instance: 87462fbe-d62d-4b40-880c-a1785c9ed5d4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 575.689903] env[62070]: ERROR nova.compute.manager [instance: 87462fbe-d62d-4b40-880c-a1785c9ed5d4] return self._exit_event.wait() [ 575.689903] env[62070]: ERROR nova.compute.manager [instance: 87462fbe-d62d-4b40-880c-a1785c9ed5d4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 575.690345] env[62070]: ERROR nova.compute.manager [instance: 87462fbe-d62d-4b40-880c-a1785c9ed5d4] result = hub.switch() [ 575.690345] env[62070]: ERROR nova.compute.manager [instance: 87462fbe-d62d-4b40-880c-a1785c9ed5d4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 575.690345] env[62070]: ERROR nova.compute.manager [instance: 87462fbe-d62d-4b40-880c-a1785c9ed5d4] return self.greenlet.switch() [ 575.690345] env[62070]: ERROR nova.compute.manager [instance: 87462fbe-d62d-4b40-880c-a1785c9ed5d4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 575.690345] env[62070]: ERROR nova.compute.manager [instance: 87462fbe-d62d-4b40-880c-a1785c9ed5d4] result = function(*args, **kwargs) [ 575.690345] env[62070]: ERROR nova.compute.manager [instance: 87462fbe-d62d-4b40-880c-a1785c9ed5d4] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 575.690345] env[62070]: ERROR nova.compute.manager [instance: 87462fbe-d62d-4b40-880c-a1785c9ed5d4] return func(*args, **kwargs) [ 575.690345] env[62070]: ERROR nova.compute.manager [instance: 87462fbe-d62d-4b40-880c-a1785c9ed5d4] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 575.690345] env[62070]: ERROR nova.compute.manager [instance: 87462fbe-d62d-4b40-880c-a1785c9ed5d4] raise e [ 575.690345] env[62070]: ERROR nova.compute.manager [instance: 87462fbe-d62d-4b40-880c-a1785c9ed5d4] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 575.690345] env[62070]: ERROR nova.compute.manager [instance: 87462fbe-d62d-4b40-880c-a1785c9ed5d4] nwinfo = self.network_api.allocate_for_instance( [ 575.690345] env[62070]: ERROR nova.compute.manager [instance: 87462fbe-d62d-4b40-880c-a1785c9ed5d4] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 575.690345] env[62070]: ERROR nova.compute.manager [instance: 87462fbe-d62d-4b40-880c-a1785c9ed5d4] created_port_ids = self._update_ports_for_instance( [ 575.690666] env[62070]: ERROR nova.compute.manager [instance: 87462fbe-d62d-4b40-880c-a1785c9ed5d4] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 575.690666] env[62070]: ERROR nova.compute.manager [instance: 87462fbe-d62d-4b40-880c-a1785c9ed5d4] with excutils.save_and_reraise_exception(): [ 575.690666] env[62070]: ERROR nova.compute.manager [instance: 87462fbe-d62d-4b40-880c-a1785c9ed5d4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 575.690666] env[62070]: ERROR nova.compute.manager [instance: 87462fbe-d62d-4b40-880c-a1785c9ed5d4] self.force_reraise() [ 575.690666] env[62070]: ERROR nova.compute.manager [instance: 87462fbe-d62d-4b40-880c-a1785c9ed5d4] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 575.690666] env[62070]: ERROR nova.compute.manager [instance: 87462fbe-d62d-4b40-880c-a1785c9ed5d4] raise self.value [ 575.690666] env[62070]: ERROR nova.compute.manager [instance: 87462fbe-d62d-4b40-880c-a1785c9ed5d4] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 575.690666] env[62070]: ERROR nova.compute.manager [instance: 87462fbe-d62d-4b40-880c-a1785c9ed5d4] updated_port = self._update_port( [ 575.690666] env[62070]: ERROR nova.compute.manager [instance: 87462fbe-d62d-4b40-880c-a1785c9ed5d4] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 575.690666] env[62070]: ERROR nova.compute.manager [instance: 87462fbe-d62d-4b40-880c-a1785c9ed5d4] _ensure_no_port_binding_failure(port) [ 575.690666] env[62070]: ERROR nova.compute.manager [instance: 87462fbe-d62d-4b40-880c-a1785c9ed5d4] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 575.690666] env[62070]: ERROR nova.compute.manager [instance: 87462fbe-d62d-4b40-880c-a1785c9ed5d4] raise exception.PortBindingFailed(port_id=port['id']) [ 575.690971] env[62070]: ERROR nova.compute.manager [instance: 87462fbe-d62d-4b40-880c-a1785c9ed5d4] nova.exception.PortBindingFailed: Binding failed for port af01c0ca-546d-4b4d-a8cf-007e5f080e9c, please check neutron logs for more information. [ 575.690971] env[62070]: ERROR nova.compute.manager [instance: 87462fbe-d62d-4b40-880c-a1785c9ed5d4] [ 575.690971] env[62070]: DEBUG nova.compute.utils [None req-abc3cfd8-5b88-48b6-95da-0d8d4239d4c5 tempest-ServersV294TestFqdnHostnames-2059713138 tempest-ServersV294TestFqdnHostnames-2059713138-project-member] [instance: 87462fbe-d62d-4b40-880c-a1785c9ed5d4] Binding failed for port af01c0ca-546d-4b4d-a8cf-007e5f080e9c, please check neutron logs for more information. {{(pid=62070) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 575.690971] env[62070]: DEBUG oslo_concurrency.lockutils [None req-cb6fd1fb-d5f4-4d43-abd6-fc98c9bb8564 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 12.587s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 575.694036] env[62070]: DEBUG nova.compute.manager [None req-abc3cfd8-5b88-48b6-95da-0d8d4239d4c5 tempest-ServersV294TestFqdnHostnames-2059713138 tempest-ServersV294TestFqdnHostnames-2059713138-project-member] [instance: 87462fbe-d62d-4b40-880c-a1785c9ed5d4] Build of instance 87462fbe-d62d-4b40-880c-a1785c9ed5d4 was re-scheduled: Binding failed for port af01c0ca-546d-4b4d-a8cf-007e5f080e9c, please check neutron logs for more information. 
{{(pid=62070) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 575.694343] env[62070]: DEBUG nova.compute.manager [None req-abc3cfd8-5b88-48b6-95da-0d8d4239d4c5 tempest-ServersV294TestFqdnHostnames-2059713138 tempest-ServersV294TestFqdnHostnames-2059713138-project-member] [instance: 87462fbe-d62d-4b40-880c-a1785c9ed5d4] Unplugging VIFs for instance {{(pid=62070) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 575.694938] env[62070]: DEBUG oslo_concurrency.lockutils [None req-abc3cfd8-5b88-48b6-95da-0d8d4239d4c5 tempest-ServersV294TestFqdnHostnames-2059713138 tempest-ServersV294TestFqdnHostnames-2059713138-project-member] Acquiring lock "refresh_cache-87462fbe-d62d-4b40-880c-a1785c9ed5d4" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 575.694938] env[62070]: DEBUG oslo_concurrency.lockutils [None req-abc3cfd8-5b88-48b6-95da-0d8d4239d4c5 tempest-ServersV294TestFqdnHostnames-2059713138 tempest-ServersV294TestFqdnHostnames-2059713138-project-member] Acquired lock "refresh_cache-87462fbe-d62d-4b40-880c-a1785c9ed5d4" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 575.695090] env[62070]: DEBUG nova.network.neutron [None req-abc3cfd8-5b88-48b6-95da-0d8d4239d4c5 tempest-ServersV294TestFqdnHostnames-2059713138 tempest-ServersV294TestFqdnHostnames-2059713138-project-member] [instance: 87462fbe-d62d-4b40-880c-a1785c9ed5d4] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 575.774855] env[62070]: DEBUG nova.compute.manager [None req-7301dc1c-6dd9-44f9-a50e-255658085cb3 tempest-ServerDiagnosticsTest-24419400 tempest-ServerDiagnosticsTest-24419400-project-member] [instance: 317f20e9-6ba1-4b41-b298-5dd844f323ac] Start spawning the instance on the hypervisor. 
{{(pid=62070) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 575.817228] env[62070]: DEBUG nova.virt.hardware [None req-7301dc1c-6dd9-44f9-a50e-255658085cb3 tempest-ServerDiagnosticsTest-24419400 tempest-ServerDiagnosticsTest-24419400-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T09:21:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T09:21:17Z,direct_url=,disk_format='vmdk',id=43ea607c-7ece-4601-9b11-75c6a16aa7dd,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9d42cb2bbadf40d6b35f237f71234611',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T09:21:18Z,virtual_size=,visibility=), allow threads: False {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 575.817406] env[62070]: DEBUG nova.virt.hardware [None req-7301dc1c-6dd9-44f9-a50e-255658085cb3 tempest-ServerDiagnosticsTest-24419400 tempest-ServerDiagnosticsTest-24419400-project-member] Flavor limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 575.817477] env[62070]: DEBUG nova.virt.hardware [None req-7301dc1c-6dd9-44f9-a50e-255658085cb3 tempest-ServerDiagnosticsTest-24419400 tempest-ServerDiagnosticsTest-24419400-project-member] Image limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 575.817654] env[62070]: DEBUG nova.virt.hardware [None req-7301dc1c-6dd9-44f9-a50e-255658085cb3 tempest-ServerDiagnosticsTest-24419400 tempest-ServerDiagnosticsTest-24419400-project-member] Flavor pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 575.817792] env[62070]: DEBUG nova.virt.hardware [None req-7301dc1c-6dd9-44f9-a50e-255658085cb3 tempest-ServerDiagnosticsTest-24419400 tempest-ServerDiagnosticsTest-24419400-project-member] Image pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 575.817931] env[62070]: DEBUG nova.virt.hardware [None req-7301dc1c-6dd9-44f9-a50e-255658085cb3 tempest-ServerDiagnosticsTest-24419400 tempest-ServerDiagnosticsTest-24419400-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 575.818204] env[62070]: DEBUG nova.virt.hardware [None req-7301dc1c-6dd9-44f9-a50e-255658085cb3 tempest-ServerDiagnosticsTest-24419400 tempest-ServerDiagnosticsTest-24419400-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 575.818610] env[62070]: DEBUG nova.virt.hardware [None req-7301dc1c-6dd9-44f9-a50e-255658085cb3 tempest-ServerDiagnosticsTest-24419400 tempest-ServerDiagnosticsTest-24419400-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 575.818610] env[62070]: DEBUG nova.virt.hardware [None req-7301dc1c-6dd9-44f9-a50e-255658085cb3 
tempest-ServerDiagnosticsTest-24419400 tempest-ServerDiagnosticsTest-24419400-project-member] Got 1 possible topologies {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 575.818717] env[62070]: DEBUG nova.virt.hardware [None req-7301dc1c-6dd9-44f9-a50e-255658085cb3 tempest-ServerDiagnosticsTest-24419400 tempest-ServerDiagnosticsTest-24419400-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 575.818862] env[62070]: DEBUG nova.virt.hardware [None req-7301dc1c-6dd9-44f9-a50e-255658085cb3 tempest-ServerDiagnosticsTest-24419400 tempest-ServerDiagnosticsTest-24419400-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 575.819749] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a20ed23-edd0-413f-a61d-48b1981e01d8 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 575.829158] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91199058-3653-41f8-84e8-44dc2cd6ffa6 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 575.910619] env[62070]: DEBUG oslo_concurrency.lockutils [None req-df68df2e-4fc9-4497-809c-48ba87337e66 tempest-AttachInterfacesV270Test-562938472 tempest-AttachInterfacesV270Test-562938472-project-member] Acquiring lock "ad0dd218-5e45-4d22-9d94-5c25ba8b22ec" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 575.910868] env[62070]: DEBUG oslo_concurrency.lockutils [None req-df68df2e-4fc9-4497-809c-48ba87337e66 tempest-AttachInterfacesV270Test-562938472 tempest-AttachInterfacesV270Test-562938472-project-member] Lock "ad0dd218-5e45-4d22-9d94-5c25ba8b22ec" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 575.911256] env[62070]: DEBUG oslo_concurrency.lockutils [None req-ff380498-571f-460a-ac0d-214b8f51e56f tempest-ServersAdminTestJSON-1165817432 tempest-ServersAdminTestJSON-1165817432-project-member] Releasing lock "refresh_cache-5936aded-90fc-4f77-8103-8c9e1912379c" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 575.911626] env[62070]: DEBUG nova.compute.manager [None req-ff380498-571f-460a-ac0d-214b8f51e56f tempest-ServersAdminTestJSON-1165817432 tempest-ServersAdminTestJSON-1165817432-project-member] [instance: 5936aded-90fc-4f77-8103-8c9e1912379c] Start destroying the instance on the hypervisor. 
{{(pid=62070) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 575.911819] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-ff380498-571f-460a-ac0d-214b8f51e56f tempest-ServersAdminTestJSON-1165817432 tempest-ServersAdminTestJSON-1165817432-project-member] [instance: 5936aded-90fc-4f77-8103-8c9e1912379c] Destroying instance {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 575.912437] env[62070]: DEBUG oslo_concurrency.lockutils [req-41e8e2bf-f206-48b7-b626-03c3c89506d2 req-54d495b4-acac-463a-8430-ce3765365abc service nova] Acquired lock "refresh_cache-5936aded-90fc-4f77-8103-8c9e1912379c" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 575.912437] env[62070]: DEBUG nova.network.neutron [req-41e8e2bf-f206-48b7-b626-03c3c89506d2 req-54d495b4-acac-463a-8430-ce3765365abc service nova] [instance: 5936aded-90fc-4f77-8103-8c9e1912379c] Refreshing network info cache for port 611560c7-4689-481c-9fd1-efc64eae1fd4 {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 575.913252] env[62070]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-966d4f76-ec91-4346-b622-a4fc5f45cef8 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 575.922836] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc65d2b3-9ddc-4e61-9494-e1b4021dbdb0 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 575.946386] env[62070]: WARNING nova.virt.vmwareapi.vmops [None req-ff380498-571f-460a-ac0d-214b8f51e56f tempest-ServersAdminTestJSON-1165817432 tempest-ServersAdminTestJSON-1165817432-project-member] [instance: 5936aded-90fc-4f77-8103-8c9e1912379c] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 5936aded-90fc-4f77-8103-8c9e1912379c could not be found. [ 575.946691] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-ff380498-571f-460a-ac0d-214b8f51e56f tempest-ServersAdminTestJSON-1165817432 tempest-ServersAdminTestJSON-1165817432-project-member] [instance: 5936aded-90fc-4f77-8103-8c9e1912379c] Instance destroyed {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 575.946783] env[62070]: INFO nova.compute.manager [None req-ff380498-571f-460a-ac0d-214b8f51e56f tempest-ServersAdminTestJSON-1165817432 tempest-ServersAdminTestJSON-1165817432-project-member] [instance: 5936aded-90fc-4f77-8103-8c9e1912379c] Took 0.03 seconds to destroy the instance on the hypervisor. [ 575.947019] env[62070]: DEBUG oslo.service.loopingcall [None req-ff380498-571f-460a-ac0d-214b8f51e56f tempest-ServersAdminTestJSON-1165817432 tempest-ServersAdminTestJSON-1165817432-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 575.947562] env[62070]: DEBUG nova.compute.manager [-] [instance: 5936aded-90fc-4f77-8103-8c9e1912379c] Deallocating network for instance {{(pid=62070) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 575.947664] env[62070]: DEBUG nova.network.neutron [-] [instance: 5936aded-90fc-4f77-8103-8c9e1912379c] deallocate_for_instance() {{(pid=62070) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 575.962512] env[62070]: DEBUG nova.compute.manager [None req-3399fbf7-c899-4cd9-8ca2-2d4fdce15ab9 tempest-InstanceActionsNegativeTestJSON-1068104952 tempest-InstanceActionsNegativeTestJSON-1068104952-project-member] [instance: 495a15b2-20bd-44d2-8020-816031e89832] Starting instance... {{(pid=62070) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 575.987186] env[62070]: DEBUG nova.network.neutron [-] [instance: 5936aded-90fc-4f77-8103-8c9e1912379c] Instance cache missing network info. {{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 576.158519] env[62070]: DEBUG oslo_vmware.api [None req-e5f1ab2d-b058-40d2-b380-7df2dee6e48f tempest-ServerDiagnosticsV248Test-1284349500 tempest-ServerDiagnosticsV248Test-1284349500-project-member] Task: {'id': task-1121421, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.093438} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 576.158835] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-e5f1ab2d-b058-40d2-b380-7df2dee6e48f tempest-ServerDiagnosticsV248Test-1284349500 tempest-ServerDiagnosticsV248Test-1284349500-project-member] Deleted the datastore file {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 576.159120] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-e5f1ab2d-b058-40d2-b380-7df2dee6e48f tempest-ServerDiagnosticsV248Test-1284349500 tempest-ServerDiagnosticsV248Test-1284349500-project-member] [instance: 283e7488-1240-475f-a74d-809251950774] Deleted contents of the VM from datastore datastore1 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 576.159677] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-e5f1ab2d-b058-40d2-b380-7df2dee6e48f tempest-ServerDiagnosticsV248Test-1284349500 tempest-ServerDiagnosticsV248Test-1284349500-project-member] [instance: 283e7488-1240-475f-a74d-809251950774] Instance destroyed {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 576.159918] env[62070]: INFO nova.compute.manager [None req-e5f1ab2d-b058-40d2-b380-7df2dee6e48f tempest-ServerDiagnosticsV248Test-1284349500 tempest-ServerDiagnosticsV248Test-1284349500-project-member] [instance: 283e7488-1240-475f-a74d-809251950774] Took 1.09 seconds to destroy the instance on the hypervisor. [ 576.160403] env[62070]: DEBUG oslo.service.loopingcall [None req-e5f1ab2d-b058-40d2-b380-7df2dee6e48f tempest-ServerDiagnosticsV248Test-1284349500 tempest-ServerDiagnosticsV248Test-1284349500-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 576.160632] env[62070]: DEBUG nova.compute.manager [-] [instance: 283e7488-1240-475f-a74d-809251950774] Deallocating network for instance {{(pid=62070) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 576.160923] env[62070]: DEBUG nova.network.neutron [-] [instance: 283e7488-1240-475f-a74d-809251950774] deallocate_for_instance() {{(pid=62070) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 576.192067] env[62070]: DEBUG nova.network.neutron [-] [instance: 283e7488-1240-475f-a74d-809251950774] Instance cache missing network info. {{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 576.246820] env[62070]: DEBUG nova.network.neutron [None req-abc3cfd8-5b88-48b6-95da-0d8d4239d4c5 tempest-ServersV294TestFqdnHostnames-2059713138 tempest-ServersV294TestFqdnHostnames-2059713138-project-member] [instance: 87462fbe-d62d-4b40-880c-a1785c9ed5d4] Instance cache missing network info. {{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 576.479205] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9edd1e81-d4eb-4822-b382-1452b1e216e7 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 576.487219] env[62070]: DEBUG oslo_concurrency.lockutils [None req-3399fbf7-c899-4cd9-8ca2-2d4fdce15ab9 tempest-InstanceActionsNegativeTestJSON-1068104952 tempest-InstanceActionsNegativeTestJSON-1068104952-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 576.489837] env[62070]: DEBUG nova.network.neutron [-] [instance: 5936aded-90fc-4f77-8103-8c9e1912379c] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 576.494424] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9af41cfd-9cf2-4383-97eb-5e3308172650 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 576.523356] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cc172ac-4359-48ee-be64-419e5bfff7aa {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 576.531378] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73aa030b-9bb1-4483-b0f7-8194265e567b {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 576.545089] env[62070]: DEBUG nova.compute.provider_tree [None req-cb6fd1fb-d5f4-4d43-abd6-fc98c9bb8564 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 576.568803] env[62070]: DEBUG nova.network.neutron [None req-abc3cfd8-5b88-48b6-95da-0d8d4239d4c5 tempest-ServersV294TestFqdnHostnames-2059713138 tempest-ServersV294TestFqdnHostnames-2059713138-project-member] [instance: 
87462fbe-d62d-4b40-880c-a1785c9ed5d4] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 576.608230] env[62070]: DEBUG nova.network.neutron [req-41e8e2bf-f206-48b7-b626-03c3c89506d2 req-54d495b4-acac-463a-8430-ce3765365abc service nova] [instance: 5936aded-90fc-4f77-8103-8c9e1912379c] Instance cache missing network info. {{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 576.694926] env[62070]: DEBUG nova.network.neutron [-] [instance: 283e7488-1240-475f-a74d-809251950774] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 576.817487] env[62070]: DEBUG nova.network.neutron [None req-7301dc1c-6dd9-44f9-a50e-255658085cb3 tempest-ServerDiagnosticsTest-24419400 tempest-ServerDiagnosticsTest-24419400-project-member] [instance: 317f20e9-6ba1-4b41-b298-5dd844f323ac] Successfully created port: 900622a2-0f27-4e6e-9fd4-8a0a81d8d602 {{(pid=62070) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 576.996158] env[62070]: INFO nova.compute.manager [-] [instance: 5936aded-90fc-4f77-8103-8c9e1912379c] Took 1.05 seconds to deallocate network for instance. [ 577.003471] env[62070]: DEBUG nova.compute.claims [None req-ff380498-571f-460a-ac0d-214b8f51e56f tempest-ServersAdminTestJSON-1165817432 tempest-ServersAdminTestJSON-1165817432-project-member] [instance: 5936aded-90fc-4f77-8103-8c9e1912379c] Aborting claim: {{(pid=62070) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 577.003471] env[62070]: DEBUG oslo_concurrency.lockutils [None req-ff380498-571f-460a-ac0d-214b8f51e56f tempest-ServersAdminTestJSON-1165817432 tempest-ServersAdminTestJSON-1165817432-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 577.034160] env[62070]: DEBUG nova.network.neutron [req-41e8e2bf-f206-48b7-b626-03c3c89506d2 req-54d495b4-acac-463a-8430-ce3765365abc service nova] [instance: 5936aded-90fc-4f77-8103-8c9e1912379c] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 577.051184] env[62070]: DEBUG nova.scheduler.client.report [None req-cb6fd1fb-d5f4-4d43-abd6-fc98c9bb8564 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 577.071979] env[62070]: DEBUG oslo_concurrency.lockutils [None req-abc3cfd8-5b88-48b6-95da-0d8d4239d4c5 tempest-ServersV294TestFqdnHostnames-2059713138 tempest-ServersV294TestFqdnHostnames-2059713138-project-member] Releasing lock "refresh_cache-87462fbe-d62d-4b40-880c-a1785c9ed5d4" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 577.073029] env[62070]: 
DEBUG nova.compute.manager [None req-abc3cfd8-5b88-48b6-95da-0d8d4239d4c5 tempest-ServersV294TestFqdnHostnames-2059713138 tempest-ServersV294TestFqdnHostnames-2059713138-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62070) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 577.073029] env[62070]: DEBUG nova.compute.manager [None req-abc3cfd8-5b88-48b6-95da-0d8d4239d4c5 tempest-ServersV294TestFqdnHostnames-2059713138 tempest-ServersV294TestFqdnHostnames-2059713138-project-member] [instance: 87462fbe-d62d-4b40-880c-a1785c9ed5d4] Deallocating network for instance {{(pid=62070) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 577.073029] env[62070]: DEBUG nova.network.neutron [None req-abc3cfd8-5b88-48b6-95da-0d8d4239d4c5 tempest-ServersV294TestFqdnHostnames-2059713138 tempest-ServersV294TestFqdnHostnames-2059713138-project-member] [instance: 87462fbe-d62d-4b40-880c-a1785c9ed5d4] deallocate_for_instance() {{(pid=62070) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 577.111123] env[62070]: DEBUG nova.network.neutron [None req-abc3cfd8-5b88-48b6-95da-0d8d4239d4c5 tempest-ServersV294TestFqdnHostnames-2059713138 tempest-ServersV294TestFqdnHostnames-2059713138-project-member] [instance: 87462fbe-d62d-4b40-880c-a1785c9ed5d4] Instance cache missing network info. {{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 577.198964] env[62070]: INFO nova.compute.manager [-] [instance: 283e7488-1240-475f-a74d-809251950774] Took 1.04 seconds to deallocate network for instance. [ 577.537047] env[62070]: DEBUG oslo_concurrency.lockutils [req-41e8e2bf-f206-48b7-b626-03c3c89506d2 req-54d495b4-acac-463a-8430-ce3765365abc service nova] Releasing lock "refresh_cache-5936aded-90fc-4f77-8103-8c9e1912379c" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 577.557327] env[62070]: DEBUG oslo_concurrency.lockutils [None req-cb6fd1fb-d5f4-4d43-abd6-fc98c9bb8564 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.864s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 577.557327] env[62070]: ERROR nova.compute.manager [None req-cb6fd1fb-d5f4-4d43-abd6-fc98c9bb8564 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] [instance: deee86eb-365b-4104-8687-72abdbf3807f] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port fb4707e9-7728-45c0-9830-a99c0cf681b3, please check neutron logs for more information. 
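[editor's note] The repeated "Binding failed for port …" failures in this stretch of the log (instances 5936aded, 87462fbe and, just above, deee86eb) all surface through the same path shown in the tracebacks: nova.network.neutron._update_port calls _ensure_no_port_binding_failure(port), which raises nova.exception.PortBindingFailed (neutron.py line 294 in these traces); the compute manager then aborts the resource claim, deallocates networking and re-schedules the build, which is the "Aborting claim" / "was re-scheduled" sequence visible around these errors. Below is a minimal, self-contained sketch of that check, not the verbatim Nova code: the 'binding_failed' vif-type value and the standalone exception class are assumptions used for illustration, and the port id is simply reused from the log.

    # Hedged sketch of the port-binding check behind "Binding failed for port ...".
    # Assumption: Neutron marks an unbindable port by returning
    # 'binding:vif_type' == 'binding_failed'; Nova's real check lives in
    # nova/network/neutron.py and uses nova.exception.PortBindingFailed.

    VIF_TYPE_BINDING_FAILED = 'binding_failed'  # assumed Neutron value


    class PortBindingFailed(Exception):
        """Stand-in for nova.exception.PortBindingFailed."""

        def __init__(self, port_id: str):
            super().__init__(
                f"Binding failed for port {port_id}, "
                "please check neutron logs for more information.")


    def ensure_no_port_binding_failure(port: dict) -> None:
        # Raise if Neutron reports that binding this port failed on the host.
        if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
            raise PortBindingFailed(port_id=port['id'])


    if __name__ == '__main__':
        # Example: a port as Neutron might return it when no mechanism driver
        # could bind it (id taken from the log entry above).
        failed_port = {'id': 'fb4707e9-7728-45c0-9830-a99c0cf681b3',
                       'binding:vif_type': VIF_TYPE_BINDING_FAILED}
        try:
            ensure_no_port_binding_failure(failed_port)
        except PortBindingFailed as exc:
            print(exc)

When this check fires during _allocate_network_async, the spawn on the VMware driver never gets usable VIF info, so the instance is torn down on the hypervisor side (often logging "Instance does not exist on backend") and the request is retried on another host; the root cause is in the Neutron server/agent logs, as the error message says. [end editor's note]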
[ 577.557327] env[62070]: ERROR nova.compute.manager [instance: deee86eb-365b-4104-8687-72abdbf3807f] Traceback (most recent call last): [ 577.557327] env[62070]: ERROR nova.compute.manager [instance: deee86eb-365b-4104-8687-72abdbf3807f] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 577.557327] env[62070]: ERROR nova.compute.manager [instance: deee86eb-365b-4104-8687-72abdbf3807f] self.driver.spawn(context, instance, image_meta, [ 577.557327] env[62070]: ERROR nova.compute.manager [instance: deee86eb-365b-4104-8687-72abdbf3807f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 577.557327] env[62070]: ERROR nova.compute.manager [instance: deee86eb-365b-4104-8687-72abdbf3807f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 577.557327] env[62070]: ERROR nova.compute.manager [instance: deee86eb-365b-4104-8687-72abdbf3807f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 577.557327] env[62070]: ERROR nova.compute.manager [instance: deee86eb-365b-4104-8687-72abdbf3807f] vm_ref = self.build_virtual_machine(instance, [ 577.557620] env[62070]: ERROR nova.compute.manager [instance: deee86eb-365b-4104-8687-72abdbf3807f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 577.557620] env[62070]: ERROR nova.compute.manager [instance: deee86eb-365b-4104-8687-72abdbf3807f] vif_infos = vmwarevif.get_vif_info(self._session, [ 577.557620] env[62070]: ERROR nova.compute.manager [instance: deee86eb-365b-4104-8687-72abdbf3807f] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 577.557620] env[62070]: ERROR nova.compute.manager [instance: deee86eb-365b-4104-8687-72abdbf3807f] for vif in network_info: [ 577.557620] env[62070]: ERROR nova.compute.manager [instance: deee86eb-365b-4104-8687-72abdbf3807f] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 577.557620] env[62070]: ERROR nova.compute.manager [instance: deee86eb-365b-4104-8687-72abdbf3807f] return self._sync_wrapper(fn, *args, **kwargs) [ 577.557620] env[62070]: ERROR nova.compute.manager [instance: deee86eb-365b-4104-8687-72abdbf3807f] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 577.557620] env[62070]: ERROR nova.compute.manager [instance: deee86eb-365b-4104-8687-72abdbf3807f] self.wait() [ 577.557620] env[62070]: ERROR nova.compute.manager [instance: deee86eb-365b-4104-8687-72abdbf3807f] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 577.557620] env[62070]: ERROR nova.compute.manager [instance: deee86eb-365b-4104-8687-72abdbf3807f] self[:] = self._gt.wait() [ 577.557620] env[62070]: ERROR nova.compute.manager [instance: deee86eb-365b-4104-8687-72abdbf3807f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 577.557620] env[62070]: ERROR nova.compute.manager [instance: deee86eb-365b-4104-8687-72abdbf3807f] return self._exit_event.wait() [ 577.557620] env[62070]: ERROR nova.compute.manager [instance: deee86eb-365b-4104-8687-72abdbf3807f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 577.558032] env[62070]: ERROR nova.compute.manager [instance: deee86eb-365b-4104-8687-72abdbf3807f] result = hub.switch() [ 577.558032] env[62070]: ERROR nova.compute.manager [instance: deee86eb-365b-4104-8687-72abdbf3807f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
577.558032] env[62070]: ERROR nova.compute.manager [instance: deee86eb-365b-4104-8687-72abdbf3807f] return self.greenlet.switch() [ 577.558032] env[62070]: ERROR nova.compute.manager [instance: deee86eb-365b-4104-8687-72abdbf3807f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 577.558032] env[62070]: ERROR nova.compute.manager [instance: deee86eb-365b-4104-8687-72abdbf3807f] result = function(*args, **kwargs) [ 577.558032] env[62070]: ERROR nova.compute.manager [instance: deee86eb-365b-4104-8687-72abdbf3807f] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 577.558032] env[62070]: ERROR nova.compute.manager [instance: deee86eb-365b-4104-8687-72abdbf3807f] return func(*args, **kwargs) [ 577.558032] env[62070]: ERROR nova.compute.manager [instance: deee86eb-365b-4104-8687-72abdbf3807f] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 577.558032] env[62070]: ERROR nova.compute.manager [instance: deee86eb-365b-4104-8687-72abdbf3807f] raise e [ 577.558032] env[62070]: ERROR nova.compute.manager [instance: deee86eb-365b-4104-8687-72abdbf3807f] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 577.558032] env[62070]: ERROR nova.compute.manager [instance: deee86eb-365b-4104-8687-72abdbf3807f] nwinfo = self.network_api.allocate_for_instance( [ 577.558032] env[62070]: ERROR nova.compute.manager [instance: deee86eb-365b-4104-8687-72abdbf3807f] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 577.558032] env[62070]: ERROR nova.compute.manager [instance: deee86eb-365b-4104-8687-72abdbf3807f] created_port_ids = self._update_ports_for_instance( [ 577.558355] env[62070]: ERROR nova.compute.manager [instance: deee86eb-365b-4104-8687-72abdbf3807f] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 577.558355] env[62070]: ERROR nova.compute.manager [instance: deee86eb-365b-4104-8687-72abdbf3807f] with excutils.save_and_reraise_exception(): [ 577.558355] env[62070]: ERROR nova.compute.manager [instance: deee86eb-365b-4104-8687-72abdbf3807f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 577.558355] env[62070]: ERROR nova.compute.manager [instance: deee86eb-365b-4104-8687-72abdbf3807f] self.force_reraise() [ 577.558355] env[62070]: ERROR nova.compute.manager [instance: deee86eb-365b-4104-8687-72abdbf3807f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 577.558355] env[62070]: ERROR nova.compute.manager [instance: deee86eb-365b-4104-8687-72abdbf3807f] raise self.value [ 577.558355] env[62070]: ERROR nova.compute.manager [instance: deee86eb-365b-4104-8687-72abdbf3807f] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 577.558355] env[62070]: ERROR nova.compute.manager [instance: deee86eb-365b-4104-8687-72abdbf3807f] updated_port = self._update_port( [ 577.558355] env[62070]: ERROR nova.compute.manager [instance: deee86eb-365b-4104-8687-72abdbf3807f] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 577.558355] env[62070]: ERROR nova.compute.manager [instance: deee86eb-365b-4104-8687-72abdbf3807f] _ensure_no_port_binding_failure(port) [ 577.558355] env[62070]: ERROR nova.compute.manager [instance: deee86eb-365b-4104-8687-72abdbf3807f] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 577.558355] env[62070]: ERROR nova.compute.manager [instance: deee86eb-365b-4104-8687-72abdbf3807f] raise exception.PortBindingFailed(port_id=port['id']) [ 577.558701] env[62070]: ERROR nova.compute.manager [instance: deee86eb-365b-4104-8687-72abdbf3807f] nova.exception.PortBindingFailed: Binding failed for port fb4707e9-7728-45c0-9830-a99c0cf681b3, please check neutron logs for more information. [ 577.558701] env[62070]: ERROR nova.compute.manager [instance: deee86eb-365b-4104-8687-72abdbf3807f] [ 577.558701] env[62070]: DEBUG nova.compute.utils [None req-cb6fd1fb-d5f4-4d43-abd6-fc98c9bb8564 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] [instance: deee86eb-365b-4104-8687-72abdbf3807f] Binding failed for port fb4707e9-7728-45c0-9830-a99c0cf681b3, please check neutron logs for more information. {{(pid=62070) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 577.558701] env[62070]: DEBUG oslo_concurrency.lockutils [None req-26488158-5329-4c5b-9a82-fbfcc788dbaa tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 13.646s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 577.560953] env[62070]: DEBUG nova.compute.manager [None req-cb6fd1fb-d5f4-4d43-abd6-fc98c9bb8564 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] [instance: deee86eb-365b-4104-8687-72abdbf3807f] Build of instance deee86eb-365b-4104-8687-72abdbf3807f was re-scheduled: Binding failed for port fb4707e9-7728-45c0-9830-a99c0cf681b3, please check neutron logs for more information. 
{{(pid=62070) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 577.560953] env[62070]: DEBUG nova.compute.manager [None req-cb6fd1fb-d5f4-4d43-abd6-fc98c9bb8564 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] [instance: deee86eb-365b-4104-8687-72abdbf3807f] Unplugging VIFs for instance {{(pid=62070) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 577.560953] env[62070]: DEBUG oslo_concurrency.lockutils [None req-cb6fd1fb-d5f4-4d43-abd6-fc98c9bb8564 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Acquiring lock "refresh_cache-deee86eb-365b-4104-8687-72abdbf3807f" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 577.563159] env[62070]: DEBUG oslo_concurrency.lockutils [None req-cb6fd1fb-d5f4-4d43-abd6-fc98c9bb8564 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Acquired lock "refresh_cache-deee86eb-365b-4104-8687-72abdbf3807f" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 577.567034] env[62070]: DEBUG nova.network.neutron [None req-cb6fd1fb-d5f4-4d43-abd6-fc98c9bb8564 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] [instance: deee86eb-365b-4104-8687-72abdbf3807f] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 577.615019] env[62070]: DEBUG nova.network.neutron [None req-abc3cfd8-5b88-48b6-95da-0d8d4239d4c5 tempest-ServersV294TestFqdnHostnames-2059713138 tempest-ServersV294TestFqdnHostnames-2059713138-project-member] [instance: 87462fbe-d62d-4b40-880c-a1785c9ed5d4] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 577.708399] env[62070]: DEBUG oslo_concurrency.lockutils [None req-e5f1ab2d-b058-40d2-b380-7df2dee6e48f tempest-ServerDiagnosticsV248Test-1284349500 tempest-ServerDiagnosticsV248Test-1284349500-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 578.119954] env[62070]: INFO nova.compute.manager [None req-abc3cfd8-5b88-48b6-95da-0d8d4239d4c5 tempest-ServersV294TestFqdnHostnames-2059713138 tempest-ServersV294TestFqdnHostnames-2059713138-project-member] [instance: 87462fbe-d62d-4b40-880c-a1785c9ed5d4] Took 1.05 seconds to deallocate network for instance. [ 578.216962] env[62070]: DEBUG nova.network.neutron [None req-cb6fd1fb-d5f4-4d43-abd6-fc98c9bb8564 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] [instance: deee86eb-365b-4104-8687-72abdbf3807f] Instance cache missing network info. 
{{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 578.365876] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fca8fab-8185-4b59-9b54-10965231a364 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.375497] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-496f6a43-aa23-45b1-8369-1eb18dc1722d {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.413347] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c91f9311-09d5-41f7-8bda-8c0f62d6cb4c {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.420989] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d726250-50a8-44b8-8b04-5515b968dd4f {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.436927] env[62070]: DEBUG nova.compute.provider_tree [None req-26488158-5329-4c5b-9a82-fbfcc788dbaa tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 578.461458] env[62070]: DEBUG nova.network.neutron [None req-cb6fd1fb-d5f4-4d43-abd6-fc98c9bb8564 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] [instance: deee86eb-365b-4104-8687-72abdbf3807f] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 578.885774] env[62070]: DEBUG nova.compute.manager [req-6038e90e-376b-4300-a391-919be3ebb7ad req-3c762f4f-a237-4c5d-be2a-4be209ac49e6 service nova] [instance: 5936aded-90fc-4f77-8103-8c9e1912379c] Received event network-vif-deleted-611560c7-4689-481c-9fd1-efc64eae1fd4 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 578.939684] env[62070]: DEBUG nova.scheduler.client.report [None req-26488158-5329-4c5b-9a82-fbfcc788dbaa tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 578.965831] env[62070]: DEBUG oslo_concurrency.lockutils [None req-cb6fd1fb-d5f4-4d43-abd6-fc98c9bb8564 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Releasing lock "refresh_cache-deee86eb-365b-4104-8687-72abdbf3807f" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 578.966258] env[62070]: DEBUG nova.compute.manager [None req-cb6fd1fb-d5f4-4d43-abd6-fc98c9bb8564 tempest-MigrationsAdminTest-1554847369 
tempest-MigrationsAdminTest-1554847369-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62070) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 578.966502] env[62070]: DEBUG nova.compute.manager [None req-cb6fd1fb-d5f4-4d43-abd6-fc98c9bb8564 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] [instance: deee86eb-365b-4104-8687-72abdbf3807f] Deallocating network for instance {{(pid=62070) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 578.966686] env[62070]: DEBUG nova.network.neutron [None req-cb6fd1fb-d5f4-4d43-abd6-fc98c9bb8564 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] [instance: deee86eb-365b-4104-8687-72abdbf3807f] deallocate_for_instance() {{(pid=62070) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 579.024978] env[62070]: DEBUG nova.network.neutron [None req-cb6fd1fb-d5f4-4d43-abd6-fc98c9bb8564 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] [instance: deee86eb-365b-4104-8687-72abdbf3807f] Instance cache missing network info. {{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 579.175697] env[62070]: INFO nova.scheduler.client.report [None req-abc3cfd8-5b88-48b6-95da-0d8d4239d4c5 tempest-ServersV294TestFqdnHostnames-2059713138 tempest-ServersV294TestFqdnHostnames-2059713138-project-member] Deleted allocations for instance 87462fbe-d62d-4b40-880c-a1785c9ed5d4 [ 579.447622] env[62070]: DEBUG oslo_concurrency.lockutils [None req-26488158-5329-4c5b-9a82-fbfcc788dbaa tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.890s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 579.448252] env[62070]: ERROR nova.compute.manager [None req-26488158-5329-4c5b-9a82-fbfcc788dbaa tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 9b95dfcb-718e-478d-85bc-7479be9b67de] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port e3164438-df26-48c5-84eb-9925989f48e6, please check neutron logs for more information. 
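
Editor's note: the instance tracebacks above and below all bottom out in nova/network/neutron.py's _ensure_no_port_binding_failure(port) raising PortBindingFailed(port_id=port['id']). As a reading aid, here is a minimal, self-contained sketch of that kind of guard; the exception class and the 'binding:vif_type' == 'binding_failed' check are simplified stand-ins, not a claim about Nova's exact implementation.

    # Sketch of a port-binding guard in the spirit of
    # nova.network.neutron._ensure_no_port_binding_failure (simplified;
    # the exception class and the vif-type check are assumptions).

    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__(
                "Binding failed for port %s, please check neutron logs "
                "for more information." % port_id)
            self.port_id = port_id


    def ensure_no_port_binding_failure(port):
        """Raise if Neutron reports the port's binding as failed."""
        if port.get('binding:vif_type') == 'binding_failed':
            raise PortBindingFailed(port_id=port['id'])


    if __name__ == '__main__':
        bad_port = {'id': 'fb4707e9-7728-45c0-9830-a99c0cf681b3',
                    'binding:vif_type': 'binding_failed'}
        try:
            ensure_no_port_binding_failure(bad_port)
        except PortBindingFailed as exc:
            print(exc)   # mirrors the message seen in the log entries above
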
[ 579.448252] env[62070]: ERROR nova.compute.manager [instance: 9b95dfcb-718e-478d-85bc-7479be9b67de] Traceback (most recent call last): [ 579.448252] env[62070]: ERROR nova.compute.manager [instance: 9b95dfcb-718e-478d-85bc-7479be9b67de] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 579.448252] env[62070]: ERROR nova.compute.manager [instance: 9b95dfcb-718e-478d-85bc-7479be9b67de] self.driver.spawn(context, instance, image_meta, [ 579.448252] env[62070]: ERROR nova.compute.manager [instance: 9b95dfcb-718e-478d-85bc-7479be9b67de] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 579.448252] env[62070]: ERROR nova.compute.manager [instance: 9b95dfcb-718e-478d-85bc-7479be9b67de] self._vmops.spawn(context, instance, image_meta, injected_files, [ 579.448252] env[62070]: ERROR nova.compute.manager [instance: 9b95dfcb-718e-478d-85bc-7479be9b67de] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 579.448252] env[62070]: ERROR nova.compute.manager [instance: 9b95dfcb-718e-478d-85bc-7479be9b67de] vm_ref = self.build_virtual_machine(instance, [ 579.448252] env[62070]: ERROR nova.compute.manager [instance: 9b95dfcb-718e-478d-85bc-7479be9b67de] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 579.448252] env[62070]: ERROR nova.compute.manager [instance: 9b95dfcb-718e-478d-85bc-7479be9b67de] vif_infos = vmwarevif.get_vif_info(self._session, [ 579.448252] env[62070]: ERROR nova.compute.manager [instance: 9b95dfcb-718e-478d-85bc-7479be9b67de] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 579.448611] env[62070]: ERROR nova.compute.manager [instance: 9b95dfcb-718e-478d-85bc-7479be9b67de] for vif in network_info: [ 579.448611] env[62070]: ERROR nova.compute.manager [instance: 9b95dfcb-718e-478d-85bc-7479be9b67de] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 579.448611] env[62070]: ERROR nova.compute.manager [instance: 9b95dfcb-718e-478d-85bc-7479be9b67de] return self._sync_wrapper(fn, *args, **kwargs) [ 579.448611] env[62070]: ERROR nova.compute.manager [instance: 9b95dfcb-718e-478d-85bc-7479be9b67de] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 579.448611] env[62070]: ERROR nova.compute.manager [instance: 9b95dfcb-718e-478d-85bc-7479be9b67de] self.wait() [ 579.448611] env[62070]: ERROR nova.compute.manager [instance: 9b95dfcb-718e-478d-85bc-7479be9b67de] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 579.448611] env[62070]: ERROR nova.compute.manager [instance: 9b95dfcb-718e-478d-85bc-7479be9b67de] self[:] = self._gt.wait() [ 579.448611] env[62070]: ERROR nova.compute.manager [instance: 9b95dfcb-718e-478d-85bc-7479be9b67de] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 579.448611] env[62070]: ERROR nova.compute.manager [instance: 9b95dfcb-718e-478d-85bc-7479be9b67de] return self._exit_event.wait() [ 579.448611] env[62070]: ERROR nova.compute.manager [instance: 9b95dfcb-718e-478d-85bc-7479be9b67de] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 579.448611] env[62070]: ERROR nova.compute.manager [instance: 9b95dfcb-718e-478d-85bc-7479be9b67de] result = hub.switch() [ 579.448611] env[62070]: ERROR nova.compute.manager [instance: 9b95dfcb-718e-478d-85bc-7479be9b67de] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
579.448611] env[62070]: ERROR nova.compute.manager [instance: 9b95dfcb-718e-478d-85bc-7479be9b67de] return self.greenlet.switch() [ 579.448928] env[62070]: ERROR nova.compute.manager [instance: 9b95dfcb-718e-478d-85bc-7479be9b67de] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 579.448928] env[62070]: ERROR nova.compute.manager [instance: 9b95dfcb-718e-478d-85bc-7479be9b67de] result = function(*args, **kwargs) [ 579.448928] env[62070]: ERROR nova.compute.manager [instance: 9b95dfcb-718e-478d-85bc-7479be9b67de] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 579.448928] env[62070]: ERROR nova.compute.manager [instance: 9b95dfcb-718e-478d-85bc-7479be9b67de] return func(*args, **kwargs) [ 579.448928] env[62070]: ERROR nova.compute.manager [instance: 9b95dfcb-718e-478d-85bc-7479be9b67de] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 579.448928] env[62070]: ERROR nova.compute.manager [instance: 9b95dfcb-718e-478d-85bc-7479be9b67de] raise e [ 579.448928] env[62070]: ERROR nova.compute.manager [instance: 9b95dfcb-718e-478d-85bc-7479be9b67de] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 579.448928] env[62070]: ERROR nova.compute.manager [instance: 9b95dfcb-718e-478d-85bc-7479be9b67de] nwinfo = self.network_api.allocate_for_instance( [ 579.448928] env[62070]: ERROR nova.compute.manager [instance: 9b95dfcb-718e-478d-85bc-7479be9b67de] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 579.448928] env[62070]: ERROR nova.compute.manager [instance: 9b95dfcb-718e-478d-85bc-7479be9b67de] created_port_ids = self._update_ports_for_instance( [ 579.448928] env[62070]: ERROR nova.compute.manager [instance: 9b95dfcb-718e-478d-85bc-7479be9b67de] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 579.448928] env[62070]: ERROR nova.compute.manager [instance: 9b95dfcb-718e-478d-85bc-7479be9b67de] with excutils.save_and_reraise_exception(): [ 579.448928] env[62070]: ERROR nova.compute.manager [instance: 9b95dfcb-718e-478d-85bc-7479be9b67de] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 579.449365] env[62070]: ERROR nova.compute.manager [instance: 9b95dfcb-718e-478d-85bc-7479be9b67de] self.force_reraise() [ 579.449365] env[62070]: ERROR nova.compute.manager [instance: 9b95dfcb-718e-478d-85bc-7479be9b67de] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 579.449365] env[62070]: ERROR nova.compute.manager [instance: 9b95dfcb-718e-478d-85bc-7479be9b67de] raise self.value [ 579.449365] env[62070]: ERROR nova.compute.manager [instance: 9b95dfcb-718e-478d-85bc-7479be9b67de] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 579.449365] env[62070]: ERROR nova.compute.manager [instance: 9b95dfcb-718e-478d-85bc-7479be9b67de] updated_port = self._update_port( [ 579.449365] env[62070]: ERROR nova.compute.manager [instance: 9b95dfcb-718e-478d-85bc-7479be9b67de] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 579.449365] env[62070]: ERROR nova.compute.manager [instance: 9b95dfcb-718e-478d-85bc-7479be9b67de] _ensure_no_port_binding_failure(port) [ 579.449365] env[62070]: ERROR nova.compute.manager [instance: 9b95dfcb-718e-478d-85bc-7479be9b67de] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 579.449365] env[62070]: ERROR nova.compute.manager [instance: 9b95dfcb-718e-478d-85bc-7479be9b67de] raise exception.PortBindingFailed(port_id=port['id']) [ 579.449365] env[62070]: ERROR nova.compute.manager [instance: 9b95dfcb-718e-478d-85bc-7479be9b67de] nova.exception.PortBindingFailed: Binding failed for port e3164438-df26-48c5-84eb-9925989f48e6, please check neutron logs for more information. [ 579.449365] env[62070]: ERROR nova.compute.manager [instance: 9b95dfcb-718e-478d-85bc-7479be9b67de] [ 579.449668] env[62070]: DEBUG nova.compute.utils [None req-26488158-5329-4c5b-9a82-fbfcc788dbaa tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 9b95dfcb-718e-478d-85bc-7479be9b67de] Binding failed for port e3164438-df26-48c5-84eb-9925989f48e6, please check neutron logs for more information. {{(pid=62070) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 579.451582] env[62070]: DEBUG oslo_concurrency.lockutils [None req-e375be83-77df-4817-8661-3151cdf18f15 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 13.722s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 579.456220] env[62070]: DEBUG nova.compute.manager [None req-26488158-5329-4c5b-9a82-fbfcc788dbaa tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 9b95dfcb-718e-478d-85bc-7479be9b67de] Build of instance 9b95dfcb-718e-478d-85bc-7479be9b67de was re-scheduled: Binding failed for port e3164438-df26-48c5-84eb-9925989f48e6, please check neutron logs for more information. 
{{(pid=62070) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 579.456663] env[62070]: DEBUG nova.compute.manager [None req-26488158-5329-4c5b-9a82-fbfcc788dbaa tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 9b95dfcb-718e-478d-85bc-7479be9b67de] Unplugging VIFs for instance {{(pid=62070) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 579.456877] env[62070]: DEBUG oslo_concurrency.lockutils [None req-26488158-5329-4c5b-9a82-fbfcc788dbaa tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Acquiring lock "refresh_cache-9b95dfcb-718e-478d-85bc-7479be9b67de" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 579.458232] env[62070]: DEBUG oslo_concurrency.lockutils [None req-26488158-5329-4c5b-9a82-fbfcc788dbaa tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Acquired lock "refresh_cache-9b95dfcb-718e-478d-85bc-7479be9b67de" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 579.458684] env[62070]: DEBUG nova.network.neutron [None req-26488158-5329-4c5b-9a82-fbfcc788dbaa tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 9b95dfcb-718e-478d-85bc-7479be9b67de] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 579.528868] env[62070]: DEBUG nova.network.neutron [None req-cb6fd1fb-d5f4-4d43-abd6-fc98c9bb8564 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] [instance: deee86eb-365b-4104-8687-72abdbf3807f] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 579.691149] env[62070]: DEBUG oslo_concurrency.lockutils [None req-abc3cfd8-5b88-48b6-95da-0d8d4239d4c5 tempest-ServersV294TestFqdnHostnames-2059713138 tempest-ServersV294TestFqdnHostnames-2059713138-project-member] Lock "87462fbe-d62d-4b40-880c-a1785c9ed5d4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 32.611s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 579.922623] env[62070]: DEBUG oslo_concurrency.lockutils [None req-ae386fad-6b3a-4fae-9284-28954bd3da74 tempest-ServersTestJSON-1024696343 tempest-ServersTestJSON-1024696343-project-member] Acquiring lock "3ee4e051-f51d-4840-a918-fdedad020557" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 579.922869] env[62070]: DEBUG oslo_concurrency.lockutils [None req-ae386fad-6b3a-4fae-9284-28954bd3da74 tempest-ServersTestJSON-1024696343 tempest-ServersTestJSON-1024696343-project-member] Lock "3ee4e051-f51d-4840-a918-fdedad020557" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 580.036015] env[62070]: INFO nova.compute.manager [None req-cb6fd1fb-d5f4-4d43-abd6-fc98c9bb8564 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] [instance: 
deee86eb-365b-4104-8687-72abdbf3807f] Took 1.07 seconds to deallocate network for instance. [ 580.069276] env[62070]: DEBUG nova.network.neutron [None req-26488158-5329-4c5b-9a82-fbfcc788dbaa tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 9b95dfcb-718e-478d-85bc-7479be9b67de] Instance cache missing network info. {{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 580.197769] env[62070]: DEBUG nova.compute.manager [None req-d154b157-ec97-49c4-b004-fc679da21516 tempest-ServersWithSpecificFlavorTestJSON-1618728433 tempest-ServersWithSpecificFlavorTestJSON-1618728433-project-member] [instance: 7d8d4cdc-2b25-46f3-8bc0-1de22fdd005a] Starting instance... {{(pid=62070) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 580.260114] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c198418e-e22a-4676-9fd5-1086d79e88ff {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 580.269363] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f66a5e81-1e85-4c1e-b4a8-fdf990fd9935 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 580.303486] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2627b59f-804c-47fe-9576-7af5648bf1f8 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 580.311180] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fcf8c1e-5b53-45b7-8112-cc0566fa9598 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 580.324560] env[62070]: DEBUG nova.compute.provider_tree [None req-e375be83-77df-4817-8661-3151cdf18f15 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 580.632832] env[62070]: DEBUG nova.network.neutron [None req-26488158-5329-4c5b-9a82-fbfcc788dbaa tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 9b95dfcb-718e-478d-85bc-7479be9b67de] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 580.730381] env[62070]: DEBUG oslo_concurrency.lockutils [None req-d154b157-ec97-49c4-b004-fc679da21516 tempest-ServersWithSpecificFlavorTestJSON-1618728433 tempest-ServersWithSpecificFlavorTestJSON-1618728433-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 580.829528] env[62070]: DEBUG nova.scheduler.client.report [None req-e375be83-77df-4817-8661-3151cdf18f15 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': 
{'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 581.085801] env[62070]: INFO nova.scheduler.client.report [None req-cb6fd1fb-d5f4-4d43-abd6-fc98c9bb8564 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Deleted allocations for instance deee86eb-365b-4104-8687-72abdbf3807f [ 581.140285] env[62070]: DEBUG oslo_concurrency.lockutils [None req-26488158-5329-4c5b-9a82-fbfcc788dbaa tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Releasing lock "refresh_cache-9b95dfcb-718e-478d-85bc-7479be9b67de" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 581.140408] env[62070]: DEBUG nova.compute.manager [None req-26488158-5329-4c5b-9a82-fbfcc788dbaa tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62070) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 581.141635] env[62070]: DEBUG nova.compute.manager [None req-26488158-5329-4c5b-9a82-fbfcc788dbaa tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 9b95dfcb-718e-478d-85bc-7479be9b67de] Deallocating network for instance {{(pid=62070) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 581.141635] env[62070]: DEBUG nova.network.neutron [None req-26488158-5329-4c5b-9a82-fbfcc788dbaa tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 9b95dfcb-718e-478d-85bc-7479be9b67de] deallocate_for_instance() {{(pid=62070) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 581.189949] env[62070]: DEBUG nova.network.neutron [None req-26488158-5329-4c5b-9a82-fbfcc788dbaa tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 9b95dfcb-718e-478d-85bc-7479be9b67de] Instance cache missing network info. {{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 581.342888] env[62070]: DEBUG oslo_concurrency.lockutils [None req-e375be83-77df-4817-8661-3151cdf18f15 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.891s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 581.343560] env[62070]: ERROR nova.compute.manager [None req-e375be83-77df-4817-8661-3151cdf18f15 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] [instance: 9fd18ad4-7c72-4a13-8c29-da660a060020] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 2b87ed7e-d1f9-4892-bf38-4d0846594037, please check neutron logs for more information. 
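
Editor's note: the "Inventory has not changed for provider 21c7c111-..." entries dump the provider's inventory records. The sketch below just applies the usual placement capacity arithmetic, capacity = (total - reserved) * allocation_ratio (with max_unit capping any single allocation), to the values copied from the log; it is an illustration of how to read those records, not placement code.

    # Effective schedulable capacity per resource class, from the inventory
    # data logged above. Formula assumed: (total - reserved) * allocation_ratio.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0, 'max_unit': 16},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0, 'max_unit': 65530},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0, 'max_unit': 169},
    }

    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(f"{rc}: capacity={capacity:.0f}, max single allocation={inv['max_unit']}")
    # VCPU: 192, MEMORY_MB: 196078, DISK_GB: 400
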
[ 581.343560] env[62070]: ERROR nova.compute.manager [instance: 9fd18ad4-7c72-4a13-8c29-da660a060020] Traceback (most recent call last): [ 581.343560] env[62070]: ERROR nova.compute.manager [instance: 9fd18ad4-7c72-4a13-8c29-da660a060020] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 581.343560] env[62070]: ERROR nova.compute.manager [instance: 9fd18ad4-7c72-4a13-8c29-da660a060020] self.driver.spawn(context, instance, image_meta, [ 581.343560] env[62070]: ERROR nova.compute.manager [instance: 9fd18ad4-7c72-4a13-8c29-da660a060020] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 581.343560] env[62070]: ERROR nova.compute.manager [instance: 9fd18ad4-7c72-4a13-8c29-da660a060020] self._vmops.spawn(context, instance, image_meta, injected_files, [ 581.343560] env[62070]: ERROR nova.compute.manager [instance: 9fd18ad4-7c72-4a13-8c29-da660a060020] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 581.343560] env[62070]: ERROR nova.compute.manager [instance: 9fd18ad4-7c72-4a13-8c29-da660a060020] vm_ref = self.build_virtual_machine(instance, [ 581.343560] env[62070]: ERROR nova.compute.manager [instance: 9fd18ad4-7c72-4a13-8c29-da660a060020] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 581.343560] env[62070]: ERROR nova.compute.manager [instance: 9fd18ad4-7c72-4a13-8c29-da660a060020] vif_infos = vmwarevif.get_vif_info(self._session, [ 581.343560] env[62070]: ERROR nova.compute.manager [instance: 9fd18ad4-7c72-4a13-8c29-da660a060020] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 581.344037] env[62070]: ERROR nova.compute.manager [instance: 9fd18ad4-7c72-4a13-8c29-da660a060020] for vif in network_info: [ 581.344037] env[62070]: ERROR nova.compute.manager [instance: 9fd18ad4-7c72-4a13-8c29-da660a060020] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 581.344037] env[62070]: ERROR nova.compute.manager [instance: 9fd18ad4-7c72-4a13-8c29-da660a060020] return self._sync_wrapper(fn, *args, **kwargs) [ 581.344037] env[62070]: ERROR nova.compute.manager [instance: 9fd18ad4-7c72-4a13-8c29-da660a060020] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 581.344037] env[62070]: ERROR nova.compute.manager [instance: 9fd18ad4-7c72-4a13-8c29-da660a060020] self.wait() [ 581.344037] env[62070]: ERROR nova.compute.manager [instance: 9fd18ad4-7c72-4a13-8c29-da660a060020] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 581.344037] env[62070]: ERROR nova.compute.manager [instance: 9fd18ad4-7c72-4a13-8c29-da660a060020] self[:] = self._gt.wait() [ 581.344037] env[62070]: ERROR nova.compute.manager [instance: 9fd18ad4-7c72-4a13-8c29-da660a060020] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 581.344037] env[62070]: ERROR nova.compute.manager [instance: 9fd18ad4-7c72-4a13-8c29-da660a060020] return self._exit_event.wait() [ 581.344037] env[62070]: ERROR nova.compute.manager [instance: 9fd18ad4-7c72-4a13-8c29-da660a060020] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 581.344037] env[62070]: ERROR nova.compute.manager [instance: 9fd18ad4-7c72-4a13-8c29-da660a060020] result = hub.switch() [ 581.344037] env[62070]: ERROR nova.compute.manager [instance: 9fd18ad4-7c72-4a13-8c29-da660a060020] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
581.344037] env[62070]: ERROR nova.compute.manager [instance: 9fd18ad4-7c72-4a13-8c29-da660a060020] return self.greenlet.switch() [ 581.345530] env[62070]: ERROR nova.compute.manager [instance: 9fd18ad4-7c72-4a13-8c29-da660a060020] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 581.345530] env[62070]: ERROR nova.compute.manager [instance: 9fd18ad4-7c72-4a13-8c29-da660a060020] result = function(*args, **kwargs) [ 581.345530] env[62070]: ERROR nova.compute.manager [instance: 9fd18ad4-7c72-4a13-8c29-da660a060020] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 581.345530] env[62070]: ERROR nova.compute.manager [instance: 9fd18ad4-7c72-4a13-8c29-da660a060020] return func(*args, **kwargs) [ 581.345530] env[62070]: ERROR nova.compute.manager [instance: 9fd18ad4-7c72-4a13-8c29-da660a060020] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 581.345530] env[62070]: ERROR nova.compute.manager [instance: 9fd18ad4-7c72-4a13-8c29-da660a060020] raise e [ 581.345530] env[62070]: ERROR nova.compute.manager [instance: 9fd18ad4-7c72-4a13-8c29-da660a060020] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 581.345530] env[62070]: ERROR nova.compute.manager [instance: 9fd18ad4-7c72-4a13-8c29-da660a060020] nwinfo = self.network_api.allocate_for_instance( [ 581.345530] env[62070]: ERROR nova.compute.manager [instance: 9fd18ad4-7c72-4a13-8c29-da660a060020] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 581.345530] env[62070]: ERROR nova.compute.manager [instance: 9fd18ad4-7c72-4a13-8c29-da660a060020] created_port_ids = self._update_ports_for_instance( [ 581.345530] env[62070]: ERROR nova.compute.manager [instance: 9fd18ad4-7c72-4a13-8c29-da660a060020] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 581.345530] env[62070]: ERROR nova.compute.manager [instance: 9fd18ad4-7c72-4a13-8c29-da660a060020] with excutils.save_and_reraise_exception(): [ 581.345530] env[62070]: ERROR nova.compute.manager [instance: 9fd18ad4-7c72-4a13-8c29-da660a060020] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 581.345888] env[62070]: ERROR nova.compute.manager [instance: 9fd18ad4-7c72-4a13-8c29-da660a060020] self.force_reraise() [ 581.345888] env[62070]: ERROR nova.compute.manager [instance: 9fd18ad4-7c72-4a13-8c29-da660a060020] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 581.345888] env[62070]: ERROR nova.compute.manager [instance: 9fd18ad4-7c72-4a13-8c29-da660a060020] raise self.value [ 581.345888] env[62070]: ERROR nova.compute.manager [instance: 9fd18ad4-7c72-4a13-8c29-da660a060020] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 581.345888] env[62070]: ERROR nova.compute.manager [instance: 9fd18ad4-7c72-4a13-8c29-da660a060020] updated_port = self._update_port( [ 581.345888] env[62070]: ERROR nova.compute.manager [instance: 9fd18ad4-7c72-4a13-8c29-da660a060020] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 581.345888] env[62070]: ERROR nova.compute.manager [instance: 9fd18ad4-7c72-4a13-8c29-da660a060020] _ensure_no_port_binding_failure(port) [ 581.345888] env[62070]: ERROR nova.compute.manager [instance: 9fd18ad4-7c72-4a13-8c29-da660a060020] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 581.345888] env[62070]: ERROR nova.compute.manager [instance: 9fd18ad4-7c72-4a13-8c29-da660a060020] raise exception.PortBindingFailed(port_id=port['id']) [ 581.345888] env[62070]: ERROR nova.compute.manager [instance: 9fd18ad4-7c72-4a13-8c29-da660a060020] nova.exception.PortBindingFailed: Binding failed for port 2b87ed7e-d1f9-4892-bf38-4d0846594037, please check neutron logs for more information. [ 581.345888] env[62070]: ERROR nova.compute.manager [instance: 9fd18ad4-7c72-4a13-8c29-da660a060020] [ 581.346366] env[62070]: DEBUG nova.compute.utils [None req-e375be83-77df-4817-8661-3151cdf18f15 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] [instance: 9fd18ad4-7c72-4a13-8c29-da660a060020] Binding failed for port 2b87ed7e-d1f9-4892-bf38-4d0846594037, please check neutron logs for more information. {{(pid=62070) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 581.346683] env[62070]: DEBUG oslo_concurrency.lockutils [None req-ef89e838-db76-4acf-b7ea-6f620e71b226 tempest-DeleteServersAdminTestJSON-428050376 tempest-DeleteServersAdminTestJSON-428050376-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 12.959s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 581.351821] env[62070]: DEBUG nova.compute.manager [None req-e375be83-77df-4817-8661-3151cdf18f15 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] [instance: 9fd18ad4-7c72-4a13-8c29-da660a060020] Build of instance 9fd18ad4-7c72-4a13-8c29-da660a060020 was re-scheduled: Binding failed for port 2b87ed7e-d1f9-4892-bf38-4d0846594037, please check neutron logs for more information. 
{{(pid=62070) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 581.351821] env[62070]: DEBUG nova.compute.manager [None req-e375be83-77df-4817-8661-3151cdf18f15 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] [instance: 9fd18ad4-7c72-4a13-8c29-da660a060020] Unplugging VIFs for instance {{(pid=62070) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 581.351821] env[62070]: DEBUG oslo_concurrency.lockutils [None req-e375be83-77df-4817-8661-3151cdf18f15 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Acquiring lock "refresh_cache-9fd18ad4-7c72-4a13-8c29-da660a060020" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 581.351821] env[62070]: DEBUG oslo_concurrency.lockutils [None req-e375be83-77df-4817-8661-3151cdf18f15 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Acquired lock "refresh_cache-9fd18ad4-7c72-4a13-8c29-da660a060020" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 581.352173] env[62070]: DEBUG nova.network.neutron [None req-e375be83-77df-4817-8661-3151cdf18f15 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] [instance: 9fd18ad4-7c72-4a13-8c29-da660a060020] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 581.593632] env[62070]: DEBUG oslo_concurrency.lockutils [None req-cb6fd1fb-d5f4-4d43-abd6-fc98c9bb8564 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Lock "deee86eb-365b-4104-8687-72abdbf3807f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 33.899s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 581.636114] env[62070]: ERROR nova.compute.manager [None req-7301dc1c-6dd9-44f9-a50e-255658085cb3 tempest-ServerDiagnosticsTest-24419400 tempest-ServerDiagnosticsTest-24419400-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 900622a2-0f27-4e6e-9fd4-8a0a81d8d602, please check neutron logs for more information. 
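
Editor's note: the recurring "Acquiring lock ... / Lock ... acquired by ... waited Ns / ... released ... held Ns" triplets come from oslo.concurrency's lockutils. My reading is that the 'acquired by "<function>"' lines correspond to the synchronized decorator and the 'Acquiring/Acquired/Releasing lock "refresh_cache-<uuid>"' lines to the lock() context manager; the sketch below only shows the two usage patterns, with lock names taken from the log.

    # Minimal sketch of the two oslo.concurrency locking patterns visible
    # in this log; names are illustrative, taken from the log lines above.
    from oslo_concurrency import lockutils


    @lockutils.synchronized('compute_resources')
    def update_usage():
        # serialized against other resource-tracker operations in this worker
        pass


    def refresh_network_cache(instance_uuid):
        with lockutils.lock('refresh_cache-%s' % instance_uuid):
            # only one refresh of this instance's network info cache at a time
            pass


    update_usage()
    refresh_network_cache('deee86eb-365b-4104-8687-72abdbf3807f')
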
[ 581.636114] env[62070]: ERROR nova.compute.manager Traceback (most recent call last): [ 581.636114] env[62070]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 581.636114] env[62070]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 581.636114] env[62070]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 581.636114] env[62070]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 581.636114] env[62070]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 581.636114] env[62070]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 581.636114] env[62070]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 581.636114] env[62070]: ERROR nova.compute.manager self.force_reraise() [ 581.636114] env[62070]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 581.636114] env[62070]: ERROR nova.compute.manager raise self.value [ 581.636114] env[62070]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 581.636114] env[62070]: ERROR nova.compute.manager updated_port = self._update_port( [ 581.636114] env[62070]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 581.636114] env[62070]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 581.636745] env[62070]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 581.636745] env[62070]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 581.636745] env[62070]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 900622a2-0f27-4e6e-9fd4-8a0a81d8d602, please check neutron logs for more information. 
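
Editor's note: every traceback in this section passes through oslo_utils.excutils.save_and_reraise_exception() (the __exit__ / force_reraise / raise self.value frames). That context manager lets cleanup run while an exception is in flight and then re-raises the original exception unchanged, which is why PortBindingFailed still reaches the compute manager. A minimal usage sketch follows; update_port and cleanup_ports are hypothetical stand-ins, not Nova functions.

    # Demonstration of the excutils frames seen in these tracebacks:
    # cleanup runs inside the context manager, then the saved exception
    # is re-raised (force_reraise) so the caller sees the original error.
    from oslo_utils import excutils


    def update_port(port):
        raise RuntimeError('stand-in for a neutron port-binding failure')


    def cleanup_ports(created):
        pass  # hypothetical cleanup step


    def update_ports(ports):
        created = []
        for port in ports:
            try:
                created.append(update_port(port))
            except Exception:
                with excutils.save_and_reraise_exception():
                    cleanup_ports(created)


    try:
        update_ports([{'id': 'p1'}])
    except RuntimeError as exc:
        print('original exception preserved:', exc)
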
[ 581.636745] env[62070]: ERROR nova.compute.manager [ 581.636745] env[62070]: Traceback (most recent call last): [ 581.636745] env[62070]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 581.636745] env[62070]: listener.cb(fileno) [ 581.636745] env[62070]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 581.636745] env[62070]: result = function(*args, **kwargs) [ 581.636745] env[62070]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 581.636745] env[62070]: return func(*args, **kwargs) [ 581.636745] env[62070]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 581.636745] env[62070]: raise e [ 581.636745] env[62070]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 581.636745] env[62070]: nwinfo = self.network_api.allocate_for_instance( [ 581.636745] env[62070]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 581.636745] env[62070]: created_port_ids = self._update_ports_for_instance( [ 581.636745] env[62070]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 581.636745] env[62070]: with excutils.save_and_reraise_exception(): [ 581.636745] env[62070]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 581.636745] env[62070]: self.force_reraise() [ 581.636745] env[62070]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 581.636745] env[62070]: raise self.value [ 581.636745] env[62070]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 581.636745] env[62070]: updated_port = self._update_port( [ 581.636745] env[62070]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 581.636745] env[62070]: _ensure_no_port_binding_failure(port) [ 581.636745] env[62070]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 581.636745] env[62070]: raise exception.PortBindingFailed(port_id=port['id']) [ 581.637670] env[62070]: nova.exception.PortBindingFailed: Binding failed for port 900622a2-0f27-4e6e-9fd4-8a0a81d8d602, please check neutron logs for more information. [ 581.637670] env[62070]: Removing descriptor: 19 [ 581.637670] env[62070]: ERROR nova.compute.manager [None req-7301dc1c-6dd9-44f9-a50e-255658085cb3 tempest-ServerDiagnosticsTest-24419400 tempest-ServerDiagnosticsTest-24419400-project-member] [instance: 317f20e9-6ba1-4b41-b298-5dd844f323ac] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 900622a2-0f27-4e6e-9fd4-8a0a81d8d602, please check neutron logs for more information. 
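
Editor's note: the upper half of each instance traceback (model.py __iter__ -> _sync_wrapper -> wait -> self._gt.wait()) is eventlet's deferred-exception behaviour: network allocation runs in a green thread, and the PortBindingFailed raised there only surfaces once the VMware driver iterates network_info and the wrapper calls wait(). A standalone demonstration of that mechanism (not Nova code):

    # Why the spawn-side failure shows up under vmwareapi/vif.py: the
    # exception is raised in a green thread and re-raised later, when the
    # consumer calls wait() on the GreenThread.
    import eventlet


    def allocate_network():
        # stand-in for _allocate_network_async ending in PortBindingFailed
        raise RuntimeError('Binding failed for port <uuid>')


    gt = eventlet.spawn(allocate_network)

    # ... the build continues; later the driver needs the network info ...
    try:
        gt.wait()          # the stored exception is re-raised here
    except RuntimeError as exc:
        print('surfaced at wait():', exc)
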
[ 581.637670] env[62070]: ERROR nova.compute.manager [instance: 317f20e9-6ba1-4b41-b298-5dd844f323ac] Traceback (most recent call last): [ 581.637670] env[62070]: ERROR nova.compute.manager [instance: 317f20e9-6ba1-4b41-b298-5dd844f323ac] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 581.637670] env[62070]: ERROR nova.compute.manager [instance: 317f20e9-6ba1-4b41-b298-5dd844f323ac] yield resources [ 581.637670] env[62070]: ERROR nova.compute.manager [instance: 317f20e9-6ba1-4b41-b298-5dd844f323ac] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 581.637670] env[62070]: ERROR nova.compute.manager [instance: 317f20e9-6ba1-4b41-b298-5dd844f323ac] self.driver.spawn(context, instance, image_meta, [ 581.637670] env[62070]: ERROR nova.compute.manager [instance: 317f20e9-6ba1-4b41-b298-5dd844f323ac] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 581.637670] env[62070]: ERROR nova.compute.manager [instance: 317f20e9-6ba1-4b41-b298-5dd844f323ac] self._vmops.spawn(context, instance, image_meta, injected_files, [ 581.637670] env[62070]: ERROR nova.compute.manager [instance: 317f20e9-6ba1-4b41-b298-5dd844f323ac] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 581.637670] env[62070]: ERROR nova.compute.manager [instance: 317f20e9-6ba1-4b41-b298-5dd844f323ac] vm_ref = self.build_virtual_machine(instance, [ 581.638068] env[62070]: ERROR nova.compute.manager [instance: 317f20e9-6ba1-4b41-b298-5dd844f323ac] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 581.638068] env[62070]: ERROR nova.compute.manager [instance: 317f20e9-6ba1-4b41-b298-5dd844f323ac] vif_infos = vmwarevif.get_vif_info(self._session, [ 581.638068] env[62070]: ERROR nova.compute.manager [instance: 317f20e9-6ba1-4b41-b298-5dd844f323ac] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 581.638068] env[62070]: ERROR nova.compute.manager [instance: 317f20e9-6ba1-4b41-b298-5dd844f323ac] for vif in network_info: [ 581.638068] env[62070]: ERROR nova.compute.manager [instance: 317f20e9-6ba1-4b41-b298-5dd844f323ac] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 581.638068] env[62070]: ERROR nova.compute.manager [instance: 317f20e9-6ba1-4b41-b298-5dd844f323ac] return self._sync_wrapper(fn, *args, **kwargs) [ 581.638068] env[62070]: ERROR nova.compute.manager [instance: 317f20e9-6ba1-4b41-b298-5dd844f323ac] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 581.638068] env[62070]: ERROR nova.compute.manager [instance: 317f20e9-6ba1-4b41-b298-5dd844f323ac] self.wait() [ 581.638068] env[62070]: ERROR nova.compute.manager [instance: 317f20e9-6ba1-4b41-b298-5dd844f323ac] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 581.638068] env[62070]: ERROR nova.compute.manager [instance: 317f20e9-6ba1-4b41-b298-5dd844f323ac] self[:] = self._gt.wait() [ 581.638068] env[62070]: ERROR nova.compute.manager [instance: 317f20e9-6ba1-4b41-b298-5dd844f323ac] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 581.638068] env[62070]: ERROR nova.compute.manager [instance: 317f20e9-6ba1-4b41-b298-5dd844f323ac] return self._exit_event.wait() [ 581.638068] env[62070]: ERROR nova.compute.manager [instance: 317f20e9-6ba1-4b41-b298-5dd844f323ac] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 581.638402] env[62070]: ERROR 
nova.compute.manager [instance: 317f20e9-6ba1-4b41-b298-5dd844f323ac] result = hub.switch() [ 581.638402] env[62070]: ERROR nova.compute.manager [instance: 317f20e9-6ba1-4b41-b298-5dd844f323ac] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 581.638402] env[62070]: ERROR nova.compute.manager [instance: 317f20e9-6ba1-4b41-b298-5dd844f323ac] return self.greenlet.switch() [ 581.638402] env[62070]: ERROR nova.compute.manager [instance: 317f20e9-6ba1-4b41-b298-5dd844f323ac] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 581.638402] env[62070]: ERROR nova.compute.manager [instance: 317f20e9-6ba1-4b41-b298-5dd844f323ac] result = function(*args, **kwargs) [ 581.638402] env[62070]: ERROR nova.compute.manager [instance: 317f20e9-6ba1-4b41-b298-5dd844f323ac] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 581.638402] env[62070]: ERROR nova.compute.manager [instance: 317f20e9-6ba1-4b41-b298-5dd844f323ac] return func(*args, **kwargs) [ 581.638402] env[62070]: ERROR nova.compute.manager [instance: 317f20e9-6ba1-4b41-b298-5dd844f323ac] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 581.638402] env[62070]: ERROR nova.compute.manager [instance: 317f20e9-6ba1-4b41-b298-5dd844f323ac] raise e [ 581.638402] env[62070]: ERROR nova.compute.manager [instance: 317f20e9-6ba1-4b41-b298-5dd844f323ac] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 581.638402] env[62070]: ERROR nova.compute.manager [instance: 317f20e9-6ba1-4b41-b298-5dd844f323ac] nwinfo = self.network_api.allocate_for_instance( [ 581.638402] env[62070]: ERROR nova.compute.manager [instance: 317f20e9-6ba1-4b41-b298-5dd844f323ac] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 581.638402] env[62070]: ERROR nova.compute.manager [instance: 317f20e9-6ba1-4b41-b298-5dd844f323ac] created_port_ids = self._update_ports_for_instance( [ 581.638748] env[62070]: ERROR nova.compute.manager [instance: 317f20e9-6ba1-4b41-b298-5dd844f323ac] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 581.638748] env[62070]: ERROR nova.compute.manager [instance: 317f20e9-6ba1-4b41-b298-5dd844f323ac] with excutils.save_and_reraise_exception(): [ 581.638748] env[62070]: ERROR nova.compute.manager [instance: 317f20e9-6ba1-4b41-b298-5dd844f323ac] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 581.638748] env[62070]: ERROR nova.compute.manager [instance: 317f20e9-6ba1-4b41-b298-5dd844f323ac] self.force_reraise() [ 581.638748] env[62070]: ERROR nova.compute.manager [instance: 317f20e9-6ba1-4b41-b298-5dd844f323ac] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 581.638748] env[62070]: ERROR nova.compute.manager [instance: 317f20e9-6ba1-4b41-b298-5dd844f323ac] raise self.value [ 581.638748] env[62070]: ERROR nova.compute.manager [instance: 317f20e9-6ba1-4b41-b298-5dd844f323ac] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 581.638748] env[62070]: ERROR nova.compute.manager [instance: 317f20e9-6ba1-4b41-b298-5dd844f323ac] updated_port = self._update_port( [ 581.638748] env[62070]: ERROR nova.compute.manager [instance: 317f20e9-6ba1-4b41-b298-5dd844f323ac] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 581.638748] 
env[62070]: ERROR nova.compute.manager [instance: 317f20e9-6ba1-4b41-b298-5dd844f323ac] _ensure_no_port_binding_failure(port) [ 581.638748] env[62070]: ERROR nova.compute.manager [instance: 317f20e9-6ba1-4b41-b298-5dd844f323ac] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 581.638748] env[62070]: ERROR nova.compute.manager [instance: 317f20e9-6ba1-4b41-b298-5dd844f323ac] raise exception.PortBindingFailed(port_id=port['id']) [ 581.639088] env[62070]: ERROR nova.compute.manager [instance: 317f20e9-6ba1-4b41-b298-5dd844f323ac] nova.exception.PortBindingFailed: Binding failed for port 900622a2-0f27-4e6e-9fd4-8a0a81d8d602, please check neutron logs for more information. [ 581.639088] env[62070]: ERROR nova.compute.manager [instance: 317f20e9-6ba1-4b41-b298-5dd844f323ac] [ 581.639088] env[62070]: INFO nova.compute.manager [None req-7301dc1c-6dd9-44f9-a50e-255658085cb3 tempest-ServerDiagnosticsTest-24419400 tempest-ServerDiagnosticsTest-24419400-project-member] [instance: 317f20e9-6ba1-4b41-b298-5dd844f323ac] Terminating instance [ 581.640310] env[62070]: DEBUG oslo_concurrency.lockutils [None req-7301dc1c-6dd9-44f9-a50e-255658085cb3 tempest-ServerDiagnosticsTest-24419400 tempest-ServerDiagnosticsTest-24419400-project-member] Acquiring lock "refresh_cache-317f20e9-6ba1-4b41-b298-5dd844f323ac" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 581.640508] env[62070]: DEBUG oslo_concurrency.lockutils [None req-7301dc1c-6dd9-44f9-a50e-255658085cb3 tempest-ServerDiagnosticsTest-24419400 tempest-ServerDiagnosticsTest-24419400-project-member] Acquired lock "refresh_cache-317f20e9-6ba1-4b41-b298-5dd844f323ac" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 581.640738] env[62070]: DEBUG nova.network.neutron [None req-7301dc1c-6dd9-44f9-a50e-255658085cb3 tempest-ServerDiagnosticsTest-24419400 tempest-ServerDiagnosticsTest-24419400-project-member] [instance: 317f20e9-6ba1-4b41-b298-5dd844f323ac] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 581.696899] env[62070]: DEBUG nova.network.neutron [None req-26488158-5329-4c5b-9a82-fbfcc788dbaa tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 9b95dfcb-718e-478d-85bc-7479be9b67de] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 581.893549] env[62070]: DEBUG nova.network.neutron [None req-e375be83-77df-4817-8661-3151cdf18f15 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] [instance: 9fd18ad4-7c72-4a13-8c29-da660a060020] Instance cache missing network info. 
{{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 582.080411] env[62070]: DEBUG nova.compute.manager [req-74d6791c-40e1-4b8a-968f-ca21269832af req-c7ca4609-cba3-4328-a590-a810cce7fee4 service nova] [instance: 317f20e9-6ba1-4b41-b298-5dd844f323ac] Received event network-changed-900622a2-0f27-4e6e-9fd4-8a0a81d8d602 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 582.080411] env[62070]: DEBUG nova.compute.manager [req-74d6791c-40e1-4b8a-968f-ca21269832af req-c7ca4609-cba3-4328-a590-a810cce7fee4 service nova] [instance: 317f20e9-6ba1-4b41-b298-5dd844f323ac] Refreshing instance network info cache due to event network-changed-900622a2-0f27-4e6e-9fd4-8a0a81d8d602. {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 582.080411] env[62070]: DEBUG oslo_concurrency.lockutils [req-74d6791c-40e1-4b8a-968f-ca21269832af req-c7ca4609-cba3-4328-a590-a810cce7fee4 service nova] Acquiring lock "refresh_cache-317f20e9-6ba1-4b41-b298-5dd844f323ac" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 582.101018] env[62070]: DEBUG nova.compute.manager [None req-235f2fca-0298-44f0-b867-2cd232992478 tempest-TenantUsagesTestJSON-193686055 tempest-TenantUsagesTestJSON-193686055-project-member] [instance: 2226072d-16f2-4ea1-a56c-d866554c7379] Starting instance... {{(pid=62070) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 582.136356] env[62070]: DEBUG nova.network.neutron [None req-e375be83-77df-4817-8661-3151cdf18f15 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] [instance: 9fd18ad4-7c72-4a13-8c29-da660a060020] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 582.177613] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae8313b2-0f93-4c55-b4e1-6e57aa4b0b4a {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 582.182864] env[62070]: DEBUG nova.network.neutron [None req-7301dc1c-6dd9-44f9-a50e-255658085cb3 tempest-ServerDiagnosticsTest-24419400 tempest-ServerDiagnosticsTest-24419400-project-member] [instance: 317f20e9-6ba1-4b41-b298-5dd844f323ac] Instance cache missing network info. {{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 582.191886] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29126c38-4e35-4a90-a13f-073ce420fef3 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 582.225035] env[62070]: INFO nova.compute.manager [None req-26488158-5329-4c5b-9a82-fbfcc788dbaa tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 9b95dfcb-718e-478d-85bc-7479be9b67de] Took 1.08 seconds to deallocate network for instance. 
[ 582.232877] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa447713-4577-4291-870e-6057cc247329 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 582.241560] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f2edd94-eca7-48f5-adc4-8eb34c97a012 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 582.261430] env[62070]: DEBUG nova.compute.provider_tree [None req-ef89e838-db76-4acf-b7ea-6f620e71b226 tempest-DeleteServersAdminTestJSON-428050376 tempest-DeleteServersAdminTestJSON-428050376-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 582.345348] env[62070]: DEBUG nova.network.neutron [None req-7301dc1c-6dd9-44f9-a50e-255658085cb3 tempest-ServerDiagnosticsTest-24419400 tempest-ServerDiagnosticsTest-24419400-project-member] [instance: 317f20e9-6ba1-4b41-b298-5dd844f323ac] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 582.631178] env[62070]: DEBUG oslo_concurrency.lockutils [None req-235f2fca-0298-44f0-b867-2cd232992478 tempest-TenantUsagesTestJSON-193686055 tempest-TenantUsagesTestJSON-193686055-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 582.642646] env[62070]: DEBUG oslo_concurrency.lockutils [None req-e375be83-77df-4817-8661-3151cdf18f15 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Releasing lock "refresh_cache-9fd18ad4-7c72-4a13-8c29-da660a060020" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 582.643052] env[62070]: DEBUG nova.compute.manager [None req-e375be83-77df-4817-8661-3151cdf18f15 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62070) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 582.643149] env[62070]: DEBUG nova.compute.manager [None req-e375be83-77df-4817-8661-3151cdf18f15 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] [instance: 9fd18ad4-7c72-4a13-8c29-da660a060020] Deallocating network for instance {{(pid=62070) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 582.643234] env[62070]: DEBUG nova.network.neutron [None req-e375be83-77df-4817-8661-3151cdf18f15 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] [instance: 9fd18ad4-7c72-4a13-8c29-da660a060020] deallocate_for_instance() {{(pid=62070) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 582.665738] env[62070]: DEBUG nova.network.neutron [None req-e375be83-77df-4817-8661-3151cdf18f15 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] [instance: 9fd18ad4-7c72-4a13-8c29-da660a060020] Instance cache missing network info. 
{{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 582.720502] env[62070]: DEBUG oslo_concurrency.lockutils [None req-96d2c368-fe9f-4ee6-a530-cc95bf35b01a tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Acquiring lock "73ab65b7-32e7-4206-8f31-466085319c71" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 582.720806] env[62070]: DEBUG oslo_concurrency.lockutils [None req-96d2c368-fe9f-4ee6-a530-cc95bf35b01a tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Lock "73ab65b7-32e7-4206-8f31-466085319c71" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 582.771242] env[62070]: DEBUG nova.scheduler.client.report [None req-ef89e838-db76-4acf-b7ea-6f620e71b226 tempest-DeleteServersAdminTestJSON-428050376 tempest-DeleteServersAdminTestJSON-428050376-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 582.848570] env[62070]: DEBUG oslo_concurrency.lockutils [None req-7301dc1c-6dd9-44f9-a50e-255658085cb3 tempest-ServerDiagnosticsTest-24419400 tempest-ServerDiagnosticsTest-24419400-project-member] Releasing lock "refresh_cache-317f20e9-6ba1-4b41-b298-5dd844f323ac" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 582.853347] env[62070]: DEBUG nova.compute.manager [None req-7301dc1c-6dd9-44f9-a50e-255658085cb3 tempest-ServerDiagnosticsTest-24419400 tempest-ServerDiagnosticsTest-24419400-project-member] [instance: 317f20e9-6ba1-4b41-b298-5dd844f323ac] Start destroying the instance on the hypervisor. 
{{(pid=62070) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 582.853347] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-7301dc1c-6dd9-44f9-a50e-255658085cb3 tempest-ServerDiagnosticsTest-24419400 tempest-ServerDiagnosticsTest-24419400-project-member] [instance: 317f20e9-6ba1-4b41-b298-5dd844f323ac] Destroying instance {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 582.853347] env[62070]: DEBUG oslo_concurrency.lockutils [req-74d6791c-40e1-4b8a-968f-ca21269832af req-c7ca4609-cba3-4328-a590-a810cce7fee4 service nova] Acquired lock "refresh_cache-317f20e9-6ba1-4b41-b298-5dd844f323ac" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 582.853347] env[62070]: DEBUG nova.network.neutron [req-74d6791c-40e1-4b8a-968f-ca21269832af req-c7ca4609-cba3-4328-a590-a810cce7fee4 service nova] [instance: 317f20e9-6ba1-4b41-b298-5dd844f323ac] Refreshing network info cache for port 900622a2-0f27-4e6e-9fd4-8a0a81d8d602 {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 582.854508] env[62070]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-15c1dfce-b6fe-477e-9e69-cd825cf5c2ec {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 582.865097] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b3c80ba-65b2-46e7-9426-e36d671ca985 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 582.887771] env[62070]: WARNING nova.virt.vmwareapi.vmops [None req-7301dc1c-6dd9-44f9-a50e-255658085cb3 tempest-ServerDiagnosticsTest-24419400 tempest-ServerDiagnosticsTest-24419400-project-member] [instance: 317f20e9-6ba1-4b41-b298-5dd844f323ac] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 317f20e9-6ba1-4b41-b298-5dd844f323ac could not be found. [ 582.888134] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-7301dc1c-6dd9-44f9-a50e-255658085cb3 tempest-ServerDiagnosticsTest-24419400 tempest-ServerDiagnosticsTest-24419400-project-member] [instance: 317f20e9-6ba1-4b41-b298-5dd844f323ac] Instance destroyed {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 582.888221] env[62070]: INFO nova.compute.manager [None req-7301dc1c-6dd9-44f9-a50e-255658085cb3 tempest-ServerDiagnosticsTest-24419400 tempest-ServerDiagnosticsTest-24419400-project-member] [instance: 317f20e9-6ba1-4b41-b298-5dd844f323ac] Took 0.04 seconds to destroy the instance on the hypervisor. [ 582.888517] env[62070]: DEBUG oslo.service.loopingcall [None req-7301dc1c-6dd9-44f9-a50e-255658085cb3 tempest-ServerDiagnosticsTest-24419400 tempest-ServerDiagnosticsTest-24419400-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 582.888819] env[62070]: DEBUG nova.compute.manager [-] [instance: 317f20e9-6ba1-4b41-b298-5dd844f323ac] Deallocating network for instance {{(pid=62070) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 582.888972] env[62070]: DEBUG nova.network.neutron [-] [instance: 317f20e9-6ba1-4b41-b298-5dd844f323ac] deallocate_for_instance() {{(pid=62070) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 583.078119] env[62070]: DEBUG nova.network.neutron [-] [instance: 317f20e9-6ba1-4b41-b298-5dd844f323ac] Instance cache missing network info. {{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 583.170601] env[62070]: DEBUG nova.network.neutron [None req-e375be83-77df-4817-8661-3151cdf18f15 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] [instance: 9fd18ad4-7c72-4a13-8c29-da660a060020] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 583.265668] env[62070]: INFO nova.scheduler.client.report [None req-26488158-5329-4c5b-9a82-fbfcc788dbaa tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Deleted allocations for instance 9b95dfcb-718e-478d-85bc-7479be9b67de [ 583.286908] env[62070]: DEBUG oslo_concurrency.lockutils [None req-ef89e838-db76-4acf-b7ea-6f620e71b226 tempest-DeleteServersAdminTestJSON-428050376 tempest-DeleteServersAdminTestJSON-428050376-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.940s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 583.288660] env[62070]: ERROR nova.compute.manager [None req-ef89e838-db76-4acf-b7ea-6f620e71b226 tempest-DeleteServersAdminTestJSON-428050376 tempest-DeleteServersAdminTestJSON-428050376-project-member] [instance: 768b0b5f-7d20-4bc6-87f6-b66adcce42c6] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port e3ce78d3-0012-4892-8bad-29f3337f38db, please check neutron logs for more information. 
[ 583.288660] env[62070]: ERROR nova.compute.manager [instance: 768b0b5f-7d20-4bc6-87f6-b66adcce42c6] Traceback (most recent call last): [ 583.288660] env[62070]: ERROR nova.compute.manager [instance: 768b0b5f-7d20-4bc6-87f6-b66adcce42c6] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 583.288660] env[62070]: ERROR nova.compute.manager [instance: 768b0b5f-7d20-4bc6-87f6-b66adcce42c6] self.driver.spawn(context, instance, image_meta, [ 583.288660] env[62070]: ERROR nova.compute.manager [instance: 768b0b5f-7d20-4bc6-87f6-b66adcce42c6] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 583.288660] env[62070]: ERROR nova.compute.manager [instance: 768b0b5f-7d20-4bc6-87f6-b66adcce42c6] self._vmops.spawn(context, instance, image_meta, injected_files, [ 583.288660] env[62070]: ERROR nova.compute.manager [instance: 768b0b5f-7d20-4bc6-87f6-b66adcce42c6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 583.288660] env[62070]: ERROR nova.compute.manager [instance: 768b0b5f-7d20-4bc6-87f6-b66adcce42c6] vm_ref = self.build_virtual_machine(instance, [ 583.288660] env[62070]: ERROR nova.compute.manager [instance: 768b0b5f-7d20-4bc6-87f6-b66adcce42c6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 583.288660] env[62070]: ERROR nova.compute.manager [instance: 768b0b5f-7d20-4bc6-87f6-b66adcce42c6] vif_infos = vmwarevif.get_vif_info(self._session, [ 583.288660] env[62070]: ERROR nova.compute.manager [instance: 768b0b5f-7d20-4bc6-87f6-b66adcce42c6] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 583.289050] env[62070]: ERROR nova.compute.manager [instance: 768b0b5f-7d20-4bc6-87f6-b66adcce42c6] for vif in network_info: [ 583.289050] env[62070]: ERROR nova.compute.manager [instance: 768b0b5f-7d20-4bc6-87f6-b66adcce42c6] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 583.289050] env[62070]: ERROR nova.compute.manager [instance: 768b0b5f-7d20-4bc6-87f6-b66adcce42c6] return self._sync_wrapper(fn, *args, **kwargs) [ 583.289050] env[62070]: ERROR nova.compute.manager [instance: 768b0b5f-7d20-4bc6-87f6-b66adcce42c6] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 583.289050] env[62070]: ERROR nova.compute.manager [instance: 768b0b5f-7d20-4bc6-87f6-b66adcce42c6] self.wait() [ 583.289050] env[62070]: ERROR nova.compute.manager [instance: 768b0b5f-7d20-4bc6-87f6-b66adcce42c6] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 583.289050] env[62070]: ERROR nova.compute.manager [instance: 768b0b5f-7d20-4bc6-87f6-b66adcce42c6] self[:] = self._gt.wait() [ 583.289050] env[62070]: ERROR nova.compute.manager [instance: 768b0b5f-7d20-4bc6-87f6-b66adcce42c6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 583.289050] env[62070]: ERROR nova.compute.manager [instance: 768b0b5f-7d20-4bc6-87f6-b66adcce42c6] return self._exit_event.wait() [ 583.289050] env[62070]: ERROR nova.compute.manager [instance: 768b0b5f-7d20-4bc6-87f6-b66adcce42c6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 583.289050] env[62070]: ERROR nova.compute.manager [instance: 768b0b5f-7d20-4bc6-87f6-b66adcce42c6] result = hub.switch() [ 583.289050] env[62070]: ERROR nova.compute.manager [instance: 768b0b5f-7d20-4bc6-87f6-b66adcce42c6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
583.289050] env[62070]: ERROR nova.compute.manager [instance: 768b0b5f-7d20-4bc6-87f6-b66adcce42c6] return self.greenlet.switch() [ 583.289445] env[62070]: ERROR nova.compute.manager [instance: 768b0b5f-7d20-4bc6-87f6-b66adcce42c6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 583.289445] env[62070]: ERROR nova.compute.manager [instance: 768b0b5f-7d20-4bc6-87f6-b66adcce42c6] result = function(*args, **kwargs) [ 583.289445] env[62070]: ERROR nova.compute.manager [instance: 768b0b5f-7d20-4bc6-87f6-b66adcce42c6] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 583.289445] env[62070]: ERROR nova.compute.manager [instance: 768b0b5f-7d20-4bc6-87f6-b66adcce42c6] return func(*args, **kwargs) [ 583.289445] env[62070]: ERROR nova.compute.manager [instance: 768b0b5f-7d20-4bc6-87f6-b66adcce42c6] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 583.289445] env[62070]: ERROR nova.compute.manager [instance: 768b0b5f-7d20-4bc6-87f6-b66adcce42c6] raise e [ 583.289445] env[62070]: ERROR nova.compute.manager [instance: 768b0b5f-7d20-4bc6-87f6-b66adcce42c6] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 583.289445] env[62070]: ERROR nova.compute.manager [instance: 768b0b5f-7d20-4bc6-87f6-b66adcce42c6] nwinfo = self.network_api.allocate_for_instance( [ 583.289445] env[62070]: ERROR nova.compute.manager [instance: 768b0b5f-7d20-4bc6-87f6-b66adcce42c6] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 583.289445] env[62070]: ERROR nova.compute.manager [instance: 768b0b5f-7d20-4bc6-87f6-b66adcce42c6] created_port_ids = self._update_ports_for_instance( [ 583.289445] env[62070]: ERROR nova.compute.manager [instance: 768b0b5f-7d20-4bc6-87f6-b66adcce42c6] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 583.289445] env[62070]: ERROR nova.compute.manager [instance: 768b0b5f-7d20-4bc6-87f6-b66adcce42c6] with excutils.save_and_reraise_exception(): [ 583.289445] env[62070]: ERROR nova.compute.manager [instance: 768b0b5f-7d20-4bc6-87f6-b66adcce42c6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 583.289787] env[62070]: ERROR nova.compute.manager [instance: 768b0b5f-7d20-4bc6-87f6-b66adcce42c6] self.force_reraise() [ 583.289787] env[62070]: ERROR nova.compute.manager [instance: 768b0b5f-7d20-4bc6-87f6-b66adcce42c6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 583.289787] env[62070]: ERROR nova.compute.manager [instance: 768b0b5f-7d20-4bc6-87f6-b66adcce42c6] raise self.value [ 583.289787] env[62070]: ERROR nova.compute.manager [instance: 768b0b5f-7d20-4bc6-87f6-b66adcce42c6] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 583.289787] env[62070]: ERROR nova.compute.manager [instance: 768b0b5f-7d20-4bc6-87f6-b66adcce42c6] updated_port = self._update_port( [ 583.289787] env[62070]: ERROR nova.compute.manager [instance: 768b0b5f-7d20-4bc6-87f6-b66adcce42c6] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 583.289787] env[62070]: ERROR nova.compute.manager [instance: 768b0b5f-7d20-4bc6-87f6-b66adcce42c6] _ensure_no_port_binding_failure(port) [ 583.289787] env[62070]: ERROR nova.compute.manager [instance: 768b0b5f-7d20-4bc6-87f6-b66adcce42c6] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 583.289787] env[62070]: ERROR nova.compute.manager [instance: 768b0b5f-7d20-4bc6-87f6-b66adcce42c6] raise exception.PortBindingFailed(port_id=port['id']) [ 583.289787] env[62070]: ERROR nova.compute.manager [instance: 768b0b5f-7d20-4bc6-87f6-b66adcce42c6] nova.exception.PortBindingFailed: Binding failed for port e3ce78d3-0012-4892-8bad-29f3337f38db, please check neutron logs for more information. [ 583.289787] env[62070]: ERROR nova.compute.manager [instance: 768b0b5f-7d20-4bc6-87f6-b66adcce42c6] [ 583.290090] env[62070]: DEBUG nova.compute.utils [None req-ef89e838-db76-4acf-b7ea-6f620e71b226 tempest-DeleteServersAdminTestJSON-428050376 tempest-DeleteServersAdminTestJSON-428050376-project-member] [instance: 768b0b5f-7d20-4bc6-87f6-b66adcce42c6] Binding failed for port e3ce78d3-0012-4892-8bad-29f3337f38db, please check neutron logs for more information. {{(pid=62070) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 583.292776] env[62070]: DEBUG oslo_concurrency.lockutils [None req-1f16b43e-5915-487b-ba0a-4bf16e720213 tempest-ImagesNegativeTestJSON-1253036663 tempest-ImagesNegativeTestJSON-1253036663-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.121s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 583.296264] env[62070]: INFO nova.compute.claims [None req-1f16b43e-5915-487b-ba0a-4bf16e720213 tempest-ImagesNegativeTestJSON-1253036663 tempest-ImagesNegativeTestJSON-1253036663-project-member] [instance: 88251634-8add-4216-b789-dfee77a1ae09] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 583.299426] env[62070]: DEBUG nova.compute.manager [None req-ef89e838-db76-4acf-b7ea-6f620e71b226 tempest-DeleteServersAdminTestJSON-428050376 tempest-DeleteServersAdminTestJSON-428050376-project-member] [instance: 768b0b5f-7d20-4bc6-87f6-b66adcce42c6] Build of instance 768b0b5f-7d20-4bc6-87f6-b66adcce42c6 was re-scheduled: Binding failed for port e3ce78d3-0012-4892-8bad-29f3337f38db, please check neutron logs for more information. 
{{(pid=62070) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 583.299936] env[62070]: DEBUG nova.compute.manager [None req-ef89e838-db76-4acf-b7ea-6f620e71b226 tempest-DeleteServersAdminTestJSON-428050376 tempest-DeleteServersAdminTestJSON-428050376-project-member] [instance: 768b0b5f-7d20-4bc6-87f6-b66adcce42c6] Unplugging VIFs for instance {{(pid=62070) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 583.300192] env[62070]: DEBUG oslo_concurrency.lockutils [None req-ef89e838-db76-4acf-b7ea-6f620e71b226 tempest-DeleteServersAdminTestJSON-428050376 tempest-DeleteServersAdminTestJSON-428050376-project-member] Acquiring lock "refresh_cache-768b0b5f-7d20-4bc6-87f6-b66adcce42c6" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 583.300341] env[62070]: DEBUG oslo_concurrency.lockutils [None req-ef89e838-db76-4acf-b7ea-6f620e71b226 tempest-DeleteServersAdminTestJSON-428050376 tempest-DeleteServersAdminTestJSON-428050376-project-member] Acquired lock "refresh_cache-768b0b5f-7d20-4bc6-87f6-b66adcce42c6" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 583.300500] env[62070]: DEBUG nova.network.neutron [None req-ef89e838-db76-4acf-b7ea-6f620e71b226 tempest-DeleteServersAdminTestJSON-428050376 tempest-DeleteServersAdminTestJSON-428050376-project-member] [instance: 768b0b5f-7d20-4bc6-87f6-b66adcce42c6] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 583.405368] env[62070]: DEBUG nova.network.neutron [req-74d6791c-40e1-4b8a-968f-ca21269832af req-c7ca4609-cba3-4328-a590-a810cce7fee4 service nova] [instance: 317f20e9-6ba1-4b41-b298-5dd844f323ac] Instance cache missing network info. {{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 583.561641] env[62070]: DEBUG nova.network.neutron [req-74d6791c-40e1-4b8a-968f-ca21269832af req-c7ca4609-cba3-4328-a590-a810cce7fee4 service nova] [instance: 317f20e9-6ba1-4b41-b298-5dd844f323ac] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 583.583087] env[62070]: DEBUG nova.network.neutron [-] [instance: 317f20e9-6ba1-4b41-b298-5dd844f323ac] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 583.675258] env[62070]: INFO nova.compute.manager [None req-e375be83-77df-4817-8661-3151cdf18f15 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] [instance: 9fd18ad4-7c72-4a13-8c29-da660a060020] Took 1.03 seconds to deallocate network for instance. 
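Both build failures above (port 900622a2-0f27-4e6e-9fd4-8a0a81d8d602 and port e3ce78d3-0012-4892-8bad-29f3337f38db) bottom out in the same frame: nova.network.neutron._ensure_no_port_binding_failure raising PortBindingFailed once Neutron reports the binding as failed. A self-contained sketch of that check follows; the 'binding_failed' vif-type value is the usual Neutron convention rather than something shown in this log, and the exception class here is a stand-in so the example runs on its own. Only the function name, the raise, and the message text come from the tracebacks above.

    # Sketch of the check behind "Binding failed for port ..." in the tracebacks
    # above (nova/network/neutron.py:294 in the traceback). Assumes Neutron flags
    # a failed binding by returning binding:vif_type == "binding_failed".

    class PortBindingFailed(Exception):
        """Stand-in for nova.exception.PortBindingFailed."""
        def __init__(self, port_id):
            super().__init__('Binding failed for port %s, please check neutron '
                             'logs for more information.' % port_id)

    VIF_TYPE_BINDING_FAILED = 'binding_failed'  # assumed Neutron marker value

    def _ensure_no_port_binding_failure(port):
        # Ports come back from the Neutron API as plain dicts.
        if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
            raise PortBindingFailed(port_id=port['id'])

    try:
        _ensure_no_port_binding_failure(
            {'id': 'e3ce78d3-0012-4892-8bad-29f3337f38db',
             'binding:vif_type': VIF_TYPE_BINDING_FAILED})
    except PortBindingFailed as exc:
        print(exc)

Because the exception only surfaces when the asynchronous network allocation is consumed (the _sync_wrapper / wait frames in the traceback), the compute manager tears the build down and re-schedules it, which is the "was re-scheduled: Binding failed for port ..." line above.
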
[ 583.775729] env[62070]: DEBUG oslo_concurrency.lockutils [None req-26488158-5329-4c5b-9a82-fbfcc788dbaa tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Lock "9b95dfcb-718e-478d-85bc-7479be9b67de" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 33.940s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 583.855730] env[62070]: DEBUG nova.network.neutron [None req-ef89e838-db76-4acf-b7ea-6f620e71b226 tempest-DeleteServersAdminTestJSON-428050376 tempest-DeleteServersAdminTestJSON-428050376-project-member] [instance: 768b0b5f-7d20-4bc6-87f6-b66adcce42c6] Instance cache missing network info. {{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 583.985296] env[62070]: DEBUG nova.network.neutron [None req-ef89e838-db76-4acf-b7ea-6f620e71b226 tempest-DeleteServersAdminTestJSON-428050376 tempest-DeleteServersAdminTestJSON-428050376-project-member] [instance: 768b0b5f-7d20-4bc6-87f6-b66adcce42c6] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 584.066230] env[62070]: DEBUG oslo_concurrency.lockutils [req-74d6791c-40e1-4b8a-968f-ca21269832af req-c7ca4609-cba3-4328-a590-a810cce7fee4 service nova] Releasing lock "refresh_cache-317f20e9-6ba1-4b41-b298-5dd844f323ac" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 584.089579] env[62070]: INFO nova.compute.manager [-] [instance: 317f20e9-6ba1-4b41-b298-5dd844f323ac] Took 1.20 seconds to deallocate network for instance. [ 584.093383] env[62070]: DEBUG nova.compute.claims [None req-7301dc1c-6dd9-44f9-a50e-255658085cb3 tempest-ServerDiagnosticsTest-24419400 tempest-ServerDiagnosticsTest-24419400-project-member] [instance: 317f20e9-6ba1-4b41-b298-5dd844f323ac] Aborting claim: {{(pid=62070) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 584.093577] env[62070]: DEBUG oslo_concurrency.lockutils [None req-7301dc1c-6dd9-44f9-a50e-255658085cb3 tempest-ServerDiagnosticsTest-24419400 tempest-ServerDiagnosticsTest-24419400-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 584.286744] env[62070]: DEBUG nova.compute.manager [None req-67d8c644-2517-46bc-9d7f-c0ef47d3d2eb tempest-ImagesOneServerNegativeTestJSON-1867967444 tempest-ImagesOneServerNegativeTestJSON-1867967444-project-member] [instance: d8478b63-3a62-4afa-950b-edf9774e8ea8] Starting instance... 
{{(pid=62070) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 584.323535] env[62070]: DEBUG oslo_concurrency.lockutils [None req-defc523d-479c-483f-91ae-c073714ce1fe tempest-FloatingIPsAssociationNegativeTestJSON-743310595 tempest-FloatingIPsAssociationNegativeTestJSON-743310595-project-member] Acquiring lock "73cb4a44-a4d9-4c8c-8472-f99b5d449cec" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 584.323674] env[62070]: DEBUG oslo_concurrency.lockutils [None req-defc523d-479c-483f-91ae-c073714ce1fe tempest-FloatingIPsAssociationNegativeTestJSON-743310595 tempest-FloatingIPsAssociationNegativeTestJSON-743310595-project-member] Lock "73cb4a44-a4d9-4c8c-8472-f99b5d449cec" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 584.353935] env[62070]: DEBUG oslo_concurrency.lockutils [None req-c926e67b-5387-49dc-b89c-40a189a7f3d7 tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] Acquiring lock "95edf3d1-a987-4768-93be-1e045d7bfa99" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 584.353935] env[62070]: DEBUG oslo_concurrency.lockutils [None req-c926e67b-5387-49dc-b89c-40a189a7f3d7 tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] Lock "95edf3d1-a987-4768-93be-1e045d7bfa99" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 584.490258] env[62070]: DEBUG oslo_concurrency.lockutils [None req-ef89e838-db76-4acf-b7ea-6f620e71b226 tempest-DeleteServersAdminTestJSON-428050376 tempest-DeleteServersAdminTestJSON-428050376-project-member] Releasing lock "refresh_cache-768b0b5f-7d20-4bc6-87f6-b66adcce42c6" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 584.490258] env[62070]: DEBUG nova.compute.manager [None req-ef89e838-db76-4acf-b7ea-6f620e71b226 tempest-DeleteServersAdminTestJSON-428050376 tempest-DeleteServersAdminTestJSON-428050376-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62070) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 584.490258] env[62070]: DEBUG nova.compute.manager [None req-ef89e838-db76-4acf-b7ea-6f620e71b226 tempest-DeleteServersAdminTestJSON-428050376 tempest-DeleteServersAdminTestJSON-428050376-project-member] [instance: 768b0b5f-7d20-4bc6-87f6-b66adcce42c6] Deallocating network for instance {{(pid=62070) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 584.490258] env[62070]: DEBUG nova.network.neutron [None req-ef89e838-db76-4acf-b7ea-6f620e71b226 tempest-DeleteServersAdminTestJSON-428050376 tempest-DeleteServersAdminTestJSON-428050376-project-member] [instance: 768b0b5f-7d20-4bc6-87f6-b66adcce42c6] deallocate_for_instance() {{(pid=62070) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 584.519080] env[62070]: DEBUG nova.network.neutron [None req-ef89e838-db76-4acf-b7ea-6f620e71b226 tempest-DeleteServersAdminTestJSON-428050376 tempest-DeleteServersAdminTestJSON-428050376-project-member] [instance: 768b0b5f-7d20-4bc6-87f6-b66adcce42c6] Instance cache missing network info. {{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 584.576464] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4175fc3c-aea9-4e12-9df3-2b1e23b6484a {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 584.585244] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04fe7e19-e7f2-423f-aa87-1d2d8a800483 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 584.616815] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44ba6527-97fa-4527-b026-bc2d463ebebf {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 584.624289] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a2e1b79-0d71-4085-be00-e740f2b8e96b {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 584.642995] env[62070]: DEBUG nova.compute.provider_tree [None req-1f16b43e-5915-487b-ba0a-4bf16e720213 tempest-ImagesNegativeTestJSON-1253036663 tempest-ImagesNegativeTestJSON-1253036663-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 584.720367] env[62070]: INFO nova.scheduler.client.report [None req-e375be83-77df-4817-8661-3151cdf18f15 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Deleted allocations for instance 9fd18ad4-7c72-4a13-8c29-da660a060020 [ 584.810789] env[62070]: DEBUG oslo_concurrency.lockutils [None req-67d8c644-2517-46bc-9d7f-c0ef47d3d2eb tempest-ImagesOneServerNegativeTestJSON-1867967444 tempest-ImagesOneServerNegativeTestJSON-1867967444-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 585.023153] env[62070]: DEBUG nova.network.neutron [None req-ef89e838-db76-4acf-b7ea-6f620e71b226 tempest-DeleteServersAdminTestJSON-428050376 
tempest-DeleteServersAdminTestJSON-428050376-project-member] [instance: 768b0b5f-7d20-4bc6-87f6-b66adcce42c6] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 585.150407] env[62070]: DEBUG nova.scheduler.client.report [None req-1f16b43e-5915-487b-ba0a-4bf16e720213 tempest-ImagesNegativeTestJSON-1253036663 tempest-ImagesNegativeTestJSON-1253036663-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 585.229753] env[62070]: DEBUG oslo_concurrency.lockutils [None req-e375be83-77df-4817-8661-3151cdf18f15 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Lock "9fd18ad4-7c72-4a13-8c29-da660a060020" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 34.184s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 585.426529] env[62070]: DEBUG oslo_concurrency.lockutils [None req-8576b7f8-0e27-4444-92a4-eb32008ddebf tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] Acquiring lock "b0134b0f-23b4-4d34-b144-71ccdd9fba72" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 585.426529] env[62070]: DEBUG oslo_concurrency.lockutils [None req-8576b7f8-0e27-4444-92a4-eb32008ddebf tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] Lock "b0134b0f-23b4-4d34-b144-71ccdd9fba72" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 585.526595] env[62070]: INFO nova.compute.manager [None req-ef89e838-db76-4acf-b7ea-6f620e71b226 tempest-DeleteServersAdminTestJSON-428050376 tempest-DeleteServersAdminTestJSON-428050376-project-member] [instance: 768b0b5f-7d20-4bc6-87f6-b66adcce42c6] Took 1.04 seconds to deallocate network for instance. [ 585.653358] env[62070]: DEBUG oslo_concurrency.lockutils [None req-1f16b43e-5915-487b-ba0a-4bf16e720213 tempest-ImagesNegativeTestJSON-1253036663 tempest-ImagesNegativeTestJSON-1253036663-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.361s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 585.653924] env[62070]: DEBUG nova.compute.manager [None req-1f16b43e-5915-487b-ba0a-4bf16e720213 tempest-ImagesNegativeTestJSON-1253036663 tempest-ImagesNegativeTestJSON-1253036663-project-member] [instance: 88251634-8add-4216-b789-dfee77a1ae09] Start building networks asynchronously for instance. 
{{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 585.656942] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 11.766s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 585.733387] env[62070]: DEBUG nova.compute.manager [None req-fceeb460-143a-4ad7-bde1-61f1f32cf593 tempest-VolumesAssistedSnapshotsTest-120562102 tempest-VolumesAssistedSnapshotsTest-120562102-project-member] [instance: d7a90be3-d3d6-4626-944b-b907cf7fb64d] Starting instance... {{(pid=62070) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 586.165226] env[62070]: DEBUG nova.compute.utils [None req-1f16b43e-5915-487b-ba0a-4bf16e720213 tempest-ImagesNegativeTestJSON-1253036663 tempest-ImagesNegativeTestJSON-1253036663-project-member] Using /dev/sd instead of None {{(pid=62070) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 586.174282] env[62070]: DEBUG nova.compute.manager [None req-1f16b43e-5915-487b-ba0a-4bf16e720213 tempest-ImagesNegativeTestJSON-1253036663 tempest-ImagesNegativeTestJSON-1253036663-project-member] [instance: 88251634-8add-4216-b789-dfee77a1ae09] Allocating IP information in the background. {{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 586.174399] env[62070]: DEBUG nova.network.neutron [None req-1f16b43e-5915-487b-ba0a-4bf16e720213 tempest-ImagesNegativeTestJSON-1253036663 tempest-ImagesNegativeTestJSON-1253036663-project-member] [instance: 88251634-8add-4216-b789-dfee77a1ae09] allocate_for_instance() {{(pid=62070) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 586.267556] env[62070]: DEBUG oslo_concurrency.lockutils [None req-fceeb460-143a-4ad7-bde1-61f1f32cf593 tempest-VolumesAssistedSnapshotsTest-120562102 tempest-VolumesAssistedSnapshotsTest-120562102-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 586.309351] env[62070]: DEBUG nova.policy [None req-1f16b43e-5915-487b-ba0a-4bf16e720213 tempest-ImagesNegativeTestJSON-1253036663 tempest-ImagesNegativeTestJSON-1253036663-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0c0ebc949c7740cab600b83fc99240a5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '69a43a164f7d4e0185975eee42f33f1d', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62070) authorize /opt/stack/nova/nova/policy.py:201}} [ 586.571053] env[62070]: INFO nova.scheduler.client.report [None req-ef89e838-db76-4acf-b7ea-6f620e71b226 tempest-DeleteServersAdminTestJSON-428050376 tempest-DeleteServersAdminTestJSON-428050376-project-member] Deleted allocations for instance 768b0b5f-7d20-4bc6-87f6-b66adcce42c6 [ 586.676586] env[62070]: DEBUG nova.compute.manager [None req-1f16b43e-5915-487b-ba0a-4bf16e720213 tempest-ImagesNegativeTestJSON-1253036663 tempest-ImagesNegativeTestJSON-1253036663-project-member] [instance: 88251634-8add-4216-b789-dfee77a1ae09] Start building 
block device mappings for instance. {{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 586.709820] env[62070]: WARNING nova.compute.resource_tracker [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Instance 283e7488-1240-475f-a74d-809251950774 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 586.709820] env[62070]: DEBUG nova.compute.resource_tracker [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Instance e3306a4a-b1cf-4d13-b4de-bd6a6a2e2619 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62070) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 586.709820] env[62070]: DEBUG nova.compute.resource_tracker [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Instance 5936aded-90fc-4f77-8103-8c9e1912379c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62070) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 586.709820] env[62070]: DEBUG nova.compute.resource_tracker [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Instance 317f20e9-6ba1-4b41-b298-5dd844f323ac actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62070) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 586.709999] env[62070]: DEBUG nova.compute.resource_tracker [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Instance 88251634-8add-4216-b789-dfee77a1ae09 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62070) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 586.713437] env[62070]: DEBUG nova.compute.manager [req-ad7c7781-59c1-4777-a0a5-b46ac36e8670 req-373ef713-e7ea-491c-a862-95e442dff32a service nova] [instance: 317f20e9-6ba1-4b41-b298-5dd844f323ac] Received event network-vif-deleted-900622a2-0f27-4e6e-9fd4-8a0a81d8d602 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 586.827793] env[62070]: DEBUG oslo_concurrency.lockutils [None req-a4dda7df-1307-4390-b3d0-8b40b9135ae8 tempest-ImagesOneServerTestJSON-651636145 tempest-ImagesOneServerTestJSON-651636145-project-member] Acquiring lock "d41f73e0-a188-4cc4-8391-938178aad496" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 586.827793] env[62070]: DEBUG oslo_concurrency.lockutils [None req-a4dda7df-1307-4390-b3d0-8b40b9135ae8 tempest-ImagesOneServerTestJSON-651636145 tempest-ImagesOneServerTestJSON-651636145-project-member] Lock "d41f73e0-a188-4cc4-8391-938178aad496" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 587.083291] env[62070]: DEBUG oslo_concurrency.lockutils [None req-ef89e838-db76-4acf-b7ea-6f620e71b226 tempest-DeleteServersAdminTestJSON-428050376 tempest-DeleteServersAdminTestJSON-428050376-project-member] Lock "768b0b5f-7d20-4bc6-87f6-b66adcce42c6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 35.430s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 587.166721] env[62070]: DEBUG nova.network.neutron [None req-1f16b43e-5915-487b-ba0a-4bf16e720213 tempest-ImagesNegativeTestJSON-1253036663 tempest-ImagesNegativeTestJSON-1253036663-project-member] [instance: 88251634-8add-4216-b789-dfee77a1ae09] Successfully created port: 00c97619-997b-46ca-bd58-aa05d59eb0f7 {{(pid=62070) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 587.211691] env[62070]: DEBUG nova.compute.resource_tracker [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Instance 495a15b2-20bd-44d2-8020-816031e89832 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62070) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 587.588133] env[62070]: DEBUG nova.compute.manager [None req-df68df2e-4fc9-4497-809c-48ba87337e66 tempest-AttachInterfacesV270Test-562938472 tempest-AttachInterfacesV270Test-562938472-project-member] [instance: ad0dd218-5e45-4d22-9d94-5c25ba8b22ec] Starting instance... {{(pid=62070) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 587.688506] env[62070]: DEBUG nova.compute.manager [None req-1f16b43e-5915-487b-ba0a-4bf16e720213 tempest-ImagesNegativeTestJSON-1253036663 tempest-ImagesNegativeTestJSON-1253036663-project-member] [instance: 88251634-8add-4216-b789-dfee77a1ae09] Start spawning the instance on the hypervisor. 
{{(pid=62070) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 587.714889] env[62070]: DEBUG nova.compute.resource_tracker [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Instance 7d8d4cdc-2b25-46f3-8bc0-1de22fdd005a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62070) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 587.720477] env[62070]: DEBUG nova.virt.hardware [None req-1f16b43e-5915-487b-ba0a-4bf16e720213 tempest-ImagesNegativeTestJSON-1253036663 tempest-ImagesNegativeTestJSON-1253036663-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T09:21:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T09:21:17Z,direct_url=,disk_format='vmdk',id=43ea607c-7ece-4601-9b11-75c6a16aa7dd,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9d42cb2bbadf40d6b35f237f71234611',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T09:21:18Z,virtual_size=,visibility=), allow threads: False {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 587.720976] env[62070]: DEBUG nova.virt.hardware [None req-1f16b43e-5915-487b-ba0a-4bf16e720213 tempest-ImagesNegativeTestJSON-1253036663 tempest-ImagesNegativeTestJSON-1253036663-project-member] Flavor limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 587.721282] env[62070]: DEBUG nova.virt.hardware [None req-1f16b43e-5915-487b-ba0a-4bf16e720213 tempest-ImagesNegativeTestJSON-1253036663 tempest-ImagesNegativeTestJSON-1253036663-project-member] Image limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 587.721589] env[62070]: DEBUG nova.virt.hardware [None req-1f16b43e-5915-487b-ba0a-4bf16e720213 tempest-ImagesNegativeTestJSON-1253036663 tempest-ImagesNegativeTestJSON-1253036663-project-member] Flavor pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 587.723118] env[62070]: DEBUG nova.virt.hardware [None req-1f16b43e-5915-487b-ba0a-4bf16e720213 tempest-ImagesNegativeTestJSON-1253036663 tempest-ImagesNegativeTestJSON-1253036663-project-member] Image pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 587.723118] env[62070]: DEBUG nova.virt.hardware [None req-1f16b43e-5915-487b-ba0a-4bf16e720213 tempest-ImagesNegativeTestJSON-1253036663 tempest-ImagesNegativeTestJSON-1253036663-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 587.725353] env[62070]: DEBUG nova.virt.hardware [None req-1f16b43e-5915-487b-ba0a-4bf16e720213 tempest-ImagesNegativeTestJSON-1253036663 tempest-ImagesNegativeTestJSON-1253036663-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum 
VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 587.725478] env[62070]: DEBUG nova.virt.hardware [None req-1f16b43e-5915-487b-ba0a-4bf16e720213 tempest-ImagesNegativeTestJSON-1253036663 tempest-ImagesNegativeTestJSON-1253036663-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 587.725668] env[62070]: DEBUG nova.virt.hardware [None req-1f16b43e-5915-487b-ba0a-4bf16e720213 tempest-ImagesNegativeTestJSON-1253036663 tempest-ImagesNegativeTestJSON-1253036663-project-member] Got 1 possible topologies {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 587.725839] env[62070]: DEBUG nova.virt.hardware [None req-1f16b43e-5915-487b-ba0a-4bf16e720213 tempest-ImagesNegativeTestJSON-1253036663 tempest-ImagesNegativeTestJSON-1253036663-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 587.726019] env[62070]: DEBUG nova.virt.hardware [None req-1f16b43e-5915-487b-ba0a-4bf16e720213 tempest-ImagesNegativeTestJSON-1253036663 tempest-ImagesNegativeTestJSON-1253036663-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 587.726984] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e49df242-156b-473b-9203-b2e214355e7a {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.738670] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b8f94e1-5e78-4112-8f94-0ac4258e73a7 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 588.087026] env[62070]: DEBUG oslo_concurrency.lockutils [None req-5085d6a6-6f80-4417-bbf7-554feeae07b1 tempest-ServerActionsTestOtherB-843110490 tempest-ServerActionsTestOtherB-843110490-project-member] Acquiring lock "a5b98f92-d287-4d40-8a21-d2de64026970" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 588.087026] env[62070]: DEBUG oslo_concurrency.lockutils [None req-5085d6a6-6f80-4417-bbf7-554feeae07b1 tempest-ServerActionsTestOtherB-843110490 tempest-ServerActionsTestOtherB-843110490-project-member] Lock "a5b98f92-d287-4d40-8a21-d2de64026970" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 588.113434] env[62070]: DEBUG oslo_concurrency.lockutils [None req-df68df2e-4fc9-4497-809c-48ba87337e66 tempest-AttachInterfacesV270Test-562938472 tempest-AttachInterfacesV270Test-562938472-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 588.218241] env[62070]: DEBUG nova.compute.resource_tracker [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Instance 
2226072d-16f2-4ea1-a56c-d866554c7379 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62070) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 588.725348] env[62070]: DEBUG nova.compute.resource_tracker [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Instance d8478b63-3a62-4afa-950b-edf9774e8ea8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62070) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 588.872617] env[62070]: ERROR nova.compute.manager [None req-1f16b43e-5915-487b-ba0a-4bf16e720213 tempest-ImagesNegativeTestJSON-1253036663 tempest-ImagesNegativeTestJSON-1253036663-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 00c97619-997b-46ca-bd58-aa05d59eb0f7, please check neutron logs for more information. [ 588.872617] env[62070]: ERROR nova.compute.manager Traceback (most recent call last): [ 588.872617] env[62070]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 588.872617] env[62070]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 588.872617] env[62070]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 588.872617] env[62070]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 588.872617] env[62070]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 588.872617] env[62070]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 588.872617] env[62070]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 588.872617] env[62070]: ERROR nova.compute.manager self.force_reraise() [ 588.872617] env[62070]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 588.872617] env[62070]: ERROR nova.compute.manager raise self.value [ 588.872617] env[62070]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 588.872617] env[62070]: ERROR nova.compute.manager updated_port = self._update_port( [ 588.872617] env[62070]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 588.872617] env[62070]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 588.873110] env[62070]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 588.873110] env[62070]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 588.873110] env[62070]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 00c97619-997b-46ca-bd58-aa05d59eb0f7, please check neutron logs for more information. 
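For scale on the resource-tracker bookkeeping in this section: the provider inventory reported earlier (VCPU total 48 at allocation_ratio 4.0, MEMORY_MB total 196590 with 512 reserved, DISK_GB total 400) is what bounds the per-instance allocations of {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1} that the tracker is healing or skipping. Below is a small sketch applying the usual placement capacity rule, (total - reserved) * allocation_ratio, to those numbers; the rule itself is standard placement behaviour and is not printed in this log, and the variable names are illustrative.

    # Sketch: schedulable capacity implied by the inventory DEBUG lines above,
    # using the usual placement rule (total - reserved) * allocation_ratio.
    # (min_unit/max_unit/step_size from the log additionally constrain any
    # single allocation, but are ignored here.)
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }

    # Per-instance allocation reported by the resource tracker above.
    claim = {'VCPU': 1, 'MEMORY_MB': 192, 'DISK_GB': 1}

    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print('%-9s capacity %.0f -> room for %d allocations of %s'
              % (rc, capacity, capacity // claim[rc], claim[rc]))
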
[ 588.873110] env[62070]: ERROR nova.compute.manager [ 588.873110] env[62070]: Traceback (most recent call last): [ 588.873110] env[62070]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 588.873110] env[62070]: listener.cb(fileno) [ 588.873110] env[62070]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 588.873110] env[62070]: result = function(*args, **kwargs) [ 588.873110] env[62070]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 588.873110] env[62070]: return func(*args, **kwargs) [ 588.873110] env[62070]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 588.873110] env[62070]: raise e [ 588.873110] env[62070]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 588.873110] env[62070]: nwinfo = self.network_api.allocate_for_instance( [ 588.873110] env[62070]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 588.873110] env[62070]: created_port_ids = self._update_ports_for_instance( [ 588.873110] env[62070]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 588.873110] env[62070]: with excutils.save_and_reraise_exception(): [ 588.873110] env[62070]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 588.873110] env[62070]: self.force_reraise() [ 588.873110] env[62070]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 588.873110] env[62070]: raise self.value [ 588.873110] env[62070]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 588.873110] env[62070]: updated_port = self._update_port( [ 588.873110] env[62070]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 588.873110] env[62070]: _ensure_no_port_binding_failure(port) [ 588.873110] env[62070]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 588.873110] env[62070]: raise exception.PortBindingFailed(port_id=port['id']) [ 588.873906] env[62070]: nova.exception.PortBindingFailed: Binding failed for port 00c97619-997b-46ca-bd58-aa05d59eb0f7, please check neutron logs for more information. [ 588.873906] env[62070]: Removing descriptor: 19 [ 588.873906] env[62070]: ERROR nova.compute.manager [None req-1f16b43e-5915-487b-ba0a-4bf16e720213 tempest-ImagesNegativeTestJSON-1253036663 tempest-ImagesNegativeTestJSON-1253036663-project-member] [instance: 88251634-8add-4216-b789-dfee77a1ae09] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 00c97619-997b-46ca-bd58-aa05d59eb0f7, please check neutron logs for more information. 
[ 588.873906] env[62070]: ERROR nova.compute.manager [instance: 88251634-8add-4216-b789-dfee77a1ae09] Traceback (most recent call last): [ 588.873906] env[62070]: ERROR nova.compute.manager [instance: 88251634-8add-4216-b789-dfee77a1ae09] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 588.873906] env[62070]: ERROR nova.compute.manager [instance: 88251634-8add-4216-b789-dfee77a1ae09] yield resources [ 588.873906] env[62070]: ERROR nova.compute.manager [instance: 88251634-8add-4216-b789-dfee77a1ae09] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 588.873906] env[62070]: ERROR nova.compute.manager [instance: 88251634-8add-4216-b789-dfee77a1ae09] self.driver.spawn(context, instance, image_meta, [ 588.873906] env[62070]: ERROR nova.compute.manager [instance: 88251634-8add-4216-b789-dfee77a1ae09] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 588.873906] env[62070]: ERROR nova.compute.manager [instance: 88251634-8add-4216-b789-dfee77a1ae09] self._vmops.spawn(context, instance, image_meta, injected_files, [ 588.873906] env[62070]: ERROR nova.compute.manager [instance: 88251634-8add-4216-b789-dfee77a1ae09] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 588.873906] env[62070]: ERROR nova.compute.manager [instance: 88251634-8add-4216-b789-dfee77a1ae09] vm_ref = self.build_virtual_machine(instance, [ 588.874360] env[62070]: ERROR nova.compute.manager [instance: 88251634-8add-4216-b789-dfee77a1ae09] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 588.874360] env[62070]: ERROR nova.compute.manager [instance: 88251634-8add-4216-b789-dfee77a1ae09] vif_infos = vmwarevif.get_vif_info(self._session, [ 588.874360] env[62070]: ERROR nova.compute.manager [instance: 88251634-8add-4216-b789-dfee77a1ae09] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 588.874360] env[62070]: ERROR nova.compute.manager [instance: 88251634-8add-4216-b789-dfee77a1ae09] for vif in network_info: [ 588.874360] env[62070]: ERROR nova.compute.manager [instance: 88251634-8add-4216-b789-dfee77a1ae09] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 588.874360] env[62070]: ERROR nova.compute.manager [instance: 88251634-8add-4216-b789-dfee77a1ae09] return self._sync_wrapper(fn, *args, **kwargs) [ 588.874360] env[62070]: ERROR nova.compute.manager [instance: 88251634-8add-4216-b789-dfee77a1ae09] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 588.874360] env[62070]: ERROR nova.compute.manager [instance: 88251634-8add-4216-b789-dfee77a1ae09] self.wait() [ 588.874360] env[62070]: ERROR nova.compute.manager [instance: 88251634-8add-4216-b789-dfee77a1ae09] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 588.874360] env[62070]: ERROR nova.compute.manager [instance: 88251634-8add-4216-b789-dfee77a1ae09] self[:] = self._gt.wait() [ 588.874360] env[62070]: ERROR nova.compute.manager [instance: 88251634-8add-4216-b789-dfee77a1ae09] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 588.874360] env[62070]: ERROR nova.compute.manager [instance: 88251634-8add-4216-b789-dfee77a1ae09] return self._exit_event.wait() [ 588.874360] env[62070]: ERROR nova.compute.manager [instance: 88251634-8add-4216-b789-dfee77a1ae09] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 588.874729] env[62070]: ERROR 
nova.compute.manager [instance: 88251634-8add-4216-b789-dfee77a1ae09] result = hub.switch() [ 588.874729] env[62070]: ERROR nova.compute.manager [instance: 88251634-8add-4216-b789-dfee77a1ae09] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 588.874729] env[62070]: ERROR nova.compute.manager [instance: 88251634-8add-4216-b789-dfee77a1ae09] return self.greenlet.switch() [ 588.874729] env[62070]: ERROR nova.compute.manager [instance: 88251634-8add-4216-b789-dfee77a1ae09] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 588.874729] env[62070]: ERROR nova.compute.manager [instance: 88251634-8add-4216-b789-dfee77a1ae09] result = function(*args, **kwargs) [ 588.874729] env[62070]: ERROR nova.compute.manager [instance: 88251634-8add-4216-b789-dfee77a1ae09] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 588.874729] env[62070]: ERROR nova.compute.manager [instance: 88251634-8add-4216-b789-dfee77a1ae09] return func(*args, **kwargs) [ 588.874729] env[62070]: ERROR nova.compute.manager [instance: 88251634-8add-4216-b789-dfee77a1ae09] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 588.874729] env[62070]: ERROR nova.compute.manager [instance: 88251634-8add-4216-b789-dfee77a1ae09] raise e [ 588.874729] env[62070]: ERROR nova.compute.manager [instance: 88251634-8add-4216-b789-dfee77a1ae09] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 588.874729] env[62070]: ERROR nova.compute.manager [instance: 88251634-8add-4216-b789-dfee77a1ae09] nwinfo = self.network_api.allocate_for_instance( [ 588.874729] env[62070]: ERROR nova.compute.manager [instance: 88251634-8add-4216-b789-dfee77a1ae09] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 588.874729] env[62070]: ERROR nova.compute.manager [instance: 88251634-8add-4216-b789-dfee77a1ae09] created_port_ids = self._update_ports_for_instance( [ 588.875102] env[62070]: ERROR nova.compute.manager [instance: 88251634-8add-4216-b789-dfee77a1ae09] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 588.875102] env[62070]: ERROR nova.compute.manager [instance: 88251634-8add-4216-b789-dfee77a1ae09] with excutils.save_and_reraise_exception(): [ 588.875102] env[62070]: ERROR nova.compute.manager [instance: 88251634-8add-4216-b789-dfee77a1ae09] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 588.875102] env[62070]: ERROR nova.compute.manager [instance: 88251634-8add-4216-b789-dfee77a1ae09] self.force_reraise() [ 588.875102] env[62070]: ERROR nova.compute.manager [instance: 88251634-8add-4216-b789-dfee77a1ae09] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 588.875102] env[62070]: ERROR nova.compute.manager [instance: 88251634-8add-4216-b789-dfee77a1ae09] raise self.value [ 588.875102] env[62070]: ERROR nova.compute.manager [instance: 88251634-8add-4216-b789-dfee77a1ae09] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 588.875102] env[62070]: ERROR nova.compute.manager [instance: 88251634-8add-4216-b789-dfee77a1ae09] updated_port = self._update_port( [ 588.875102] env[62070]: ERROR nova.compute.manager [instance: 88251634-8add-4216-b789-dfee77a1ae09] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 588.875102] 
env[62070]: ERROR nova.compute.manager [instance: 88251634-8add-4216-b789-dfee77a1ae09] _ensure_no_port_binding_failure(port) [ 588.875102] env[62070]: ERROR nova.compute.manager [instance: 88251634-8add-4216-b789-dfee77a1ae09] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 588.875102] env[62070]: ERROR nova.compute.manager [instance: 88251634-8add-4216-b789-dfee77a1ae09] raise exception.PortBindingFailed(port_id=port['id']) [ 588.875528] env[62070]: ERROR nova.compute.manager [instance: 88251634-8add-4216-b789-dfee77a1ae09] nova.exception.PortBindingFailed: Binding failed for port 00c97619-997b-46ca-bd58-aa05d59eb0f7, please check neutron logs for more information. [ 588.875528] env[62070]: ERROR nova.compute.manager [instance: 88251634-8add-4216-b789-dfee77a1ae09] [ 588.875528] env[62070]: INFO nova.compute.manager [None req-1f16b43e-5915-487b-ba0a-4bf16e720213 tempest-ImagesNegativeTestJSON-1253036663 tempest-ImagesNegativeTestJSON-1253036663-project-member] [instance: 88251634-8add-4216-b789-dfee77a1ae09] Terminating instance [ 588.880213] env[62070]: DEBUG oslo_concurrency.lockutils [None req-1f16b43e-5915-487b-ba0a-4bf16e720213 tempest-ImagesNegativeTestJSON-1253036663 tempest-ImagesNegativeTestJSON-1253036663-project-member] Acquiring lock "refresh_cache-88251634-8add-4216-b789-dfee77a1ae09" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 588.881179] env[62070]: DEBUG oslo_concurrency.lockutils [None req-1f16b43e-5915-487b-ba0a-4bf16e720213 tempest-ImagesNegativeTestJSON-1253036663 tempest-ImagesNegativeTestJSON-1253036663-project-member] Acquired lock "refresh_cache-88251634-8add-4216-b789-dfee77a1ae09" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 588.881508] env[62070]: DEBUG nova.network.neutron [None req-1f16b43e-5915-487b-ba0a-4bf16e720213 tempest-ImagesNegativeTestJSON-1253036663 tempest-ImagesNegativeTestJSON-1253036663-project-member] [instance: 88251634-8add-4216-b789-dfee77a1ae09] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 589.228358] env[62070]: DEBUG nova.compute.resource_tracker [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Instance d7a90be3-d3d6-4626-944b-b907cf7fb64d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62070) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 589.277721] env[62070]: DEBUG oslo_concurrency.lockutils [None req-97492129-d62b-43cc-b22d-f53a63d9b75b tempest-InstanceActionsTestJSON-809963800 tempest-InstanceActionsTestJSON-809963800-project-member] Acquiring lock "adccca24-ed77-410b-8b69-19137cadafbd" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 589.277955] env[62070]: DEBUG oslo_concurrency.lockutils [None req-97492129-d62b-43cc-b22d-f53a63d9b75b tempest-InstanceActionsTestJSON-809963800 tempest-InstanceActionsTestJSON-809963800-project-member] Lock "adccca24-ed77-410b-8b69-19137cadafbd" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 589.409360] env[62070]: DEBUG nova.network.neutron [None req-1f16b43e-5915-487b-ba0a-4bf16e720213 tempest-ImagesNegativeTestJSON-1253036663 tempest-ImagesNegativeTestJSON-1253036663-project-member] [instance: 88251634-8add-4216-b789-dfee77a1ae09] Instance cache missing network info. {{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 589.596074] env[62070]: DEBUG nova.network.neutron [None req-1f16b43e-5915-487b-ba0a-4bf16e720213 tempest-ImagesNegativeTestJSON-1253036663 tempest-ImagesNegativeTestJSON-1253036663-project-member] [instance: 88251634-8add-4216-b789-dfee77a1ae09] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 589.732309] env[62070]: DEBUG nova.compute.resource_tracker [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Instance ad0dd218-5e45-4d22-9d94-5c25ba8b22ec has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62070) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 590.098193] env[62070]: DEBUG oslo_concurrency.lockutils [None req-1f16b43e-5915-487b-ba0a-4bf16e720213 tempest-ImagesNegativeTestJSON-1253036663 tempest-ImagesNegativeTestJSON-1253036663-project-member] Releasing lock "refresh_cache-88251634-8add-4216-b789-dfee77a1ae09" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 590.098642] env[62070]: DEBUG nova.compute.manager [None req-1f16b43e-5915-487b-ba0a-4bf16e720213 tempest-ImagesNegativeTestJSON-1253036663 tempest-ImagesNegativeTestJSON-1253036663-project-member] [instance: 88251634-8add-4216-b789-dfee77a1ae09] Start destroying the instance on the hypervisor. 
{{(pid=62070) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 590.098862] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-1f16b43e-5915-487b-ba0a-4bf16e720213 tempest-ImagesNegativeTestJSON-1253036663 tempest-ImagesNegativeTestJSON-1253036663-project-member] [instance: 88251634-8add-4216-b789-dfee77a1ae09] Destroying instance {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 590.099161] env[62070]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-80da191b-71fa-4370-b2fc-f8e1ec3cec61 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.108044] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab176a13-1f3b-4b40-a410-5927e0e4f8aa {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.128879] env[62070]: WARNING nova.virt.vmwareapi.vmops [None req-1f16b43e-5915-487b-ba0a-4bf16e720213 tempest-ImagesNegativeTestJSON-1253036663 tempest-ImagesNegativeTestJSON-1253036663-project-member] [instance: 88251634-8add-4216-b789-dfee77a1ae09] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 88251634-8add-4216-b789-dfee77a1ae09 could not be found. [ 590.129400] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-1f16b43e-5915-487b-ba0a-4bf16e720213 tempest-ImagesNegativeTestJSON-1253036663 tempest-ImagesNegativeTestJSON-1253036663-project-member] [instance: 88251634-8add-4216-b789-dfee77a1ae09] Instance destroyed {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 590.129400] env[62070]: INFO nova.compute.manager [None req-1f16b43e-5915-487b-ba0a-4bf16e720213 tempest-ImagesNegativeTestJSON-1253036663 tempest-ImagesNegativeTestJSON-1253036663-project-member] [instance: 88251634-8add-4216-b789-dfee77a1ae09] Took 0.03 seconds to destroy the instance on the hypervisor. [ 590.129658] env[62070]: DEBUG oslo.service.loopingcall [None req-1f16b43e-5915-487b-ba0a-4bf16e720213 tempest-ImagesNegativeTestJSON-1253036663 tempest-ImagesNegativeTestJSON-1253036663-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 590.129763] env[62070]: DEBUG nova.compute.manager [-] [instance: 88251634-8add-4216-b789-dfee77a1ae09] Deallocating network for instance {{(pid=62070) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 590.129852] env[62070]: DEBUG nova.network.neutron [-] [instance: 88251634-8add-4216-b789-dfee77a1ae09] deallocate_for_instance() {{(pid=62070) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 590.151824] env[62070]: DEBUG nova.network.neutron [-] [instance: 88251634-8add-4216-b789-dfee77a1ae09] Instance cache missing network info. {{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 590.235591] env[62070]: DEBUG nova.compute.resource_tracker [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Instance 3ee4e051-f51d-4840-a918-fdedad020557 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62070) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 590.300525] env[62070]: DEBUG nova.compute.manager [req-2788d18b-52c4-42bf-b18b-6a96b40c330d req-7953f80a-46fa-457d-ac4b-9e3bff478ab8 service nova] [instance: 88251634-8add-4216-b789-dfee77a1ae09] Received event network-changed-00c97619-997b-46ca-bd58-aa05d59eb0f7 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 590.300525] env[62070]: DEBUG nova.compute.manager [req-2788d18b-52c4-42bf-b18b-6a96b40c330d req-7953f80a-46fa-457d-ac4b-9e3bff478ab8 service nova] [instance: 88251634-8add-4216-b789-dfee77a1ae09] Refreshing instance network info cache due to event network-changed-00c97619-997b-46ca-bd58-aa05d59eb0f7. {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 590.300525] env[62070]: DEBUG oslo_concurrency.lockutils [req-2788d18b-52c4-42bf-b18b-6a96b40c330d req-7953f80a-46fa-457d-ac4b-9e3bff478ab8 service nova] Acquiring lock "refresh_cache-88251634-8add-4216-b789-dfee77a1ae09" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 590.300525] env[62070]: DEBUG oslo_concurrency.lockutils [req-2788d18b-52c4-42bf-b18b-6a96b40c330d req-7953f80a-46fa-457d-ac4b-9e3bff478ab8 service nova] Acquired lock "refresh_cache-88251634-8add-4216-b789-dfee77a1ae09" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 590.300525] env[62070]: DEBUG nova.network.neutron [req-2788d18b-52c4-42bf-b18b-6a96b40c330d req-7953f80a-46fa-457d-ac4b-9e3bff478ab8 service nova] [instance: 88251634-8add-4216-b789-dfee77a1ae09] Refreshing network info cache for port 00c97619-997b-46ca-bd58-aa05d59eb0f7 {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 590.654457] env[62070]: DEBUG nova.network.neutron [-] [instance: 88251634-8add-4216-b789-dfee77a1ae09] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 590.739219] env[62070]: DEBUG nova.compute.resource_tracker [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Instance 73ab65b7-32e7-4206-8f31-466085319c71 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62070) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 590.839485] env[62070]: DEBUG nova.network.neutron [req-2788d18b-52c4-42bf-b18b-6a96b40c330d req-7953f80a-46fa-457d-ac4b-9e3bff478ab8 service nova] [instance: 88251634-8add-4216-b789-dfee77a1ae09] Instance cache missing network info. 
{{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 590.887349] env[62070]: DEBUG oslo_concurrency.lockutils [None req-ca5c3ba7-16ea-4ea4-a91b-9bf5923a33ab tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Acquiring lock "efa18997-b502-4e2e-933a-a185ab9074d5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 590.888024] env[62070]: DEBUG oslo_concurrency.lockutils [None req-ca5c3ba7-16ea-4ea4-a91b-9bf5923a33ab tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Lock "efa18997-b502-4e2e-933a-a185ab9074d5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 590.892379] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6c8ffe8c-5f36-434d-8036-6e71a11d52b4 tempest-InstanceActionsV221TestJSON-1098319233 tempest-InstanceActionsV221TestJSON-1098319233-project-member] Acquiring lock "9ec1b7a6-5ade-49a3-ba47-912bb328adb6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 590.892649] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6c8ffe8c-5f36-434d-8036-6e71a11d52b4 tempest-InstanceActionsV221TestJSON-1098319233 tempest-InstanceActionsV221TestJSON-1098319233-project-member] Lock "9ec1b7a6-5ade-49a3-ba47-912bb328adb6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 591.141797] env[62070]: DEBUG nova.network.neutron [req-2788d18b-52c4-42bf-b18b-6a96b40c330d req-7953f80a-46fa-457d-ac4b-9e3bff478ab8 service nova] [instance: 88251634-8add-4216-b789-dfee77a1ae09] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 591.158895] env[62070]: INFO nova.compute.manager [-] [instance: 88251634-8add-4216-b789-dfee77a1ae09] Took 1.03 seconds to deallocate network for instance. 
[ 591.162090] env[62070]: DEBUG nova.compute.claims [None req-1f16b43e-5915-487b-ba0a-4bf16e720213 tempest-ImagesNegativeTestJSON-1253036663 tempest-ImagesNegativeTestJSON-1253036663-project-member] [instance: 88251634-8add-4216-b789-dfee77a1ae09] Aborting claim: {{(pid=62070) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 591.162090] env[62070]: DEBUG oslo_concurrency.lockutils [None req-1f16b43e-5915-487b-ba0a-4bf16e720213 tempest-ImagesNegativeTestJSON-1253036663 tempest-ImagesNegativeTestJSON-1253036663-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 591.247050] env[62070]: DEBUG nova.compute.resource_tracker [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Instance 73cb4a44-a4d9-4c8c-8472-f99b5d449cec has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62070) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 591.648013] env[62070]: DEBUG oslo_concurrency.lockutils [req-2788d18b-52c4-42bf-b18b-6a96b40c330d req-7953f80a-46fa-457d-ac4b-9e3bff478ab8 service nova] Releasing lock "refresh_cache-88251634-8add-4216-b789-dfee77a1ae09" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 591.752488] env[62070]: DEBUG nova.compute.resource_tracker [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Instance 95edf3d1-a987-4768-93be-1e045d7bfa99 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62070) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 592.256313] env[62070]: DEBUG nova.compute.resource_tracker [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Instance b0134b0f-23b4-4d34-b144-71ccdd9fba72 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62070) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 592.496597] env[62070]: DEBUG oslo_concurrency.lockutils [None req-75e4eab2-d509-4e0b-8199-5c5644d968e3 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Acquiring lock "5da19104-b163-44cd-bb1f-68c4eb316ac1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 592.496653] env[62070]: DEBUG oslo_concurrency.lockutils [None req-75e4eab2-d509-4e0b-8199-5c5644d968e3 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Lock "5da19104-b163-44cd-bb1f-68c4eb316ac1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 592.760651] env[62070]: DEBUG nova.compute.resource_tracker [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Instance d41f73e0-a188-4cc4-8391-938178aad496 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62070) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 592.760651] env[62070]: DEBUG nova.compute.resource_tracker [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Total usable vcpus: 48, total allocated vcpus: 4 {{(pid=62070) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 592.760651] env[62070]: DEBUG nova.compute.resource_tracker [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1280MB phys_disk=200GB used_disk=4GB total_vcpus=48 used_vcpus=4 pci_stats=[] {{(pid=62070) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 592.772816] env[62070]: DEBUG oslo_concurrency.lockutils [None req-1a6044f5-8e8b-4bc9-a7d0-857f0899f9f6 tempest-DeleteServersAdminTestJSON-428050376 tempest-DeleteServersAdminTestJSON-428050376-project-member] Acquiring lock "a71c58e7-89db-4ad2-92e0-5379b04b751c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 592.773731] env[62070]: DEBUG oslo_concurrency.lockutils [None req-1a6044f5-8e8b-4bc9-a7d0-857f0899f9f6 tempest-DeleteServersAdminTestJSON-428050376 tempest-DeleteServersAdminTestJSON-428050376-project-member] Lock "a71c58e7-89db-4ad2-92e0-5379b04b751c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 593.017386] env[62070]: DEBUG nova.compute.manager [req-c5aaeccf-3831-4fc8-9e0b-b0a82b9934d2 req-8ce1eab4-49ce-44a3-82a4-7ec643425fe4 service nova] [instance: 88251634-8add-4216-b789-dfee77a1ae09] Received event network-vif-deleted-00c97619-997b-46ca-bd58-aa05d59eb0f7 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 593.087815] env[62070]: DEBUG 
oslo_concurrency.lockutils [None req-b596e62b-91d4-4a7e-a714-611f34958df6 tempest-FloatingIPsAssociationTestJSON-86130693 tempest-FloatingIPsAssociationTestJSON-86130693-project-member] Acquiring lock "2c1dfa78-d300-4505-9f87-8e11a4973af3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 593.088078] env[62070]: DEBUG oslo_concurrency.lockutils [None req-b596e62b-91d4-4a7e-a714-611f34958df6 tempest-FloatingIPsAssociationTestJSON-86130693 tempest-FloatingIPsAssociationTestJSON-86130693-project-member] Lock "2c1dfa78-d300-4505-9f87-8e11a4973af3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 593.174287] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7674720-1003-45b1-a2b9-a87151d1bde6 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 593.183018] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79a63c69-fb9a-44f0-bf60-6e622a775c89 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 593.215138] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fbb81c5-98f2-4abb-8cbf-cb7e610fc26d {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 593.223169] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0cbc1b22-c2dc-4f22-b2cc-6c75bcfec3b5 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 593.238115] env[62070]: DEBUG nova.compute.provider_tree [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 593.293498] env[62070]: DEBUG oslo_concurrency.lockutils [None req-1df742cb-33a0-4f39-9606-d3eecf96e864 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Acquiring lock "d148d561-3211-4f1f-965a-f2b14cd60b11" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 593.293498] env[62070]: DEBUG oslo_concurrency.lockutils [None req-1df742cb-33a0-4f39-9606-d3eecf96e864 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Lock "d148d561-3211-4f1f-965a-f2b14cd60b11" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 593.746391] env[62070]: DEBUG nova.scheduler.client.report [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 
1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 594.252423] env[62070]: DEBUG nova.compute.resource_tracker [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62070) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 594.252732] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 8.596s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 594.252939] env[62070]: DEBUG oslo_concurrency.lockutils [None req-1b5d4941-f26d-40d2-8fb4-3b7b7d57038b tempest-ServersAdminTestJSON-1165817432 tempest-ServersAdminTestJSON-1165817432-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 19.313s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 595.246106] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09edc7e7-3610-4fc4-9b1d-87cee5aabee1 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.262060] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c34d2b0-8524-4d19-8be0-22d73f4ce61d {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.297639] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-170e8784-10a2-446e-95b2-6ec8a8e1b490 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.306224] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f131f05d-3dc0-478f-8012-5ddfeaf76019 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.320305] env[62070]: DEBUG nova.compute.provider_tree [None req-1b5d4941-f26d-40d2-8fb4-3b7b7d57038b tempest-ServersAdminTestJSON-1165817432 tempest-ServersAdminTestJSON-1165817432-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 595.824860] env[62070]: DEBUG nova.scheduler.client.report [None req-1b5d4941-f26d-40d2-8fb4-3b7b7d57038b tempest-ServersAdminTestJSON-1165817432 tempest-ServersAdminTestJSON-1165817432-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 
'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 596.050440] env[62070]: DEBUG oslo_concurrency.lockutils [None req-93feba92-6b0d-4708-bd35-b11f5f9670fd tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Acquiring lock "0ac963b1-120a-464b-8228-3393135dd182" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 596.050440] env[62070]: DEBUG oslo_concurrency.lockutils [None req-93feba92-6b0d-4708-bd35-b11f5f9670fd tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Lock "0ac963b1-120a-464b-8228-3393135dd182" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 596.336751] env[62070]: DEBUG oslo_concurrency.lockutils [None req-1b5d4941-f26d-40d2-8fb4-3b7b7d57038b tempest-ServersAdminTestJSON-1165817432 tempest-ServersAdminTestJSON-1165817432-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.083s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 596.337326] env[62070]: ERROR nova.compute.manager [None req-1b5d4941-f26d-40d2-8fb4-3b7b7d57038b tempest-ServersAdminTestJSON-1165817432 tempest-ServersAdminTestJSON-1165817432-project-member] [instance: e3306a4a-b1cf-4d13-b4de-bd6a6a2e2619] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port bd48685f-7c51-4b6b-8b0b-6ac758a2eb4d, please check neutron logs for more information. 
[ 596.337326] env[62070]: ERROR nova.compute.manager [instance: e3306a4a-b1cf-4d13-b4de-bd6a6a2e2619] Traceback (most recent call last): [ 596.337326] env[62070]: ERROR nova.compute.manager [instance: e3306a4a-b1cf-4d13-b4de-bd6a6a2e2619] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 596.337326] env[62070]: ERROR nova.compute.manager [instance: e3306a4a-b1cf-4d13-b4de-bd6a6a2e2619] self.driver.spawn(context, instance, image_meta, [ 596.337326] env[62070]: ERROR nova.compute.manager [instance: e3306a4a-b1cf-4d13-b4de-bd6a6a2e2619] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 596.337326] env[62070]: ERROR nova.compute.manager [instance: e3306a4a-b1cf-4d13-b4de-bd6a6a2e2619] self._vmops.spawn(context, instance, image_meta, injected_files, [ 596.337326] env[62070]: ERROR nova.compute.manager [instance: e3306a4a-b1cf-4d13-b4de-bd6a6a2e2619] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 596.337326] env[62070]: ERROR nova.compute.manager [instance: e3306a4a-b1cf-4d13-b4de-bd6a6a2e2619] vm_ref = self.build_virtual_machine(instance, [ 596.337326] env[62070]: ERROR nova.compute.manager [instance: e3306a4a-b1cf-4d13-b4de-bd6a6a2e2619] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 596.337326] env[62070]: ERROR nova.compute.manager [instance: e3306a4a-b1cf-4d13-b4de-bd6a6a2e2619] vif_infos = vmwarevif.get_vif_info(self._session, [ 596.337326] env[62070]: ERROR nova.compute.manager [instance: e3306a4a-b1cf-4d13-b4de-bd6a6a2e2619] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 596.337699] env[62070]: ERROR nova.compute.manager [instance: e3306a4a-b1cf-4d13-b4de-bd6a6a2e2619] for vif in network_info: [ 596.337699] env[62070]: ERROR nova.compute.manager [instance: e3306a4a-b1cf-4d13-b4de-bd6a6a2e2619] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 596.337699] env[62070]: ERROR nova.compute.manager [instance: e3306a4a-b1cf-4d13-b4de-bd6a6a2e2619] return self._sync_wrapper(fn, *args, **kwargs) [ 596.337699] env[62070]: ERROR nova.compute.manager [instance: e3306a4a-b1cf-4d13-b4de-bd6a6a2e2619] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 596.337699] env[62070]: ERROR nova.compute.manager [instance: e3306a4a-b1cf-4d13-b4de-bd6a6a2e2619] self.wait() [ 596.337699] env[62070]: ERROR nova.compute.manager [instance: e3306a4a-b1cf-4d13-b4de-bd6a6a2e2619] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 596.337699] env[62070]: ERROR nova.compute.manager [instance: e3306a4a-b1cf-4d13-b4de-bd6a6a2e2619] self[:] = self._gt.wait() [ 596.337699] env[62070]: ERROR nova.compute.manager [instance: e3306a4a-b1cf-4d13-b4de-bd6a6a2e2619] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 596.337699] env[62070]: ERROR nova.compute.manager [instance: e3306a4a-b1cf-4d13-b4de-bd6a6a2e2619] return self._exit_event.wait() [ 596.337699] env[62070]: ERROR nova.compute.manager [instance: e3306a4a-b1cf-4d13-b4de-bd6a6a2e2619] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 596.337699] env[62070]: ERROR nova.compute.manager [instance: e3306a4a-b1cf-4d13-b4de-bd6a6a2e2619] result = hub.switch() [ 596.337699] env[62070]: ERROR nova.compute.manager [instance: e3306a4a-b1cf-4d13-b4de-bd6a6a2e2619] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
596.337699] env[62070]: ERROR nova.compute.manager [instance: e3306a4a-b1cf-4d13-b4de-bd6a6a2e2619] return self.greenlet.switch() [ 596.338192] env[62070]: ERROR nova.compute.manager [instance: e3306a4a-b1cf-4d13-b4de-bd6a6a2e2619] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 596.338192] env[62070]: ERROR nova.compute.manager [instance: e3306a4a-b1cf-4d13-b4de-bd6a6a2e2619] result = function(*args, **kwargs) [ 596.338192] env[62070]: ERROR nova.compute.manager [instance: e3306a4a-b1cf-4d13-b4de-bd6a6a2e2619] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 596.338192] env[62070]: ERROR nova.compute.manager [instance: e3306a4a-b1cf-4d13-b4de-bd6a6a2e2619] return func(*args, **kwargs) [ 596.338192] env[62070]: ERROR nova.compute.manager [instance: e3306a4a-b1cf-4d13-b4de-bd6a6a2e2619] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 596.338192] env[62070]: ERROR nova.compute.manager [instance: e3306a4a-b1cf-4d13-b4de-bd6a6a2e2619] raise e [ 596.338192] env[62070]: ERROR nova.compute.manager [instance: e3306a4a-b1cf-4d13-b4de-bd6a6a2e2619] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 596.338192] env[62070]: ERROR nova.compute.manager [instance: e3306a4a-b1cf-4d13-b4de-bd6a6a2e2619] nwinfo = self.network_api.allocate_for_instance( [ 596.338192] env[62070]: ERROR nova.compute.manager [instance: e3306a4a-b1cf-4d13-b4de-bd6a6a2e2619] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 596.338192] env[62070]: ERROR nova.compute.manager [instance: e3306a4a-b1cf-4d13-b4de-bd6a6a2e2619] created_port_ids = self._update_ports_for_instance( [ 596.338192] env[62070]: ERROR nova.compute.manager [instance: e3306a4a-b1cf-4d13-b4de-bd6a6a2e2619] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 596.338192] env[62070]: ERROR nova.compute.manager [instance: e3306a4a-b1cf-4d13-b4de-bd6a6a2e2619] with excutils.save_and_reraise_exception(): [ 596.338192] env[62070]: ERROR nova.compute.manager [instance: e3306a4a-b1cf-4d13-b4de-bd6a6a2e2619] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 596.338586] env[62070]: ERROR nova.compute.manager [instance: e3306a4a-b1cf-4d13-b4de-bd6a6a2e2619] self.force_reraise() [ 596.338586] env[62070]: ERROR nova.compute.manager [instance: e3306a4a-b1cf-4d13-b4de-bd6a6a2e2619] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 596.338586] env[62070]: ERROR nova.compute.manager [instance: e3306a4a-b1cf-4d13-b4de-bd6a6a2e2619] raise self.value [ 596.338586] env[62070]: ERROR nova.compute.manager [instance: e3306a4a-b1cf-4d13-b4de-bd6a6a2e2619] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 596.338586] env[62070]: ERROR nova.compute.manager [instance: e3306a4a-b1cf-4d13-b4de-bd6a6a2e2619] updated_port = self._update_port( [ 596.338586] env[62070]: ERROR nova.compute.manager [instance: e3306a4a-b1cf-4d13-b4de-bd6a6a2e2619] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 596.338586] env[62070]: ERROR nova.compute.manager [instance: e3306a4a-b1cf-4d13-b4de-bd6a6a2e2619] _ensure_no_port_binding_failure(port) [ 596.338586] env[62070]: ERROR nova.compute.manager [instance: e3306a4a-b1cf-4d13-b4de-bd6a6a2e2619] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 596.338586] env[62070]: ERROR nova.compute.manager [instance: e3306a4a-b1cf-4d13-b4de-bd6a6a2e2619] raise exception.PortBindingFailed(port_id=port['id']) [ 596.338586] env[62070]: ERROR nova.compute.manager [instance: e3306a4a-b1cf-4d13-b4de-bd6a6a2e2619] nova.exception.PortBindingFailed: Binding failed for port bd48685f-7c51-4b6b-8b0b-6ac758a2eb4d, please check neutron logs for more information. [ 596.338586] env[62070]: ERROR nova.compute.manager [instance: e3306a4a-b1cf-4d13-b4de-bd6a6a2e2619] [ 596.338899] env[62070]: DEBUG nova.compute.utils [None req-1b5d4941-f26d-40d2-8fb4-3b7b7d57038b tempest-ServersAdminTestJSON-1165817432 tempest-ServersAdminTestJSON-1165817432-project-member] [instance: e3306a4a-b1cf-4d13-b4de-bd6a6a2e2619] Binding failed for port bd48685f-7c51-4b6b-8b0b-6ac758a2eb4d, please check neutron logs for more information. {{(pid=62070) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 596.339695] env[62070]: DEBUG oslo_concurrency.lockutils [None req-3399fbf7-c899-4cd9-8ca2-2d4fdce15ab9 tempest-InstanceActionsNegativeTestJSON-1068104952 tempest-InstanceActionsNegativeTestJSON-1068104952-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 19.852s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 596.341872] env[62070]: INFO nova.compute.claims [None req-3399fbf7-c899-4cd9-8ca2-2d4fdce15ab9 tempest-InstanceActionsNegativeTestJSON-1068104952 tempest-InstanceActionsNegativeTestJSON-1068104952-project-member] [instance: 495a15b2-20bd-44d2-8020-816031e89832] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 596.345971] env[62070]: DEBUG nova.compute.manager [None req-1b5d4941-f26d-40d2-8fb4-3b7b7d57038b tempest-ServersAdminTestJSON-1165817432 tempest-ServersAdminTestJSON-1165817432-project-member] [instance: e3306a4a-b1cf-4d13-b4de-bd6a6a2e2619] Build of instance e3306a4a-b1cf-4d13-b4de-bd6a6a2e2619 was re-scheduled: Binding failed for port bd48685f-7c51-4b6b-8b0b-6ac758a2eb4d, please check neutron logs for more information. 
{{(pid=62070) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 596.347035] env[62070]: DEBUG nova.compute.manager [None req-1b5d4941-f26d-40d2-8fb4-3b7b7d57038b tempest-ServersAdminTestJSON-1165817432 tempest-ServersAdminTestJSON-1165817432-project-member] [instance: e3306a4a-b1cf-4d13-b4de-bd6a6a2e2619] Unplugging VIFs for instance {{(pid=62070) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 596.347035] env[62070]: DEBUG oslo_concurrency.lockutils [None req-1b5d4941-f26d-40d2-8fb4-3b7b7d57038b tempest-ServersAdminTestJSON-1165817432 tempest-ServersAdminTestJSON-1165817432-project-member] Acquiring lock "refresh_cache-e3306a4a-b1cf-4d13-b4de-bd6a6a2e2619" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 596.347035] env[62070]: DEBUG oslo_concurrency.lockutils [None req-1b5d4941-f26d-40d2-8fb4-3b7b7d57038b tempest-ServersAdminTestJSON-1165817432 tempest-ServersAdminTestJSON-1165817432-project-member] Acquired lock "refresh_cache-e3306a4a-b1cf-4d13-b4de-bd6a6a2e2619" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 596.347035] env[62070]: DEBUG nova.network.neutron [None req-1b5d4941-f26d-40d2-8fb4-3b7b7d57038b tempest-ServersAdminTestJSON-1165817432 tempest-ServersAdminTestJSON-1165817432-project-member] [instance: e3306a4a-b1cf-4d13-b4de-bd6a6a2e2619] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 596.870810] env[62070]: DEBUG nova.network.neutron [None req-1b5d4941-f26d-40d2-8fb4-3b7b7d57038b tempest-ServersAdminTestJSON-1165817432 tempest-ServersAdminTestJSON-1165817432-project-member] [instance: e3306a4a-b1cf-4d13-b4de-bd6a6a2e2619] Instance cache missing network info. {{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 597.011842] env[62070]: DEBUG nova.network.neutron [None req-1b5d4941-f26d-40d2-8fb4-3b7b7d57038b tempest-ServersAdminTestJSON-1165817432 tempest-ServersAdminTestJSON-1165817432-project-member] [instance: e3306a4a-b1cf-4d13-b4de-bd6a6a2e2619] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 597.516159] env[62070]: DEBUG oslo_concurrency.lockutils [None req-1b5d4941-f26d-40d2-8fb4-3b7b7d57038b tempest-ServersAdminTestJSON-1165817432 tempest-ServersAdminTestJSON-1165817432-project-member] Releasing lock "refresh_cache-e3306a4a-b1cf-4d13-b4de-bd6a6a2e2619" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 597.516159] env[62070]: DEBUG nova.compute.manager [None req-1b5d4941-f26d-40d2-8fb4-3b7b7d57038b tempest-ServersAdminTestJSON-1165817432 tempest-ServersAdminTestJSON-1165817432-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62070) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 597.516159] env[62070]: DEBUG nova.compute.manager [None req-1b5d4941-f26d-40d2-8fb4-3b7b7d57038b tempest-ServersAdminTestJSON-1165817432 tempest-ServersAdminTestJSON-1165817432-project-member] [instance: e3306a4a-b1cf-4d13-b4de-bd6a6a2e2619] Deallocating network for instance {{(pid=62070) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 597.516159] env[62070]: DEBUG nova.network.neutron [None req-1b5d4941-f26d-40d2-8fb4-3b7b7d57038b tempest-ServersAdminTestJSON-1165817432 tempest-ServersAdminTestJSON-1165817432-project-member] [instance: e3306a4a-b1cf-4d13-b4de-bd6a6a2e2619] deallocate_for_instance() {{(pid=62070) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 597.537027] env[62070]: DEBUG nova.network.neutron [None req-1b5d4941-f26d-40d2-8fb4-3b7b7d57038b tempest-ServersAdminTestJSON-1165817432 tempest-ServersAdminTestJSON-1165817432-project-member] [instance: e3306a4a-b1cf-4d13-b4de-bd6a6a2e2619] Instance cache missing network info. {{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 597.726060] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a551ac9-0a1a-449f-b929-341bd9350a64 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.731572] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9429eb4-18bf-4da9-affd-50ee8a90dc02 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.763622] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a002ada4-2dc1-4532-b5c7-b69111410d4d {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.771158] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be0e0b5d-b253-4ce0-9d6d-38c32a95d2cb {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.784210] env[62070]: DEBUG nova.compute.provider_tree [None req-3399fbf7-c899-4cd9-8ca2-2d4fdce15ab9 tempest-InstanceActionsNegativeTestJSON-1068104952 tempest-InstanceActionsNegativeTestJSON-1068104952-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 598.040500] env[62070]: DEBUG nova.network.neutron [None req-1b5d4941-f26d-40d2-8fb4-3b7b7d57038b tempest-ServersAdminTestJSON-1165817432 tempest-ServersAdminTestJSON-1165817432-project-member] [instance: e3306a4a-b1cf-4d13-b4de-bd6a6a2e2619] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 598.290063] env[62070]: DEBUG nova.scheduler.client.report [None req-3399fbf7-c899-4cd9-8ca2-2d4fdce15ab9 tempest-InstanceActionsNegativeTestJSON-1068104952 tempest-InstanceActionsNegativeTestJSON-1068104952-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 
'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 598.470638] env[62070]: DEBUG oslo_concurrency.lockutils [None req-89ead6ea-31c2-4fec-ad52-a9d968c7ad21 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Acquiring lock "71aead12-a182-40a7-b5a9-91c01271b800" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 598.470638] env[62070]: DEBUG oslo_concurrency.lockutils [None req-89ead6ea-31c2-4fec-ad52-a9d968c7ad21 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Lock "71aead12-a182-40a7-b5a9-91c01271b800" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 598.546034] env[62070]: INFO nova.compute.manager [None req-1b5d4941-f26d-40d2-8fb4-3b7b7d57038b tempest-ServersAdminTestJSON-1165817432 tempest-ServersAdminTestJSON-1165817432-project-member] [instance: e3306a4a-b1cf-4d13-b4de-bd6a6a2e2619] Took 1.03 seconds to deallocate network for instance. [ 598.801159] env[62070]: DEBUG oslo_concurrency.lockutils [None req-3399fbf7-c899-4cd9-8ca2-2d4fdce15ab9 tempest-InstanceActionsNegativeTestJSON-1068104952 tempest-InstanceActionsNegativeTestJSON-1068104952-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.462s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 598.801619] env[62070]: DEBUG nova.compute.manager [None req-3399fbf7-c899-4cd9-8ca2-2d4fdce15ab9 tempest-InstanceActionsNegativeTestJSON-1068104952 tempest-InstanceActionsNegativeTestJSON-1068104952-project-member] [instance: 495a15b2-20bd-44d2-8020-816031e89832] Start building networks asynchronously for instance. {{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 598.804471] env[62070]: DEBUG oslo_concurrency.lockutils [None req-ff380498-571f-460a-ac0d-214b8f51e56f tempest-ServersAdminTestJSON-1165817432 tempest-ServersAdminTestJSON-1165817432-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 21.802s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 599.312895] env[62070]: DEBUG nova.compute.utils [None req-3399fbf7-c899-4cd9-8ca2-2d4fdce15ab9 tempest-InstanceActionsNegativeTestJSON-1068104952 tempest-InstanceActionsNegativeTestJSON-1068104952-project-member] Using /dev/sd instead of None {{(pid=62070) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 599.316080] env[62070]: DEBUG nova.compute.manager [None req-3399fbf7-c899-4cd9-8ca2-2d4fdce15ab9 tempest-InstanceActionsNegativeTestJSON-1068104952 tempest-InstanceActionsNegativeTestJSON-1068104952-project-member] [instance: 495a15b2-20bd-44d2-8020-816031e89832] Allocating IP information in the background. 
{{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 599.316450] env[62070]: DEBUG nova.network.neutron [None req-3399fbf7-c899-4cd9-8ca2-2d4fdce15ab9 tempest-InstanceActionsNegativeTestJSON-1068104952 tempest-InstanceActionsNegativeTestJSON-1068104952-project-member] [instance: 495a15b2-20bd-44d2-8020-816031e89832] allocate_for_instance() {{(pid=62070) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 599.453397] env[62070]: DEBUG nova.policy [None req-3399fbf7-c899-4cd9-8ca2-2d4fdce15ab9 tempest-InstanceActionsNegativeTestJSON-1068104952 tempest-InstanceActionsNegativeTestJSON-1068104952-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'cc41dd50c04646dd8f7b01869db7c2d3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a66898a129284f79928288b9e8183532', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62070) authorize /opt/stack/nova/nova/policy.py:201}} [ 599.578482] env[62070]: INFO nova.scheduler.client.report [None req-1b5d4941-f26d-40d2-8fb4-3b7b7d57038b tempest-ServersAdminTestJSON-1165817432 tempest-ServersAdminTestJSON-1165817432-project-member] Deleted allocations for instance e3306a4a-b1cf-4d13-b4de-bd6a6a2e2619 [ 599.708057] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59339ef8-8e37-46e3-9144-668bef297968 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.716012] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87ce3128-84ca-4292-924a-ceeac2b8bc3b {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.749744] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fbeb174-ef3a-48d8-87e8-0569d37dac08 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.757519] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18ae04ae-9117-4408-b74d-c44f46f125a1 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.771490] env[62070]: DEBUG nova.compute.provider_tree [None req-ff380498-571f-460a-ac0d-214b8f51e56f tempest-ServersAdminTestJSON-1165817432 tempest-ServersAdminTestJSON-1165817432-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 599.820272] env[62070]: DEBUG nova.compute.manager [None req-3399fbf7-c899-4cd9-8ca2-2d4fdce15ab9 tempest-InstanceActionsNegativeTestJSON-1068104952 tempest-InstanceActionsNegativeTestJSON-1068104952-project-member] [instance: 495a15b2-20bd-44d2-8020-816031e89832] Start building block device mappings for instance. 
{{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 599.883264] env[62070]: DEBUG nova.network.neutron [None req-3399fbf7-c899-4cd9-8ca2-2d4fdce15ab9 tempest-InstanceActionsNegativeTestJSON-1068104952 tempest-InstanceActionsNegativeTestJSON-1068104952-project-member] [instance: 495a15b2-20bd-44d2-8020-816031e89832] Successfully created port: 247acd8a-3707-4fc6-a29f-66b81e3af752 {{(pid=62070) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 600.092880] env[62070]: DEBUG oslo_concurrency.lockutils [None req-1b5d4941-f26d-40d2-8fb4-3b7b7d57038b tempest-ServersAdminTestJSON-1165817432 tempest-ServersAdminTestJSON-1165817432-project-member] Lock "e3306a4a-b1cf-4d13-b4de-bd6a6a2e2619" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 44.593s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 600.275275] env[62070]: DEBUG nova.scheduler.client.report [None req-ff380498-571f-460a-ac0d-214b8f51e56f tempest-ServersAdminTestJSON-1165817432 tempest-ServersAdminTestJSON-1165817432-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 600.597254] env[62070]: DEBUG nova.compute.manager [None req-ae386fad-6b3a-4fae-9284-28954bd3da74 tempest-ServersTestJSON-1024696343 tempest-ServersTestJSON-1024696343-project-member] [instance: 3ee4e051-f51d-4840-a918-fdedad020557] Starting instance... {{(pid=62070) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 600.784409] env[62070]: DEBUG oslo_concurrency.lockutils [None req-ff380498-571f-460a-ac0d-214b8f51e56f tempest-ServersAdminTestJSON-1165817432 tempest-ServersAdminTestJSON-1165817432-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.979s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 600.785960] env[62070]: ERROR nova.compute.manager [None req-ff380498-571f-460a-ac0d-214b8f51e56f tempest-ServersAdminTestJSON-1165817432 tempest-ServersAdminTestJSON-1165817432-project-member] [instance: 5936aded-90fc-4f77-8103-8c9e1912379c] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 611560c7-4689-481c-9fd1-efc64eae1fd4, please check neutron logs for more information. 
[ 600.785960] env[62070]: ERROR nova.compute.manager [instance: 5936aded-90fc-4f77-8103-8c9e1912379c] Traceback (most recent call last): [ 600.785960] env[62070]: ERROR nova.compute.manager [instance: 5936aded-90fc-4f77-8103-8c9e1912379c] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 600.785960] env[62070]: ERROR nova.compute.manager [instance: 5936aded-90fc-4f77-8103-8c9e1912379c] self.driver.spawn(context, instance, image_meta, [ 600.785960] env[62070]: ERROR nova.compute.manager [instance: 5936aded-90fc-4f77-8103-8c9e1912379c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 600.785960] env[62070]: ERROR nova.compute.manager [instance: 5936aded-90fc-4f77-8103-8c9e1912379c] self._vmops.spawn(context, instance, image_meta, injected_files, [ 600.785960] env[62070]: ERROR nova.compute.manager [instance: 5936aded-90fc-4f77-8103-8c9e1912379c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 600.785960] env[62070]: ERROR nova.compute.manager [instance: 5936aded-90fc-4f77-8103-8c9e1912379c] vm_ref = self.build_virtual_machine(instance, [ 600.785960] env[62070]: ERROR nova.compute.manager [instance: 5936aded-90fc-4f77-8103-8c9e1912379c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 600.785960] env[62070]: ERROR nova.compute.manager [instance: 5936aded-90fc-4f77-8103-8c9e1912379c] vif_infos = vmwarevif.get_vif_info(self._session, [ 600.785960] env[62070]: ERROR nova.compute.manager [instance: 5936aded-90fc-4f77-8103-8c9e1912379c] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 600.786328] env[62070]: ERROR nova.compute.manager [instance: 5936aded-90fc-4f77-8103-8c9e1912379c] for vif in network_info: [ 600.786328] env[62070]: ERROR nova.compute.manager [instance: 5936aded-90fc-4f77-8103-8c9e1912379c] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 600.786328] env[62070]: ERROR nova.compute.manager [instance: 5936aded-90fc-4f77-8103-8c9e1912379c] return self._sync_wrapper(fn, *args, **kwargs) [ 600.786328] env[62070]: ERROR nova.compute.manager [instance: 5936aded-90fc-4f77-8103-8c9e1912379c] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 600.786328] env[62070]: ERROR nova.compute.manager [instance: 5936aded-90fc-4f77-8103-8c9e1912379c] self.wait() [ 600.786328] env[62070]: ERROR nova.compute.manager [instance: 5936aded-90fc-4f77-8103-8c9e1912379c] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 600.786328] env[62070]: ERROR nova.compute.manager [instance: 5936aded-90fc-4f77-8103-8c9e1912379c] self[:] = self._gt.wait() [ 600.786328] env[62070]: ERROR nova.compute.manager [instance: 5936aded-90fc-4f77-8103-8c9e1912379c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 600.786328] env[62070]: ERROR nova.compute.manager [instance: 5936aded-90fc-4f77-8103-8c9e1912379c] return self._exit_event.wait() [ 600.786328] env[62070]: ERROR nova.compute.manager [instance: 5936aded-90fc-4f77-8103-8c9e1912379c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 600.786328] env[62070]: ERROR nova.compute.manager [instance: 5936aded-90fc-4f77-8103-8c9e1912379c] result = hub.switch() [ 600.786328] env[62070]: ERROR nova.compute.manager [instance: 5936aded-90fc-4f77-8103-8c9e1912379c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
600.786328] env[62070]: ERROR nova.compute.manager [instance: 5936aded-90fc-4f77-8103-8c9e1912379c] return self.greenlet.switch() [ 600.786696] env[62070]: ERROR nova.compute.manager [instance: 5936aded-90fc-4f77-8103-8c9e1912379c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 600.786696] env[62070]: ERROR nova.compute.manager [instance: 5936aded-90fc-4f77-8103-8c9e1912379c] result = function(*args, **kwargs) [ 600.786696] env[62070]: ERROR nova.compute.manager [instance: 5936aded-90fc-4f77-8103-8c9e1912379c] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 600.786696] env[62070]: ERROR nova.compute.manager [instance: 5936aded-90fc-4f77-8103-8c9e1912379c] return func(*args, **kwargs) [ 600.786696] env[62070]: ERROR nova.compute.manager [instance: 5936aded-90fc-4f77-8103-8c9e1912379c] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 600.786696] env[62070]: ERROR nova.compute.manager [instance: 5936aded-90fc-4f77-8103-8c9e1912379c] raise e [ 600.786696] env[62070]: ERROR nova.compute.manager [instance: 5936aded-90fc-4f77-8103-8c9e1912379c] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 600.786696] env[62070]: ERROR nova.compute.manager [instance: 5936aded-90fc-4f77-8103-8c9e1912379c] nwinfo = self.network_api.allocate_for_instance( [ 600.786696] env[62070]: ERROR nova.compute.manager [instance: 5936aded-90fc-4f77-8103-8c9e1912379c] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 600.786696] env[62070]: ERROR nova.compute.manager [instance: 5936aded-90fc-4f77-8103-8c9e1912379c] created_port_ids = self._update_ports_for_instance( [ 600.786696] env[62070]: ERROR nova.compute.manager [instance: 5936aded-90fc-4f77-8103-8c9e1912379c] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 600.786696] env[62070]: ERROR nova.compute.manager [instance: 5936aded-90fc-4f77-8103-8c9e1912379c] with excutils.save_and_reraise_exception(): [ 600.786696] env[62070]: ERROR nova.compute.manager [instance: 5936aded-90fc-4f77-8103-8c9e1912379c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 600.787087] env[62070]: ERROR nova.compute.manager [instance: 5936aded-90fc-4f77-8103-8c9e1912379c] self.force_reraise() [ 600.787087] env[62070]: ERROR nova.compute.manager [instance: 5936aded-90fc-4f77-8103-8c9e1912379c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 600.787087] env[62070]: ERROR nova.compute.manager [instance: 5936aded-90fc-4f77-8103-8c9e1912379c] raise self.value [ 600.787087] env[62070]: ERROR nova.compute.manager [instance: 5936aded-90fc-4f77-8103-8c9e1912379c] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 600.787087] env[62070]: ERROR nova.compute.manager [instance: 5936aded-90fc-4f77-8103-8c9e1912379c] updated_port = self._update_port( [ 600.787087] env[62070]: ERROR nova.compute.manager [instance: 5936aded-90fc-4f77-8103-8c9e1912379c] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 600.787087] env[62070]: ERROR nova.compute.manager [instance: 5936aded-90fc-4f77-8103-8c9e1912379c] _ensure_no_port_binding_failure(port) [ 600.787087] env[62070]: ERROR nova.compute.manager [instance: 5936aded-90fc-4f77-8103-8c9e1912379c] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 600.787087] env[62070]: ERROR nova.compute.manager [instance: 5936aded-90fc-4f77-8103-8c9e1912379c] raise exception.PortBindingFailed(port_id=port['id']) [ 600.787087] env[62070]: ERROR nova.compute.manager [instance: 5936aded-90fc-4f77-8103-8c9e1912379c] nova.exception.PortBindingFailed: Binding failed for port 611560c7-4689-481c-9fd1-efc64eae1fd4, please check neutron logs for more information. [ 600.787087] env[62070]: ERROR nova.compute.manager [instance: 5936aded-90fc-4f77-8103-8c9e1912379c] [ 600.787432] env[62070]: DEBUG nova.compute.utils [None req-ff380498-571f-460a-ac0d-214b8f51e56f tempest-ServersAdminTestJSON-1165817432 tempest-ServersAdminTestJSON-1165817432-project-member] [instance: 5936aded-90fc-4f77-8103-8c9e1912379c] Binding failed for port 611560c7-4689-481c-9fd1-efc64eae1fd4, please check neutron logs for more information. {{(pid=62070) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 600.788031] env[62070]: DEBUG oslo_concurrency.lockutils [None req-e5f1ab2d-b058-40d2-b380-7df2dee6e48f tempest-ServerDiagnosticsV248Test-1284349500 tempest-ServerDiagnosticsV248Test-1284349500-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 23.080s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 600.788146] env[62070]: DEBUG oslo_concurrency.lockutils [None req-e5f1ab2d-b058-40d2-b380-7df2dee6e48f tempest-ServerDiagnosticsV248Test-1284349500 tempest-ServerDiagnosticsV248Test-1284349500-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 600.791298] env[62070]: DEBUG oslo_concurrency.lockutils [None req-d154b157-ec97-49c4-b004-fc679da21516 tempest-ServersWithSpecificFlavorTestJSON-1618728433 tempest-ServersWithSpecificFlavorTestJSON-1618728433-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 20.061s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 600.794330] env[62070]: INFO nova.compute.claims [None req-d154b157-ec97-49c4-b004-fc679da21516 tempest-ServersWithSpecificFlavorTestJSON-1618728433 tempest-ServersWithSpecificFlavorTestJSON-1618728433-project-member] [instance: 7d8d4cdc-2b25-46f3-8bc0-1de22fdd005a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 600.802289] env[62070]: DEBUG nova.compute.manager [None req-ff380498-571f-460a-ac0d-214b8f51e56f tempest-ServersAdminTestJSON-1165817432 tempest-ServersAdminTestJSON-1165817432-project-member] [instance: 5936aded-90fc-4f77-8103-8c9e1912379c] Build of instance 5936aded-90fc-4f77-8103-8c9e1912379c was re-scheduled: Binding failed for port 611560c7-4689-481c-9fd1-efc64eae1fd4, please check neutron logs for more information. 
{{(pid=62070) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 600.802289] env[62070]: DEBUG nova.compute.manager [None req-ff380498-571f-460a-ac0d-214b8f51e56f tempest-ServersAdminTestJSON-1165817432 tempest-ServersAdminTestJSON-1165817432-project-member] [instance: 5936aded-90fc-4f77-8103-8c9e1912379c] Unplugging VIFs for instance {{(pid=62070) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 600.802289] env[62070]: DEBUG oslo_concurrency.lockutils [None req-ff380498-571f-460a-ac0d-214b8f51e56f tempest-ServersAdminTestJSON-1165817432 tempest-ServersAdminTestJSON-1165817432-project-member] Acquiring lock "refresh_cache-5936aded-90fc-4f77-8103-8c9e1912379c" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 600.802289] env[62070]: DEBUG oslo_concurrency.lockutils [None req-ff380498-571f-460a-ac0d-214b8f51e56f tempest-ServersAdminTestJSON-1165817432 tempest-ServersAdminTestJSON-1165817432-project-member] Acquired lock "refresh_cache-5936aded-90fc-4f77-8103-8c9e1912379c" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 600.803238] env[62070]: DEBUG nova.network.neutron [None req-ff380498-571f-460a-ac0d-214b8f51e56f tempest-ServersAdminTestJSON-1165817432 tempest-ServersAdminTestJSON-1165817432-project-member] [instance: 5936aded-90fc-4f77-8103-8c9e1912379c] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 600.827483] env[62070]: INFO nova.scheduler.client.report [None req-e5f1ab2d-b058-40d2-b380-7df2dee6e48f tempest-ServerDiagnosticsV248Test-1284349500 tempest-ServerDiagnosticsV248Test-1284349500-project-member] Deleted allocations for instance 283e7488-1240-475f-a74d-809251950774 [ 600.832921] env[62070]: DEBUG nova.compute.manager [None req-3399fbf7-c899-4cd9-8ca2-2d4fdce15ab9 tempest-InstanceActionsNegativeTestJSON-1068104952 tempest-InstanceActionsNegativeTestJSON-1068104952-project-member] [instance: 495a15b2-20bd-44d2-8020-816031e89832] Start spawning the instance on the hypervisor. 
{{(pid=62070) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 600.869537] env[62070]: DEBUG nova.virt.hardware [None req-3399fbf7-c899-4cd9-8ca2-2d4fdce15ab9 tempest-InstanceActionsNegativeTestJSON-1068104952 tempest-InstanceActionsNegativeTestJSON-1068104952-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T09:21:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T09:21:17Z,direct_url=,disk_format='vmdk',id=43ea607c-7ece-4601-9b11-75c6a16aa7dd,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9d42cb2bbadf40d6b35f237f71234611',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T09:21:18Z,virtual_size=,visibility=), allow threads: False {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 600.869765] env[62070]: DEBUG nova.virt.hardware [None req-3399fbf7-c899-4cd9-8ca2-2d4fdce15ab9 tempest-InstanceActionsNegativeTestJSON-1068104952 tempest-InstanceActionsNegativeTestJSON-1068104952-project-member] Flavor limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 600.870798] env[62070]: DEBUG nova.virt.hardware [None req-3399fbf7-c899-4cd9-8ca2-2d4fdce15ab9 tempest-InstanceActionsNegativeTestJSON-1068104952 tempest-InstanceActionsNegativeTestJSON-1068104952-project-member] Image limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 600.870798] env[62070]: DEBUG nova.virt.hardware [None req-3399fbf7-c899-4cd9-8ca2-2d4fdce15ab9 tempest-InstanceActionsNegativeTestJSON-1068104952 tempest-InstanceActionsNegativeTestJSON-1068104952-project-member] Flavor pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 600.870798] env[62070]: DEBUG nova.virt.hardware [None req-3399fbf7-c899-4cd9-8ca2-2d4fdce15ab9 tempest-InstanceActionsNegativeTestJSON-1068104952 tempest-InstanceActionsNegativeTestJSON-1068104952-project-member] Image pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 600.870798] env[62070]: DEBUG nova.virt.hardware [None req-3399fbf7-c899-4cd9-8ca2-2d4fdce15ab9 tempest-InstanceActionsNegativeTestJSON-1068104952 tempest-InstanceActionsNegativeTestJSON-1068104952-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 600.870798] env[62070]: DEBUG nova.virt.hardware [None req-3399fbf7-c899-4cd9-8ca2-2d4fdce15ab9 tempest-InstanceActionsNegativeTestJSON-1068104952 tempest-InstanceActionsNegativeTestJSON-1068104952-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 600.871214] env[62070]: DEBUG nova.virt.hardware [None req-3399fbf7-c899-4cd9-8ca2-2d4fdce15ab9 tempest-InstanceActionsNegativeTestJSON-1068104952 tempest-InstanceActionsNegativeTestJSON-1068104952-project-member] Build topologies for 1 vcpu(s) 1:1:1 
{{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 600.871447] env[62070]: DEBUG nova.virt.hardware [None req-3399fbf7-c899-4cd9-8ca2-2d4fdce15ab9 tempest-InstanceActionsNegativeTestJSON-1068104952 tempest-InstanceActionsNegativeTestJSON-1068104952-project-member] Got 1 possible topologies {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 600.871619] env[62070]: DEBUG nova.virt.hardware [None req-3399fbf7-c899-4cd9-8ca2-2d4fdce15ab9 tempest-InstanceActionsNegativeTestJSON-1068104952 tempest-InstanceActionsNegativeTestJSON-1068104952-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 600.871791] env[62070]: DEBUG nova.virt.hardware [None req-3399fbf7-c899-4cd9-8ca2-2d4fdce15ab9 tempest-InstanceActionsNegativeTestJSON-1068104952 tempest-InstanceActionsNegativeTestJSON-1068104952-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 600.872730] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b1bfd99-1e66-4fbe-bbe5-99156d7817e5 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.885309] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fc2331e-1e44-4456-aad2-46abb1741884 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.012463] env[62070]: ERROR nova.compute.manager [None req-3399fbf7-c899-4cd9-8ca2-2d4fdce15ab9 tempest-InstanceActionsNegativeTestJSON-1068104952 tempest-InstanceActionsNegativeTestJSON-1068104952-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 247acd8a-3707-4fc6-a29f-66b81e3af752, please check neutron logs for more information. 
[ 601.012463] env[62070]: ERROR nova.compute.manager Traceback (most recent call last): [ 601.012463] env[62070]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 601.012463] env[62070]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 601.012463] env[62070]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 601.012463] env[62070]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 601.012463] env[62070]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 601.012463] env[62070]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 601.012463] env[62070]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 601.012463] env[62070]: ERROR nova.compute.manager self.force_reraise() [ 601.012463] env[62070]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 601.012463] env[62070]: ERROR nova.compute.manager raise self.value [ 601.012463] env[62070]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 601.012463] env[62070]: ERROR nova.compute.manager updated_port = self._update_port( [ 601.012463] env[62070]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 601.012463] env[62070]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 601.013047] env[62070]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 601.013047] env[62070]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 601.013047] env[62070]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 247acd8a-3707-4fc6-a29f-66b81e3af752, please check neutron logs for more information. 
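(Editor's note on the traceback above: both PortBindingFailed dumps bottom out in nova/network/neutron.py, where _update_port() runs _ensure_no_port_binding_failure() and _update_ports_for_instance() re-raises the failure through oslo_utils' save_and_reraise_exception context manager, which is why the same exception text appears again after cleanup. The snippet below is only a minimal, self-contained sketch of that pattern, not Nova's actual code; the 'binding_failed' vif-type check, the update_port callable and the cleanup() helper are assumptions introduced for illustration.)

    from oslo_utils import excutils

    class PortBindingFailed(Exception):
        """Stand-in for nova.exception.PortBindingFailed (illustrative only)."""
        def __init__(self, port_id):
            super().__init__(
                "Binding failed for port %s, please check neutron logs for "
                "more information." % port_id)

    def _ensure_no_port_binding_failure(port):
        # Assumption: Neutron signals a failed binding by setting the port's
        # 'binding:vif_type' to 'binding_failed'.
        if port.get('binding:vif_type') == 'binding_failed':
            raise PortBindingFailed(port_id=port['id'])

    def update_ports_for_instance(ports, update_port, cleanup):
        # Hypothetical loop mirroring the control flow in the traceback:
        # on any failure, save_and_reraise_exception runs the cleanup in the
        # with-block and then re-raises the original exception, so the caller
        # still sees PortBindingFailed.
        created = []
        try:
            for port in ports:
                updated = update_port(port)
                _ensure_no_port_binding_failure(updated)
                created.append(updated['id'])
        except Exception:
            with excutils.save_and_reraise_exception():
                cleanup(created)  # e.g. unbind/delete ports created so far
        return created

(For instance, calling update_ports_for_instance with an update_port stub that returns {'id': 'p1', 'binding:vif_type': 'binding_failed'} reproduces the error path logged here: cleanup runs, then the PortBindingFailed propagates to the compute manager.)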
[ 601.013047] env[62070]: ERROR nova.compute.manager [ 601.013047] env[62070]: Traceback (most recent call last): [ 601.013047] env[62070]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 601.013047] env[62070]: listener.cb(fileno) [ 601.013047] env[62070]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 601.013047] env[62070]: result = function(*args, **kwargs) [ 601.013047] env[62070]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 601.013047] env[62070]: return func(*args, **kwargs) [ 601.013047] env[62070]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 601.013047] env[62070]: raise e [ 601.013047] env[62070]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 601.013047] env[62070]: nwinfo = self.network_api.allocate_for_instance( [ 601.013047] env[62070]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 601.013047] env[62070]: created_port_ids = self._update_ports_for_instance( [ 601.013047] env[62070]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 601.013047] env[62070]: with excutils.save_and_reraise_exception(): [ 601.013047] env[62070]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 601.013047] env[62070]: self.force_reraise() [ 601.013047] env[62070]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 601.013047] env[62070]: raise self.value [ 601.013047] env[62070]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 601.013047] env[62070]: updated_port = self._update_port( [ 601.013047] env[62070]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 601.013047] env[62070]: _ensure_no_port_binding_failure(port) [ 601.013047] env[62070]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 601.013047] env[62070]: raise exception.PortBindingFailed(port_id=port['id']) [ 601.015747] env[62070]: nova.exception.PortBindingFailed: Binding failed for port 247acd8a-3707-4fc6-a29f-66b81e3af752, please check neutron logs for more information. [ 601.015747] env[62070]: Removing descriptor: 19 [ 601.015747] env[62070]: ERROR nova.compute.manager [None req-3399fbf7-c899-4cd9-8ca2-2d4fdce15ab9 tempest-InstanceActionsNegativeTestJSON-1068104952 tempest-InstanceActionsNegativeTestJSON-1068104952-project-member] [instance: 495a15b2-20bd-44d2-8020-816031e89832] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 247acd8a-3707-4fc6-a29f-66b81e3af752, please check neutron logs for more information. 
[ 601.015747] env[62070]: ERROR nova.compute.manager [instance: 495a15b2-20bd-44d2-8020-816031e89832] Traceback (most recent call last): [ 601.015747] env[62070]: ERROR nova.compute.manager [instance: 495a15b2-20bd-44d2-8020-816031e89832] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 601.015747] env[62070]: ERROR nova.compute.manager [instance: 495a15b2-20bd-44d2-8020-816031e89832] yield resources [ 601.015747] env[62070]: ERROR nova.compute.manager [instance: 495a15b2-20bd-44d2-8020-816031e89832] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 601.015747] env[62070]: ERROR nova.compute.manager [instance: 495a15b2-20bd-44d2-8020-816031e89832] self.driver.spawn(context, instance, image_meta, [ 601.015747] env[62070]: ERROR nova.compute.manager [instance: 495a15b2-20bd-44d2-8020-816031e89832] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 601.015747] env[62070]: ERROR nova.compute.manager [instance: 495a15b2-20bd-44d2-8020-816031e89832] self._vmops.spawn(context, instance, image_meta, injected_files, [ 601.015747] env[62070]: ERROR nova.compute.manager [instance: 495a15b2-20bd-44d2-8020-816031e89832] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 601.015747] env[62070]: ERROR nova.compute.manager [instance: 495a15b2-20bd-44d2-8020-816031e89832] vm_ref = self.build_virtual_machine(instance, [ 601.016360] env[62070]: ERROR nova.compute.manager [instance: 495a15b2-20bd-44d2-8020-816031e89832] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 601.016360] env[62070]: ERROR nova.compute.manager [instance: 495a15b2-20bd-44d2-8020-816031e89832] vif_infos = vmwarevif.get_vif_info(self._session, [ 601.016360] env[62070]: ERROR nova.compute.manager [instance: 495a15b2-20bd-44d2-8020-816031e89832] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 601.016360] env[62070]: ERROR nova.compute.manager [instance: 495a15b2-20bd-44d2-8020-816031e89832] for vif in network_info: [ 601.016360] env[62070]: ERROR nova.compute.manager [instance: 495a15b2-20bd-44d2-8020-816031e89832] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 601.016360] env[62070]: ERROR nova.compute.manager [instance: 495a15b2-20bd-44d2-8020-816031e89832] return self._sync_wrapper(fn, *args, **kwargs) [ 601.016360] env[62070]: ERROR nova.compute.manager [instance: 495a15b2-20bd-44d2-8020-816031e89832] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 601.016360] env[62070]: ERROR nova.compute.manager [instance: 495a15b2-20bd-44d2-8020-816031e89832] self.wait() [ 601.016360] env[62070]: ERROR nova.compute.manager [instance: 495a15b2-20bd-44d2-8020-816031e89832] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 601.016360] env[62070]: ERROR nova.compute.manager [instance: 495a15b2-20bd-44d2-8020-816031e89832] self[:] = self._gt.wait() [ 601.016360] env[62070]: ERROR nova.compute.manager [instance: 495a15b2-20bd-44d2-8020-816031e89832] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 601.016360] env[62070]: ERROR nova.compute.manager [instance: 495a15b2-20bd-44d2-8020-816031e89832] return self._exit_event.wait() [ 601.016360] env[62070]: ERROR nova.compute.manager [instance: 495a15b2-20bd-44d2-8020-816031e89832] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 601.016865] env[62070]: ERROR 
nova.compute.manager [instance: 495a15b2-20bd-44d2-8020-816031e89832] result = hub.switch() [ 601.016865] env[62070]: ERROR nova.compute.manager [instance: 495a15b2-20bd-44d2-8020-816031e89832] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 601.016865] env[62070]: ERROR nova.compute.manager [instance: 495a15b2-20bd-44d2-8020-816031e89832] return self.greenlet.switch() [ 601.016865] env[62070]: ERROR nova.compute.manager [instance: 495a15b2-20bd-44d2-8020-816031e89832] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 601.016865] env[62070]: ERROR nova.compute.manager [instance: 495a15b2-20bd-44d2-8020-816031e89832] result = function(*args, **kwargs) [ 601.016865] env[62070]: ERROR nova.compute.manager [instance: 495a15b2-20bd-44d2-8020-816031e89832] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 601.016865] env[62070]: ERROR nova.compute.manager [instance: 495a15b2-20bd-44d2-8020-816031e89832] return func(*args, **kwargs) [ 601.016865] env[62070]: ERROR nova.compute.manager [instance: 495a15b2-20bd-44d2-8020-816031e89832] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 601.016865] env[62070]: ERROR nova.compute.manager [instance: 495a15b2-20bd-44d2-8020-816031e89832] raise e [ 601.016865] env[62070]: ERROR nova.compute.manager [instance: 495a15b2-20bd-44d2-8020-816031e89832] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 601.016865] env[62070]: ERROR nova.compute.manager [instance: 495a15b2-20bd-44d2-8020-816031e89832] nwinfo = self.network_api.allocate_for_instance( [ 601.016865] env[62070]: ERROR nova.compute.manager [instance: 495a15b2-20bd-44d2-8020-816031e89832] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 601.016865] env[62070]: ERROR nova.compute.manager [instance: 495a15b2-20bd-44d2-8020-816031e89832] created_port_ids = self._update_ports_for_instance( [ 601.017334] env[62070]: ERROR nova.compute.manager [instance: 495a15b2-20bd-44d2-8020-816031e89832] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 601.017334] env[62070]: ERROR nova.compute.manager [instance: 495a15b2-20bd-44d2-8020-816031e89832] with excutils.save_and_reraise_exception(): [ 601.017334] env[62070]: ERROR nova.compute.manager [instance: 495a15b2-20bd-44d2-8020-816031e89832] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 601.017334] env[62070]: ERROR nova.compute.manager [instance: 495a15b2-20bd-44d2-8020-816031e89832] self.force_reraise() [ 601.017334] env[62070]: ERROR nova.compute.manager [instance: 495a15b2-20bd-44d2-8020-816031e89832] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 601.017334] env[62070]: ERROR nova.compute.manager [instance: 495a15b2-20bd-44d2-8020-816031e89832] raise self.value [ 601.017334] env[62070]: ERROR nova.compute.manager [instance: 495a15b2-20bd-44d2-8020-816031e89832] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 601.017334] env[62070]: ERROR nova.compute.manager [instance: 495a15b2-20bd-44d2-8020-816031e89832] updated_port = self._update_port( [ 601.017334] env[62070]: ERROR nova.compute.manager [instance: 495a15b2-20bd-44d2-8020-816031e89832] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 601.017334] 
env[62070]: ERROR nova.compute.manager [instance: 495a15b2-20bd-44d2-8020-816031e89832] _ensure_no_port_binding_failure(port) [ 601.017334] env[62070]: ERROR nova.compute.manager [instance: 495a15b2-20bd-44d2-8020-816031e89832] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 601.017334] env[62070]: ERROR nova.compute.manager [instance: 495a15b2-20bd-44d2-8020-816031e89832] raise exception.PortBindingFailed(port_id=port['id']) [ 601.017741] env[62070]: ERROR nova.compute.manager [instance: 495a15b2-20bd-44d2-8020-816031e89832] nova.exception.PortBindingFailed: Binding failed for port 247acd8a-3707-4fc6-a29f-66b81e3af752, please check neutron logs for more information. [ 601.017741] env[62070]: ERROR nova.compute.manager [instance: 495a15b2-20bd-44d2-8020-816031e89832] [ 601.017741] env[62070]: INFO nova.compute.manager [None req-3399fbf7-c899-4cd9-8ca2-2d4fdce15ab9 tempest-InstanceActionsNegativeTestJSON-1068104952 tempest-InstanceActionsNegativeTestJSON-1068104952-project-member] [instance: 495a15b2-20bd-44d2-8020-816031e89832] Terminating instance [ 601.019828] env[62070]: DEBUG oslo_concurrency.lockutils [None req-3399fbf7-c899-4cd9-8ca2-2d4fdce15ab9 tempest-InstanceActionsNegativeTestJSON-1068104952 tempest-InstanceActionsNegativeTestJSON-1068104952-project-member] Acquiring lock "refresh_cache-495a15b2-20bd-44d2-8020-816031e89832" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 601.019828] env[62070]: DEBUG oslo_concurrency.lockutils [None req-3399fbf7-c899-4cd9-8ca2-2d4fdce15ab9 tempest-InstanceActionsNegativeTestJSON-1068104952 tempest-InstanceActionsNegativeTestJSON-1068104952-project-member] Acquired lock "refresh_cache-495a15b2-20bd-44d2-8020-816031e89832" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 601.019828] env[62070]: DEBUG nova.network.neutron [None req-3399fbf7-c899-4cd9-8ca2-2d4fdce15ab9 tempest-InstanceActionsNegativeTestJSON-1068104952 tempest-InstanceActionsNegativeTestJSON-1068104952-project-member] [instance: 495a15b2-20bd-44d2-8020-816031e89832] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 601.071102] env[62070]: DEBUG nova.compute.manager [req-98c3a2de-bddc-472a-b6f7-a61f82f1b754 req-698df6c6-5068-4510-9501-2b95963a453d service nova] [instance: 495a15b2-20bd-44d2-8020-816031e89832] Received event network-changed-247acd8a-3707-4fc6-a29f-66b81e3af752 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 601.071316] env[62070]: DEBUG nova.compute.manager [req-98c3a2de-bddc-472a-b6f7-a61f82f1b754 req-698df6c6-5068-4510-9501-2b95963a453d service nova] [instance: 495a15b2-20bd-44d2-8020-816031e89832] Refreshing instance network info cache due to event network-changed-247acd8a-3707-4fc6-a29f-66b81e3af752. 
{{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 601.071471] env[62070]: DEBUG oslo_concurrency.lockutils [req-98c3a2de-bddc-472a-b6f7-a61f82f1b754 req-698df6c6-5068-4510-9501-2b95963a453d service nova] Acquiring lock "refresh_cache-495a15b2-20bd-44d2-8020-816031e89832" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 601.121929] env[62070]: DEBUG oslo_concurrency.lockutils [None req-ae386fad-6b3a-4fae-9284-28954bd3da74 tempest-ServersTestJSON-1024696343 tempest-ServersTestJSON-1024696343-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 601.340274] env[62070]: DEBUG nova.network.neutron [None req-ff380498-571f-460a-ac0d-214b8f51e56f tempest-ServersAdminTestJSON-1165817432 tempest-ServersAdminTestJSON-1165817432-project-member] [instance: 5936aded-90fc-4f77-8103-8c9e1912379c] Instance cache missing network info. {{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 601.344942] env[62070]: DEBUG oslo_concurrency.lockutils [None req-e5f1ab2d-b058-40d2-b380-7df2dee6e48f tempest-ServerDiagnosticsV248Test-1284349500 tempest-ServerDiagnosticsV248Test-1284349500-project-member] Lock "283e7488-1240-475f-a74d-809251950774" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 27.760s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 601.446570] env[62070]: DEBUG nova.network.neutron [None req-ff380498-571f-460a-ac0d-214b8f51e56f tempest-ServersAdminTestJSON-1165817432 tempest-ServersAdminTestJSON-1165817432-project-member] [instance: 5936aded-90fc-4f77-8103-8c9e1912379c] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 601.541592] env[62070]: DEBUG nova.network.neutron [None req-3399fbf7-c899-4cd9-8ca2-2d4fdce15ab9 tempest-InstanceActionsNegativeTestJSON-1068104952 tempest-InstanceActionsNegativeTestJSON-1068104952-project-member] [instance: 495a15b2-20bd-44d2-8020-816031e89832] Instance cache missing network info. {{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 601.648284] env[62070]: DEBUG nova.network.neutron [None req-3399fbf7-c899-4cd9-8ca2-2d4fdce15ab9 tempest-InstanceActionsNegativeTestJSON-1068104952 tempest-InstanceActionsNegativeTestJSON-1068104952-project-member] [instance: 495a15b2-20bd-44d2-8020-816031e89832] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 601.951180] env[62070]: DEBUG oslo_concurrency.lockutils [None req-ff380498-571f-460a-ac0d-214b8f51e56f tempest-ServersAdminTestJSON-1165817432 tempest-ServersAdminTestJSON-1165817432-project-member] Releasing lock "refresh_cache-5936aded-90fc-4f77-8103-8c9e1912379c" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 601.951448] env[62070]: DEBUG nova.compute.manager [None req-ff380498-571f-460a-ac0d-214b8f51e56f tempest-ServersAdminTestJSON-1165817432 tempest-ServersAdminTestJSON-1165817432-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62070) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 601.951628] env[62070]: DEBUG nova.compute.manager [None req-ff380498-571f-460a-ac0d-214b8f51e56f tempest-ServersAdminTestJSON-1165817432 tempest-ServersAdminTestJSON-1165817432-project-member] [instance: 5936aded-90fc-4f77-8103-8c9e1912379c] Deallocating network for instance {{(pid=62070) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 601.951793] env[62070]: DEBUG nova.network.neutron [None req-ff380498-571f-460a-ac0d-214b8f51e56f tempest-ServersAdminTestJSON-1165817432 tempest-ServersAdminTestJSON-1165817432-project-member] [instance: 5936aded-90fc-4f77-8103-8c9e1912379c] deallocate_for_instance() {{(pid=62070) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 601.970704] env[62070]: DEBUG nova.network.neutron [None req-ff380498-571f-460a-ac0d-214b8f51e56f tempest-ServersAdminTestJSON-1165817432 tempest-ServersAdminTestJSON-1165817432-project-member] [instance: 5936aded-90fc-4f77-8103-8c9e1912379c] Instance cache missing network info. {{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 602.151955] env[62070]: DEBUG oslo_concurrency.lockutils [None req-3399fbf7-c899-4cd9-8ca2-2d4fdce15ab9 tempest-InstanceActionsNegativeTestJSON-1068104952 tempest-InstanceActionsNegativeTestJSON-1068104952-project-member] Releasing lock "refresh_cache-495a15b2-20bd-44d2-8020-816031e89832" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 602.152823] env[62070]: DEBUG nova.compute.manager [None req-3399fbf7-c899-4cd9-8ca2-2d4fdce15ab9 tempest-InstanceActionsNegativeTestJSON-1068104952 tempest-InstanceActionsNegativeTestJSON-1068104952-project-member] [instance: 495a15b2-20bd-44d2-8020-816031e89832] Start destroying the instance on the hypervisor. 
{{(pid=62070) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 602.153027] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-3399fbf7-c899-4cd9-8ca2-2d4fdce15ab9 tempest-InstanceActionsNegativeTestJSON-1068104952 tempest-InstanceActionsNegativeTestJSON-1068104952-project-member] [instance: 495a15b2-20bd-44d2-8020-816031e89832] Destroying instance {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 602.153799] env[62070]: DEBUG oslo_concurrency.lockutils [req-98c3a2de-bddc-472a-b6f7-a61f82f1b754 req-698df6c6-5068-4510-9501-2b95963a453d service nova] Acquired lock "refresh_cache-495a15b2-20bd-44d2-8020-816031e89832" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 602.153799] env[62070]: DEBUG nova.network.neutron [req-98c3a2de-bddc-472a-b6f7-a61f82f1b754 req-698df6c6-5068-4510-9501-2b95963a453d service nova] [instance: 495a15b2-20bd-44d2-8020-816031e89832] Refreshing network info cache for port 247acd8a-3707-4fc6-a29f-66b81e3af752 {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 602.155177] env[62070]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f75a75c2-0a4e-4539-92b5-e3ffc9ad254c {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.165234] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41ee999a-df02-4ac6-b87e-2bf54f55b5bd {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.190619] env[62070]: WARNING nova.virt.vmwareapi.vmops [None req-3399fbf7-c899-4cd9-8ca2-2d4fdce15ab9 tempest-InstanceActionsNegativeTestJSON-1068104952 tempest-InstanceActionsNegativeTestJSON-1068104952-project-member] [instance: 495a15b2-20bd-44d2-8020-816031e89832] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 495a15b2-20bd-44d2-8020-816031e89832 could not be found. [ 602.191026] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-3399fbf7-c899-4cd9-8ca2-2d4fdce15ab9 tempest-InstanceActionsNegativeTestJSON-1068104952 tempest-InstanceActionsNegativeTestJSON-1068104952-project-member] [instance: 495a15b2-20bd-44d2-8020-816031e89832] Instance destroyed {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 602.191026] env[62070]: INFO nova.compute.manager [None req-3399fbf7-c899-4cd9-8ca2-2d4fdce15ab9 tempest-InstanceActionsNegativeTestJSON-1068104952 tempest-InstanceActionsNegativeTestJSON-1068104952-project-member] [instance: 495a15b2-20bd-44d2-8020-816031e89832] Took 0.04 seconds to destroy the instance on the hypervisor. [ 602.191369] env[62070]: DEBUG oslo.service.loopingcall [None req-3399fbf7-c899-4cd9-8ca2-2d4fdce15ab9 tempest-InstanceActionsNegativeTestJSON-1068104952 tempest-InstanceActionsNegativeTestJSON-1068104952-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 602.191489] env[62070]: DEBUG nova.compute.manager [-] [instance: 495a15b2-20bd-44d2-8020-816031e89832] Deallocating network for instance {{(pid=62070) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 602.191616] env[62070]: DEBUG nova.network.neutron [-] [instance: 495a15b2-20bd-44d2-8020-816031e89832] deallocate_for_instance() {{(pid=62070) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 602.218778] env[62070]: DEBUG nova.network.neutron [-] [instance: 495a15b2-20bd-44d2-8020-816031e89832] Instance cache missing network info. {{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 602.253393] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a3da123-d6b0-4e68-96a5-dbd8ecd1edf7 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.264051] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5928a0f1-f3ff-4e2d-8499-fae5a89cc15a {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.297739] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7563be2-720a-47f7-bc5e-7c7096f1592e {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.305063] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fb014db-5795-4e31-951b-e0f4050eff7b {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.323367] env[62070]: DEBUG nova.compute.provider_tree [None req-d154b157-ec97-49c4-b004-fc679da21516 tempest-ServersWithSpecificFlavorTestJSON-1618728433 tempest-ServersWithSpecificFlavorTestJSON-1618728433-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 602.474886] env[62070]: DEBUG nova.network.neutron [None req-ff380498-571f-460a-ac0d-214b8f51e56f tempest-ServersAdminTestJSON-1165817432 tempest-ServersAdminTestJSON-1165817432-project-member] [instance: 5936aded-90fc-4f77-8103-8c9e1912379c] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 602.688450] env[62070]: DEBUG nova.network.neutron [req-98c3a2de-bddc-472a-b6f7-a61f82f1b754 req-698df6c6-5068-4510-9501-2b95963a453d service nova] [instance: 495a15b2-20bd-44d2-8020-816031e89832] Instance cache missing network info. 
{{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 602.726868] env[62070]: DEBUG nova.network.neutron [-] [instance: 495a15b2-20bd-44d2-8020-816031e89832] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 602.826869] env[62070]: DEBUG nova.scheduler.client.report [None req-d154b157-ec97-49c4-b004-fc679da21516 tempest-ServersWithSpecificFlavorTestJSON-1618728433 tempest-ServersWithSpecificFlavorTestJSON-1618728433-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 602.929874] env[62070]: DEBUG nova.network.neutron [req-98c3a2de-bddc-472a-b6f7-a61f82f1b754 req-698df6c6-5068-4510-9501-2b95963a453d service nova] [instance: 495a15b2-20bd-44d2-8020-816031e89832] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 602.981219] env[62070]: INFO nova.compute.manager [None req-ff380498-571f-460a-ac0d-214b8f51e56f tempest-ServersAdminTestJSON-1165817432 tempest-ServersAdminTestJSON-1165817432-project-member] [instance: 5936aded-90fc-4f77-8103-8c9e1912379c] Took 1.03 seconds to deallocate network for instance. [ 603.229552] env[62070]: INFO nova.compute.manager [-] [instance: 495a15b2-20bd-44d2-8020-816031e89832] Took 1.04 seconds to deallocate network for instance. [ 603.236485] env[62070]: DEBUG nova.compute.claims [None req-3399fbf7-c899-4cd9-8ca2-2d4fdce15ab9 tempest-InstanceActionsNegativeTestJSON-1068104952 tempest-InstanceActionsNegativeTestJSON-1068104952-project-member] [instance: 495a15b2-20bd-44d2-8020-816031e89832] Aborting claim: {{(pid=62070) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 603.236734] env[62070]: DEBUG oslo_concurrency.lockutils [None req-3399fbf7-c899-4cd9-8ca2-2d4fdce15ab9 tempest-InstanceActionsNegativeTestJSON-1068104952 tempest-InstanceActionsNegativeTestJSON-1068104952-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 603.332365] env[62070]: DEBUG oslo_concurrency.lockutils [None req-d154b157-ec97-49c4-b004-fc679da21516 tempest-ServersWithSpecificFlavorTestJSON-1618728433 tempest-ServersWithSpecificFlavorTestJSON-1618728433-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.541s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 603.332942] env[62070]: DEBUG nova.compute.manager [None req-d154b157-ec97-49c4-b004-fc679da21516 tempest-ServersWithSpecificFlavorTestJSON-1618728433 tempest-ServersWithSpecificFlavorTestJSON-1618728433-project-member] [instance: 7d8d4cdc-2b25-46f3-8bc0-1de22fdd005a] Start building networks asynchronously for instance. 
{{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 603.339394] env[62070]: DEBUG oslo_concurrency.lockutils [None req-235f2fca-0298-44f0-b867-2cd232992478 tempest-TenantUsagesTestJSON-193686055 tempest-TenantUsagesTestJSON-193686055-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 20.708s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 603.341036] env[62070]: INFO nova.compute.claims [None req-235f2fca-0298-44f0-b867-2cd232992478 tempest-TenantUsagesTestJSON-193686055 tempest-TenantUsagesTestJSON-193686055-project-member] [instance: 2226072d-16f2-4ea1-a56c-d866554c7379] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 603.437421] env[62070]: DEBUG oslo_concurrency.lockutils [req-98c3a2de-bddc-472a-b6f7-a61f82f1b754 req-698df6c6-5068-4510-9501-2b95963a453d service nova] Releasing lock "refresh_cache-495a15b2-20bd-44d2-8020-816031e89832" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 603.476209] env[62070]: DEBUG nova.compute.manager [req-4195883f-c9c1-4585-ad86-f855caae1cd2 req-ad9228b9-60bc-4317-952b-135088e30a76 service nova] [instance: 495a15b2-20bd-44d2-8020-816031e89832] Received event network-vif-deleted-247acd8a-3707-4fc6-a29f-66b81e3af752 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 603.853569] env[62070]: DEBUG nova.compute.utils [None req-d154b157-ec97-49c4-b004-fc679da21516 tempest-ServersWithSpecificFlavorTestJSON-1618728433 tempest-ServersWithSpecificFlavorTestJSON-1618728433-project-member] Using /dev/sd instead of None {{(pid=62070) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 603.856257] env[62070]: DEBUG nova.compute.manager [None req-d154b157-ec97-49c4-b004-fc679da21516 tempest-ServersWithSpecificFlavorTestJSON-1618728433 tempest-ServersWithSpecificFlavorTestJSON-1618728433-project-member] [instance: 7d8d4cdc-2b25-46f3-8bc0-1de22fdd005a] Allocating IP information in the background. 
{{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 603.857546] env[62070]: DEBUG nova.network.neutron [None req-d154b157-ec97-49c4-b004-fc679da21516 tempest-ServersWithSpecificFlavorTestJSON-1618728433 tempest-ServersWithSpecificFlavorTestJSON-1618728433-project-member] [instance: 7d8d4cdc-2b25-46f3-8bc0-1de22fdd005a] allocate_for_instance() {{(pid=62070) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 603.921165] env[62070]: DEBUG nova.policy [None req-d154b157-ec97-49c4-b004-fc679da21516 tempest-ServersWithSpecificFlavorTestJSON-1618728433 tempest-ServersWithSpecificFlavorTestJSON-1618728433-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '697f9c322cc54f3c9d5908671c022d74', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'bec8bda703b046708a4aa06e70caf300', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62070) authorize /opt/stack/nova/nova/policy.py:201}} [ 604.135714] env[62070]: INFO nova.scheduler.client.report [None req-ff380498-571f-460a-ac0d-214b8f51e56f tempest-ServersAdminTestJSON-1165817432 tempest-ServersAdminTestJSON-1165817432-project-member] Deleted allocations for instance 5936aded-90fc-4f77-8103-8c9e1912379c [ 604.366091] env[62070]: DEBUG nova.compute.manager [None req-d154b157-ec97-49c4-b004-fc679da21516 tempest-ServersWithSpecificFlavorTestJSON-1618728433 tempest-ServersWithSpecificFlavorTestJSON-1618728433-project-member] [instance: 7d8d4cdc-2b25-46f3-8bc0-1de22fdd005a] Start building block device mappings for instance. 
{{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 604.647382] env[62070]: DEBUG oslo_concurrency.lockutils [None req-ff380498-571f-460a-ac0d-214b8f51e56f tempest-ServersAdminTestJSON-1165817432 tempest-ServersAdminTestJSON-1165817432-project-member] Lock "5936aded-90fc-4f77-8103-8c9e1912379c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 46.883s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 604.726430] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16dc09fa-d2e7-4926-a0b7-101f7617335d {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.737170] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a0af610-8efe-4da4-922f-1326ca0a1a51 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.774039] env[62070]: DEBUG nova.network.neutron [None req-d154b157-ec97-49c4-b004-fc679da21516 tempest-ServersWithSpecificFlavorTestJSON-1618728433 tempest-ServersWithSpecificFlavorTestJSON-1618728433-project-member] [instance: 7d8d4cdc-2b25-46f3-8bc0-1de22fdd005a] Successfully created port: de7507a7-5aad-4849-8609-8ff49b3f040e {{(pid=62070) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 604.776747] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa6a618d-0807-48a8-8dc3-20691c7e97e4 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.785521] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e1400a3-e743-4d7b-aba3-852f2cd70092 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.799214] env[62070]: DEBUG nova.compute.provider_tree [None req-235f2fca-0298-44f0-b867-2cd232992478 tempest-TenantUsagesTestJSON-193686055 tempest-TenantUsagesTestJSON-193686055-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 604.889996] env[62070]: DEBUG oslo_concurrency.lockutils [None req-78392d05-754f-4818-8f15-eae4311c941b tempest-ServersTestManualDisk-1129474164 tempest-ServersTestManualDisk-1129474164-project-member] Acquiring lock "d0914f90-200c-4715-aaab-54beacf339b9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 604.890260] env[62070]: DEBUG oslo_concurrency.lockutils [None req-78392d05-754f-4818-8f15-eae4311c941b tempest-ServersTestManualDisk-1129474164 tempest-ServersTestManualDisk-1129474164-project-member] Lock "d0914f90-200c-4715-aaab-54beacf339b9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 605.154069] env[62070]: DEBUG nova.compute.manager [None req-96d2c368-fe9f-4ee6-a530-cc95bf35b01a tempest-DeleteServersTestJSON-2112665073 
tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 73ab65b7-32e7-4206-8f31-466085319c71] Starting instance... {{(pid=62070) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 605.304044] env[62070]: DEBUG nova.scheduler.client.report [None req-235f2fca-0298-44f0-b867-2cd232992478 tempest-TenantUsagesTestJSON-193686055 tempest-TenantUsagesTestJSON-193686055-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 605.382518] env[62070]: DEBUG nova.compute.manager [None req-d154b157-ec97-49c4-b004-fc679da21516 tempest-ServersWithSpecificFlavorTestJSON-1618728433 tempest-ServersWithSpecificFlavorTestJSON-1618728433-project-member] [instance: 7d8d4cdc-2b25-46f3-8bc0-1de22fdd005a] Start spawning the instance on the hypervisor. {{(pid=62070) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 605.412349] env[62070]: DEBUG nova.virt.hardware [None req-d154b157-ec97-49c4-b004-fc679da21516 tempest-ServersWithSpecificFlavorTestJSON-1618728433 tempest-ServersWithSpecificFlavorTestJSON-1618728433-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T09:23:21Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='1234749509',id=23,is_public=True,memory_mb=192,name='tempest-flavor_with_ephemeral_0-1233863880',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T09:21:17Z,direct_url=,disk_format='vmdk',id=43ea607c-7ece-4601-9b11-75c6a16aa7dd,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9d42cb2bbadf40d6b35f237f71234611',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T09:21:18Z,virtual_size=,visibility=), allow threads: False {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 605.412624] env[62070]: DEBUG nova.virt.hardware [None req-d154b157-ec97-49c4-b004-fc679da21516 tempest-ServersWithSpecificFlavorTestJSON-1618728433 tempest-ServersWithSpecificFlavorTestJSON-1618728433-project-member] Flavor limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 605.412821] env[62070]: DEBUG nova.virt.hardware [None req-d154b157-ec97-49c4-b004-fc679da21516 tempest-ServersWithSpecificFlavorTestJSON-1618728433 tempest-ServersWithSpecificFlavorTestJSON-1618728433-project-member] Image limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 605.413826] env[62070]: DEBUG nova.virt.hardware [None req-d154b157-ec97-49c4-b004-fc679da21516 tempest-ServersWithSpecificFlavorTestJSON-1618728433 tempest-ServersWithSpecificFlavorTestJSON-1618728433-project-member] Flavor pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 605.414138] env[62070]: DEBUG nova.virt.hardware [None 
req-d154b157-ec97-49c4-b004-fc679da21516 tempest-ServersWithSpecificFlavorTestJSON-1618728433 tempest-ServersWithSpecificFlavorTestJSON-1618728433-project-member] Image pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 605.415353] env[62070]: DEBUG nova.virt.hardware [None req-d154b157-ec97-49c4-b004-fc679da21516 tempest-ServersWithSpecificFlavorTestJSON-1618728433 tempest-ServersWithSpecificFlavorTestJSON-1618728433-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 605.415353] env[62070]: DEBUG nova.virt.hardware [None req-d154b157-ec97-49c4-b004-fc679da21516 tempest-ServersWithSpecificFlavorTestJSON-1618728433 tempest-ServersWithSpecificFlavorTestJSON-1618728433-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 605.415353] env[62070]: DEBUG nova.virt.hardware [None req-d154b157-ec97-49c4-b004-fc679da21516 tempest-ServersWithSpecificFlavorTestJSON-1618728433 tempest-ServersWithSpecificFlavorTestJSON-1618728433-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 605.415353] env[62070]: DEBUG nova.virt.hardware [None req-d154b157-ec97-49c4-b004-fc679da21516 tempest-ServersWithSpecificFlavorTestJSON-1618728433 tempest-ServersWithSpecificFlavorTestJSON-1618728433-project-member] Got 1 possible topologies {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 605.415785] env[62070]: DEBUG nova.virt.hardware [None req-d154b157-ec97-49c4-b004-fc679da21516 tempest-ServersWithSpecificFlavorTestJSON-1618728433 tempest-ServersWithSpecificFlavorTestJSON-1618728433-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 605.416313] env[62070]: DEBUG nova.virt.hardware [None req-d154b157-ec97-49c4-b004-fc679da21516 tempest-ServersWithSpecificFlavorTestJSON-1618728433 tempest-ServersWithSpecificFlavorTestJSON-1618728433-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 605.417205] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8c89186-f09a-4a0f-a253-b73b2c3daa94 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.427308] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98e5f57b-ae9a-4cf5-8bcd-c9e14aacc4a7 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.684120] env[62070]: DEBUG oslo_concurrency.lockutils [None req-96d2c368-fe9f-4ee6-a530-cc95bf35b01a tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 605.811841] env[62070]: DEBUG oslo_concurrency.lockutils 
[None req-235f2fca-0298-44f0-b867-2cd232992478 tempest-TenantUsagesTestJSON-193686055 tempest-TenantUsagesTestJSON-193686055-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.472s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 605.812420] env[62070]: DEBUG nova.compute.manager [None req-235f2fca-0298-44f0-b867-2cd232992478 tempest-TenantUsagesTestJSON-193686055 tempest-TenantUsagesTestJSON-193686055-project-member] [instance: 2226072d-16f2-4ea1-a56c-d866554c7379] Start building networks asynchronously for instance. {{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 605.815362] env[62070]: DEBUG oslo_concurrency.lockutils [None req-7301dc1c-6dd9-44f9-a50e-255658085cb3 tempest-ServerDiagnosticsTest-24419400 tempest-ServerDiagnosticsTest-24419400-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 21.722s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 606.320143] env[62070]: DEBUG nova.compute.utils [None req-235f2fca-0298-44f0-b867-2cd232992478 tempest-TenantUsagesTestJSON-193686055 tempest-TenantUsagesTestJSON-193686055-project-member] Using /dev/sd instead of None {{(pid=62070) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 606.325673] env[62070]: DEBUG nova.compute.manager [None req-235f2fca-0298-44f0-b867-2cd232992478 tempest-TenantUsagesTestJSON-193686055 tempest-TenantUsagesTestJSON-193686055-project-member] [instance: 2226072d-16f2-4ea1-a56c-d866554c7379] Allocating IP information in the background. {{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 606.325673] env[62070]: DEBUG nova.network.neutron [None req-235f2fca-0298-44f0-b867-2cd232992478 tempest-TenantUsagesTestJSON-193686055 tempest-TenantUsagesTestJSON-193686055-project-member] [instance: 2226072d-16f2-4ea1-a56c-d866554c7379] allocate_for_instance() {{(pid=62070) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 606.476640] env[62070]: DEBUG nova.policy [None req-235f2fca-0298-44f0-b867-2cd232992478 tempest-TenantUsagesTestJSON-193686055 tempest-TenantUsagesTestJSON-193686055-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ffbbe338605046f78981f1f9b43a224c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2a71d9fcf32e49539be7d6dff7afca30', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62070) authorize /opt/stack/nova/nova/policy.py:201}} [ 606.597689] env[62070]: DEBUG nova.compute.manager [req-44151e7c-d317-403d-807c-f23dddcc18b7 req-755bc149-7596-4aad-8262-35190c6e069a service nova] [instance: 7d8d4cdc-2b25-46f3-8bc0-1de22fdd005a] Received event network-changed-de7507a7-5aad-4849-8609-8ff49b3f040e {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 606.597934] env[62070]: DEBUG nova.compute.manager [req-44151e7c-d317-403d-807c-f23dddcc18b7 req-755bc149-7596-4aad-8262-35190c6e069a service nova] [instance: 7d8d4cdc-2b25-46f3-8bc0-1de22fdd005a] Refreshing instance network info cache due to event 
network-changed-de7507a7-5aad-4849-8609-8ff49b3f040e. {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 606.599078] env[62070]: DEBUG oslo_concurrency.lockutils [req-44151e7c-d317-403d-807c-f23dddcc18b7 req-755bc149-7596-4aad-8262-35190c6e069a service nova] Acquiring lock "refresh_cache-7d8d4cdc-2b25-46f3-8bc0-1de22fdd005a" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 606.599078] env[62070]: DEBUG oslo_concurrency.lockutils [req-44151e7c-d317-403d-807c-f23dddcc18b7 req-755bc149-7596-4aad-8262-35190c6e069a service nova] Acquired lock "refresh_cache-7d8d4cdc-2b25-46f3-8bc0-1de22fdd005a" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 606.599078] env[62070]: DEBUG nova.network.neutron [req-44151e7c-d317-403d-807c-f23dddcc18b7 req-755bc149-7596-4aad-8262-35190c6e069a service nova] [instance: 7d8d4cdc-2b25-46f3-8bc0-1de22fdd005a] Refreshing network info cache for port de7507a7-5aad-4849-8609-8ff49b3f040e {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 606.729714] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1e12d2f-5981-41a4-9da0-965852357b88 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.737503] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75642ae3-b5fd-406e-81eb-400385d82da9 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.778741] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b51acac2-b674-481d-b97c-7e9bb518ebc2 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.786857] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e043ec98-b5ba-4d82-a44a-ef053ce8c30b {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.801400] env[62070]: DEBUG nova.compute.provider_tree [None req-7301dc1c-6dd9-44f9-a50e-255658085cb3 tempest-ServerDiagnosticsTest-24419400 tempest-ServerDiagnosticsTest-24419400-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 606.828553] env[62070]: DEBUG nova.compute.manager [None req-235f2fca-0298-44f0-b867-2cd232992478 tempest-TenantUsagesTestJSON-193686055 tempest-TenantUsagesTestJSON-193686055-project-member] [instance: 2226072d-16f2-4ea1-a56c-d866554c7379] Start building block device mappings for instance. {{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 606.961522] env[62070]: ERROR nova.compute.manager [None req-d154b157-ec97-49c4-b004-fc679da21516 tempest-ServersWithSpecificFlavorTestJSON-1618728433 tempest-ServersWithSpecificFlavorTestJSON-1618728433-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port de7507a7-5aad-4849-8609-8ff49b3f040e, please check neutron logs for more information. 
[ 606.961522] env[62070]: ERROR nova.compute.manager Traceback (most recent call last): [ 606.961522] env[62070]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 606.961522] env[62070]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 606.961522] env[62070]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 606.961522] env[62070]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 606.961522] env[62070]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 606.961522] env[62070]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 606.961522] env[62070]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 606.961522] env[62070]: ERROR nova.compute.manager self.force_reraise() [ 606.961522] env[62070]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 606.961522] env[62070]: ERROR nova.compute.manager raise self.value [ 606.961522] env[62070]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 606.961522] env[62070]: ERROR nova.compute.manager updated_port = self._update_port( [ 606.961522] env[62070]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 606.961522] env[62070]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 606.962716] env[62070]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 606.962716] env[62070]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 606.962716] env[62070]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port de7507a7-5aad-4849-8609-8ff49b3f040e, please check neutron logs for more information. 
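[editor's note] The traceback above (and the near-identical ones later in this log) always ends in the same three frames: _update_ports_for_instance wraps the port update in oslo_utils' save_and_reraise_exception() so cleanup can run before the original error propagates, and _ensure_no_port_binding_failure is what actually raises PortBindingFailed. The sketch below is only an illustration assembled from those frames, not Nova's source; the stand-in exception class and the assumption that Neutron signals a failed binding by setting the port's 'binding:vif_type' to 'binding_failed' are the editor's, and only the save_and_reraise_exception() helper is the real oslo.utils API.

    # Illustrative sketch of the failure path recorded in the traceback above.
    from oslo_utils import excutils  # same helper that appears in the excutils.py frames


    class PortBindingFailed(Exception):
        """Editor's stand-in for nova.exception.PortBindingFailed."""

        def __init__(self, port_id):
            super().__init__("Binding failed for port %s, please check neutron "
                             "logs for more information." % port_id)


    def _ensure_no_port_binding_failure(port):
        # Last frame of the traceback: reject ports whose binding came back failed.
        # Assumption: a failed binding is reported via 'binding:vif_type'.
        if port.get('binding:vif_type') == 'binding_failed':
            raise PortBindingFailed(port_id=port['id'])


    def _update_port(port):
        # In Nova this would first push the update to Neutron; only the final
        # check that produced the error above is kept here.
        _ensure_no_port_binding_failure(port)
        return port


    def _update_ports_for_instance(requested_ports):
        created_port_ids = []
        for port in requested_ports:
            try:
                updated = _update_port(port)
                created_port_ids.append(updated['id'])
            except Exception:
                # save_and_reraise_exception() captures the in-flight exception,
                # lets the cleanup body run, then re-raises it on exit -- which is
                # why PortBindingFailed still reaches _allocate_network_async.
                with excutils.save_and_reraise_exception():
                    created_port_ids.clear()
        return created_port_ids


    if __name__ == "__main__":
        failed_port = {'id': 'de7507a7-5aad-4849-8609-8ff49b3f040e',
                       'binding:vif_type': 'binding_failed'}
        try:
            _update_ports_for_instance([failed_port])
        except PortBindingFailed as exc:
            print(exc)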
[ 606.962716] env[62070]: ERROR nova.compute.manager [ 606.962716] env[62070]: Traceback (most recent call last): [ 606.962716] env[62070]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 606.962716] env[62070]: listener.cb(fileno) [ 606.962716] env[62070]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 606.962716] env[62070]: result = function(*args, **kwargs) [ 606.962716] env[62070]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 606.962716] env[62070]: return func(*args, **kwargs) [ 606.962716] env[62070]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 606.962716] env[62070]: raise e [ 606.962716] env[62070]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 606.962716] env[62070]: nwinfo = self.network_api.allocate_for_instance( [ 606.962716] env[62070]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 606.962716] env[62070]: created_port_ids = self._update_ports_for_instance( [ 606.962716] env[62070]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 606.962716] env[62070]: with excutils.save_and_reraise_exception(): [ 606.962716] env[62070]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 606.962716] env[62070]: self.force_reraise() [ 606.962716] env[62070]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 606.962716] env[62070]: raise self.value [ 606.962716] env[62070]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 606.962716] env[62070]: updated_port = self._update_port( [ 606.962716] env[62070]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 606.962716] env[62070]: _ensure_no_port_binding_failure(port) [ 606.962716] env[62070]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 606.962716] env[62070]: raise exception.PortBindingFailed(port_id=port['id']) [ 606.963604] env[62070]: nova.exception.PortBindingFailed: Binding failed for port de7507a7-5aad-4849-8609-8ff49b3f040e, please check neutron logs for more information. [ 606.963604] env[62070]: Removing descriptor: 19 [ 606.963604] env[62070]: ERROR nova.compute.manager [None req-d154b157-ec97-49c4-b004-fc679da21516 tempest-ServersWithSpecificFlavorTestJSON-1618728433 tempest-ServersWithSpecificFlavorTestJSON-1618728433-project-member] [instance: 7d8d4cdc-2b25-46f3-8bc0-1de22fdd005a] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port de7507a7-5aad-4849-8609-8ff49b3f040e, please check neutron logs for more information. 
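[editor's note] The per-instance traceback that follows shows where the failure actually surfaces: network allocation runs in a background greenthread, and the driver only hits the exception once build_virtual_machine iterates network_info (model.py __iter__ -> _sync_wrapper -> wait -> the greenthread's wait()). The sketch below illustrates that deferred-result pattern only and is not Nova's network model; the wrapper class and the placeholder allocate function are the editor's, while eventlet.spawn() and GreenThread.wait() are the real eventlet APIs named in the frames.

    # Illustrative sketch: an async wrapper whose exception re-raises on first use.
    import eventlet


    class AsyncNetworkInfo:
        """Editor's stand-in for the network_info wrapper seen in the traceback."""

        def __init__(self, allocate_fn, *args):
            # Allocation starts immediately in a background greenthread...
            self._gt = eventlet.spawn(allocate_fn, *args)

        def __iter__(self):
            # ...but GreenThread.wait() re-raises whatever the allocation raised,
            # so the binding failure only appears when the driver iterates VIFs.
            return iter(self._gt.wait())


    def allocate_for_instance(instance_uuid):
        # Placeholder standing in for the failing Neutron allocation logged above.
        raise RuntimeError('Binding failed for port '
                           'de7507a7-5aad-4849-8609-8ff49b3f040e')


    nw_info = AsyncNetworkInfo(allocate_for_instance,
                               '7d8d4cdc-2b25-46f3-8bc0-1de22fdd005a')
    try:
        for vif in nw_info:  # mirrors get_vif_info()'s "for vif in network_info"
            print(vif)
    except RuntimeError as exc:
        print('spawn failed:', exc)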
[ 606.963604] env[62070]: ERROR nova.compute.manager [instance: 7d8d4cdc-2b25-46f3-8bc0-1de22fdd005a] Traceback (most recent call last): [ 606.963604] env[62070]: ERROR nova.compute.manager [instance: 7d8d4cdc-2b25-46f3-8bc0-1de22fdd005a] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 606.963604] env[62070]: ERROR nova.compute.manager [instance: 7d8d4cdc-2b25-46f3-8bc0-1de22fdd005a] yield resources [ 606.963604] env[62070]: ERROR nova.compute.manager [instance: 7d8d4cdc-2b25-46f3-8bc0-1de22fdd005a] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 606.963604] env[62070]: ERROR nova.compute.manager [instance: 7d8d4cdc-2b25-46f3-8bc0-1de22fdd005a] self.driver.spawn(context, instance, image_meta, [ 606.963604] env[62070]: ERROR nova.compute.manager [instance: 7d8d4cdc-2b25-46f3-8bc0-1de22fdd005a] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 606.963604] env[62070]: ERROR nova.compute.manager [instance: 7d8d4cdc-2b25-46f3-8bc0-1de22fdd005a] self._vmops.spawn(context, instance, image_meta, injected_files, [ 606.963604] env[62070]: ERROR nova.compute.manager [instance: 7d8d4cdc-2b25-46f3-8bc0-1de22fdd005a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 606.963604] env[62070]: ERROR nova.compute.manager [instance: 7d8d4cdc-2b25-46f3-8bc0-1de22fdd005a] vm_ref = self.build_virtual_machine(instance, [ 606.964020] env[62070]: ERROR nova.compute.manager [instance: 7d8d4cdc-2b25-46f3-8bc0-1de22fdd005a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 606.964020] env[62070]: ERROR nova.compute.manager [instance: 7d8d4cdc-2b25-46f3-8bc0-1de22fdd005a] vif_infos = vmwarevif.get_vif_info(self._session, [ 606.964020] env[62070]: ERROR nova.compute.manager [instance: 7d8d4cdc-2b25-46f3-8bc0-1de22fdd005a] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 606.964020] env[62070]: ERROR nova.compute.manager [instance: 7d8d4cdc-2b25-46f3-8bc0-1de22fdd005a] for vif in network_info: [ 606.964020] env[62070]: ERROR nova.compute.manager [instance: 7d8d4cdc-2b25-46f3-8bc0-1de22fdd005a] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 606.964020] env[62070]: ERROR nova.compute.manager [instance: 7d8d4cdc-2b25-46f3-8bc0-1de22fdd005a] return self._sync_wrapper(fn, *args, **kwargs) [ 606.964020] env[62070]: ERROR nova.compute.manager [instance: 7d8d4cdc-2b25-46f3-8bc0-1de22fdd005a] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 606.964020] env[62070]: ERROR nova.compute.manager [instance: 7d8d4cdc-2b25-46f3-8bc0-1de22fdd005a] self.wait() [ 606.964020] env[62070]: ERROR nova.compute.manager [instance: 7d8d4cdc-2b25-46f3-8bc0-1de22fdd005a] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 606.964020] env[62070]: ERROR nova.compute.manager [instance: 7d8d4cdc-2b25-46f3-8bc0-1de22fdd005a] self[:] = self._gt.wait() [ 606.964020] env[62070]: ERROR nova.compute.manager [instance: 7d8d4cdc-2b25-46f3-8bc0-1de22fdd005a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 606.964020] env[62070]: ERROR nova.compute.manager [instance: 7d8d4cdc-2b25-46f3-8bc0-1de22fdd005a] return self._exit_event.wait() [ 606.964020] env[62070]: ERROR nova.compute.manager [instance: 7d8d4cdc-2b25-46f3-8bc0-1de22fdd005a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 606.965173] env[62070]: ERROR 
nova.compute.manager [instance: 7d8d4cdc-2b25-46f3-8bc0-1de22fdd005a] result = hub.switch() [ 606.965173] env[62070]: ERROR nova.compute.manager [instance: 7d8d4cdc-2b25-46f3-8bc0-1de22fdd005a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 606.965173] env[62070]: ERROR nova.compute.manager [instance: 7d8d4cdc-2b25-46f3-8bc0-1de22fdd005a] return self.greenlet.switch() [ 606.965173] env[62070]: ERROR nova.compute.manager [instance: 7d8d4cdc-2b25-46f3-8bc0-1de22fdd005a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 606.965173] env[62070]: ERROR nova.compute.manager [instance: 7d8d4cdc-2b25-46f3-8bc0-1de22fdd005a] result = function(*args, **kwargs) [ 606.965173] env[62070]: ERROR nova.compute.manager [instance: 7d8d4cdc-2b25-46f3-8bc0-1de22fdd005a] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 606.965173] env[62070]: ERROR nova.compute.manager [instance: 7d8d4cdc-2b25-46f3-8bc0-1de22fdd005a] return func(*args, **kwargs) [ 606.965173] env[62070]: ERROR nova.compute.manager [instance: 7d8d4cdc-2b25-46f3-8bc0-1de22fdd005a] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 606.965173] env[62070]: ERROR nova.compute.manager [instance: 7d8d4cdc-2b25-46f3-8bc0-1de22fdd005a] raise e [ 606.965173] env[62070]: ERROR nova.compute.manager [instance: 7d8d4cdc-2b25-46f3-8bc0-1de22fdd005a] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 606.965173] env[62070]: ERROR nova.compute.manager [instance: 7d8d4cdc-2b25-46f3-8bc0-1de22fdd005a] nwinfo = self.network_api.allocate_for_instance( [ 606.965173] env[62070]: ERROR nova.compute.manager [instance: 7d8d4cdc-2b25-46f3-8bc0-1de22fdd005a] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 606.965173] env[62070]: ERROR nova.compute.manager [instance: 7d8d4cdc-2b25-46f3-8bc0-1de22fdd005a] created_port_ids = self._update_ports_for_instance( [ 606.965638] env[62070]: ERROR nova.compute.manager [instance: 7d8d4cdc-2b25-46f3-8bc0-1de22fdd005a] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 606.965638] env[62070]: ERROR nova.compute.manager [instance: 7d8d4cdc-2b25-46f3-8bc0-1de22fdd005a] with excutils.save_and_reraise_exception(): [ 606.965638] env[62070]: ERROR nova.compute.manager [instance: 7d8d4cdc-2b25-46f3-8bc0-1de22fdd005a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 606.965638] env[62070]: ERROR nova.compute.manager [instance: 7d8d4cdc-2b25-46f3-8bc0-1de22fdd005a] self.force_reraise() [ 606.965638] env[62070]: ERROR nova.compute.manager [instance: 7d8d4cdc-2b25-46f3-8bc0-1de22fdd005a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 606.965638] env[62070]: ERROR nova.compute.manager [instance: 7d8d4cdc-2b25-46f3-8bc0-1de22fdd005a] raise self.value [ 606.965638] env[62070]: ERROR nova.compute.manager [instance: 7d8d4cdc-2b25-46f3-8bc0-1de22fdd005a] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 606.965638] env[62070]: ERROR nova.compute.manager [instance: 7d8d4cdc-2b25-46f3-8bc0-1de22fdd005a] updated_port = self._update_port( [ 606.965638] env[62070]: ERROR nova.compute.manager [instance: 7d8d4cdc-2b25-46f3-8bc0-1de22fdd005a] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 606.965638] 
env[62070]: ERROR nova.compute.manager [instance: 7d8d4cdc-2b25-46f3-8bc0-1de22fdd005a] _ensure_no_port_binding_failure(port) [ 606.965638] env[62070]: ERROR nova.compute.manager [instance: 7d8d4cdc-2b25-46f3-8bc0-1de22fdd005a] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 606.965638] env[62070]: ERROR nova.compute.manager [instance: 7d8d4cdc-2b25-46f3-8bc0-1de22fdd005a] raise exception.PortBindingFailed(port_id=port['id']) [ 606.966298] env[62070]: ERROR nova.compute.manager [instance: 7d8d4cdc-2b25-46f3-8bc0-1de22fdd005a] nova.exception.PortBindingFailed: Binding failed for port de7507a7-5aad-4849-8609-8ff49b3f040e, please check neutron logs for more information. [ 606.966298] env[62070]: ERROR nova.compute.manager [instance: 7d8d4cdc-2b25-46f3-8bc0-1de22fdd005a] [ 606.966298] env[62070]: INFO nova.compute.manager [None req-d154b157-ec97-49c4-b004-fc679da21516 tempest-ServersWithSpecificFlavorTestJSON-1618728433 tempest-ServersWithSpecificFlavorTestJSON-1618728433-project-member] [instance: 7d8d4cdc-2b25-46f3-8bc0-1de22fdd005a] Terminating instance [ 606.967894] env[62070]: DEBUG oslo_concurrency.lockutils [None req-d154b157-ec97-49c4-b004-fc679da21516 tempest-ServersWithSpecificFlavorTestJSON-1618728433 tempest-ServersWithSpecificFlavorTestJSON-1618728433-project-member] Acquiring lock "refresh_cache-7d8d4cdc-2b25-46f3-8bc0-1de22fdd005a" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 607.162921] env[62070]: DEBUG nova.network.neutron [req-44151e7c-d317-403d-807c-f23dddcc18b7 req-755bc149-7596-4aad-8262-35190c6e069a service nova] [instance: 7d8d4cdc-2b25-46f3-8bc0-1de22fdd005a] Instance cache missing network info. {{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 607.305219] env[62070]: DEBUG nova.scheduler.client.report [None req-7301dc1c-6dd9-44f9-a50e-255658085cb3 tempest-ServerDiagnosticsTest-24419400 tempest-ServerDiagnosticsTest-24419400-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 607.430979] env[62070]: DEBUG nova.network.neutron [req-44151e7c-d317-403d-807c-f23dddcc18b7 req-755bc149-7596-4aad-8262-35190c6e069a service nova] [instance: 7d8d4cdc-2b25-46f3-8bc0-1de22fdd005a] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 607.620103] env[62070]: DEBUG nova.network.neutron [None req-235f2fca-0298-44f0-b867-2cd232992478 tempest-TenantUsagesTestJSON-193686055 tempest-TenantUsagesTestJSON-193686055-project-member] [instance: 2226072d-16f2-4ea1-a56c-d866554c7379] Successfully created port: 9037323c-72d6-4bfa-ad3e-0a3d37347560 {{(pid=62070) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 607.814880] env[62070]: DEBUG oslo_concurrency.lockutils [None req-7301dc1c-6dd9-44f9-a50e-255658085cb3 tempest-ServerDiagnosticsTest-24419400 tempest-ServerDiagnosticsTest-24419400-project-member] Lock "compute_resources" 
"released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.999s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 607.817181] env[62070]: ERROR nova.compute.manager [None req-7301dc1c-6dd9-44f9-a50e-255658085cb3 tempest-ServerDiagnosticsTest-24419400 tempest-ServerDiagnosticsTest-24419400-project-member] [instance: 317f20e9-6ba1-4b41-b298-5dd844f323ac] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 900622a2-0f27-4e6e-9fd4-8a0a81d8d602, please check neutron logs for more information. [ 607.817181] env[62070]: ERROR nova.compute.manager [instance: 317f20e9-6ba1-4b41-b298-5dd844f323ac] Traceback (most recent call last): [ 607.817181] env[62070]: ERROR nova.compute.manager [instance: 317f20e9-6ba1-4b41-b298-5dd844f323ac] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 607.817181] env[62070]: ERROR nova.compute.manager [instance: 317f20e9-6ba1-4b41-b298-5dd844f323ac] self.driver.spawn(context, instance, image_meta, [ 607.817181] env[62070]: ERROR nova.compute.manager [instance: 317f20e9-6ba1-4b41-b298-5dd844f323ac] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 607.817181] env[62070]: ERROR nova.compute.manager [instance: 317f20e9-6ba1-4b41-b298-5dd844f323ac] self._vmops.spawn(context, instance, image_meta, injected_files, [ 607.817181] env[62070]: ERROR nova.compute.manager [instance: 317f20e9-6ba1-4b41-b298-5dd844f323ac] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 607.817181] env[62070]: ERROR nova.compute.manager [instance: 317f20e9-6ba1-4b41-b298-5dd844f323ac] vm_ref = self.build_virtual_machine(instance, [ 607.817181] env[62070]: ERROR nova.compute.manager [instance: 317f20e9-6ba1-4b41-b298-5dd844f323ac] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 607.817181] env[62070]: ERROR nova.compute.manager [instance: 317f20e9-6ba1-4b41-b298-5dd844f323ac] vif_infos = vmwarevif.get_vif_info(self._session, [ 607.817181] env[62070]: ERROR nova.compute.manager [instance: 317f20e9-6ba1-4b41-b298-5dd844f323ac] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 607.817746] env[62070]: ERROR nova.compute.manager [instance: 317f20e9-6ba1-4b41-b298-5dd844f323ac] for vif in network_info: [ 607.817746] env[62070]: ERROR nova.compute.manager [instance: 317f20e9-6ba1-4b41-b298-5dd844f323ac] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 607.817746] env[62070]: ERROR nova.compute.manager [instance: 317f20e9-6ba1-4b41-b298-5dd844f323ac] return self._sync_wrapper(fn, *args, **kwargs) [ 607.817746] env[62070]: ERROR nova.compute.manager [instance: 317f20e9-6ba1-4b41-b298-5dd844f323ac] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 607.817746] env[62070]: ERROR nova.compute.manager [instance: 317f20e9-6ba1-4b41-b298-5dd844f323ac] self.wait() [ 607.817746] env[62070]: ERROR nova.compute.manager [instance: 317f20e9-6ba1-4b41-b298-5dd844f323ac] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 607.817746] env[62070]: ERROR nova.compute.manager [instance: 317f20e9-6ba1-4b41-b298-5dd844f323ac] self[:] = self._gt.wait() [ 607.817746] env[62070]: ERROR nova.compute.manager [instance: 317f20e9-6ba1-4b41-b298-5dd844f323ac] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 607.817746] env[62070]: ERROR 
nova.compute.manager [instance: 317f20e9-6ba1-4b41-b298-5dd844f323ac] return self._exit_event.wait() [ 607.817746] env[62070]: ERROR nova.compute.manager [instance: 317f20e9-6ba1-4b41-b298-5dd844f323ac] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 607.817746] env[62070]: ERROR nova.compute.manager [instance: 317f20e9-6ba1-4b41-b298-5dd844f323ac] result = hub.switch() [ 607.817746] env[62070]: ERROR nova.compute.manager [instance: 317f20e9-6ba1-4b41-b298-5dd844f323ac] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 607.817746] env[62070]: ERROR nova.compute.manager [instance: 317f20e9-6ba1-4b41-b298-5dd844f323ac] return self.greenlet.switch() [ 607.818284] env[62070]: ERROR nova.compute.manager [instance: 317f20e9-6ba1-4b41-b298-5dd844f323ac] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 607.818284] env[62070]: ERROR nova.compute.manager [instance: 317f20e9-6ba1-4b41-b298-5dd844f323ac] result = function(*args, **kwargs) [ 607.818284] env[62070]: ERROR nova.compute.manager [instance: 317f20e9-6ba1-4b41-b298-5dd844f323ac] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 607.818284] env[62070]: ERROR nova.compute.manager [instance: 317f20e9-6ba1-4b41-b298-5dd844f323ac] return func(*args, **kwargs) [ 607.818284] env[62070]: ERROR nova.compute.manager [instance: 317f20e9-6ba1-4b41-b298-5dd844f323ac] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 607.818284] env[62070]: ERROR nova.compute.manager [instance: 317f20e9-6ba1-4b41-b298-5dd844f323ac] raise e [ 607.818284] env[62070]: ERROR nova.compute.manager [instance: 317f20e9-6ba1-4b41-b298-5dd844f323ac] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 607.818284] env[62070]: ERROR nova.compute.manager [instance: 317f20e9-6ba1-4b41-b298-5dd844f323ac] nwinfo = self.network_api.allocate_for_instance( [ 607.818284] env[62070]: ERROR nova.compute.manager [instance: 317f20e9-6ba1-4b41-b298-5dd844f323ac] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 607.818284] env[62070]: ERROR nova.compute.manager [instance: 317f20e9-6ba1-4b41-b298-5dd844f323ac] created_port_ids = self._update_ports_for_instance( [ 607.818284] env[62070]: ERROR nova.compute.manager [instance: 317f20e9-6ba1-4b41-b298-5dd844f323ac] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 607.818284] env[62070]: ERROR nova.compute.manager [instance: 317f20e9-6ba1-4b41-b298-5dd844f323ac] with excutils.save_and_reraise_exception(): [ 607.818284] env[62070]: ERROR nova.compute.manager [instance: 317f20e9-6ba1-4b41-b298-5dd844f323ac] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 607.818971] env[62070]: ERROR nova.compute.manager [instance: 317f20e9-6ba1-4b41-b298-5dd844f323ac] self.force_reraise() [ 607.818971] env[62070]: ERROR nova.compute.manager [instance: 317f20e9-6ba1-4b41-b298-5dd844f323ac] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 607.818971] env[62070]: ERROR nova.compute.manager [instance: 317f20e9-6ba1-4b41-b298-5dd844f323ac] raise self.value [ 607.818971] env[62070]: ERROR nova.compute.manager [instance: 317f20e9-6ba1-4b41-b298-5dd844f323ac] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance 
[ 607.818971] env[62070]: ERROR nova.compute.manager [instance: 317f20e9-6ba1-4b41-b298-5dd844f323ac] updated_port = self._update_port( [ 607.818971] env[62070]: ERROR nova.compute.manager [instance: 317f20e9-6ba1-4b41-b298-5dd844f323ac] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 607.818971] env[62070]: ERROR nova.compute.manager [instance: 317f20e9-6ba1-4b41-b298-5dd844f323ac] _ensure_no_port_binding_failure(port) [ 607.818971] env[62070]: ERROR nova.compute.manager [instance: 317f20e9-6ba1-4b41-b298-5dd844f323ac] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 607.818971] env[62070]: ERROR nova.compute.manager [instance: 317f20e9-6ba1-4b41-b298-5dd844f323ac] raise exception.PortBindingFailed(port_id=port['id']) [ 607.818971] env[62070]: ERROR nova.compute.manager [instance: 317f20e9-6ba1-4b41-b298-5dd844f323ac] nova.exception.PortBindingFailed: Binding failed for port 900622a2-0f27-4e6e-9fd4-8a0a81d8d602, please check neutron logs for more information. [ 607.818971] env[62070]: ERROR nova.compute.manager [instance: 317f20e9-6ba1-4b41-b298-5dd844f323ac] [ 607.819329] env[62070]: DEBUG nova.compute.utils [None req-7301dc1c-6dd9-44f9-a50e-255658085cb3 tempest-ServerDiagnosticsTest-24419400 tempest-ServerDiagnosticsTest-24419400-project-member] [instance: 317f20e9-6ba1-4b41-b298-5dd844f323ac] Binding failed for port 900622a2-0f27-4e6e-9fd4-8a0a81d8d602, please check neutron logs for more information. {{(pid=62070) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 607.821288] env[62070]: DEBUG nova.compute.manager [None req-7301dc1c-6dd9-44f9-a50e-255658085cb3 tempest-ServerDiagnosticsTest-24419400 tempest-ServerDiagnosticsTest-24419400-project-member] [instance: 317f20e9-6ba1-4b41-b298-5dd844f323ac] Build of instance 317f20e9-6ba1-4b41-b298-5dd844f323ac was re-scheduled: Binding failed for port 900622a2-0f27-4e6e-9fd4-8a0a81d8d602, please check neutron logs for more information. 
{{(pid=62070) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 607.821766] env[62070]: DEBUG nova.compute.manager [None req-7301dc1c-6dd9-44f9-a50e-255658085cb3 tempest-ServerDiagnosticsTest-24419400 tempest-ServerDiagnosticsTest-24419400-project-member] [instance: 317f20e9-6ba1-4b41-b298-5dd844f323ac] Unplugging VIFs for instance {{(pid=62070) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 607.822022] env[62070]: DEBUG oslo_concurrency.lockutils [None req-7301dc1c-6dd9-44f9-a50e-255658085cb3 tempest-ServerDiagnosticsTest-24419400 tempest-ServerDiagnosticsTest-24419400-project-member] Acquiring lock "refresh_cache-317f20e9-6ba1-4b41-b298-5dd844f323ac" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 607.822184] env[62070]: DEBUG oslo_concurrency.lockutils [None req-7301dc1c-6dd9-44f9-a50e-255658085cb3 tempest-ServerDiagnosticsTest-24419400 tempest-ServerDiagnosticsTest-24419400-project-member] Acquired lock "refresh_cache-317f20e9-6ba1-4b41-b298-5dd844f323ac" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 607.822457] env[62070]: DEBUG nova.network.neutron [None req-7301dc1c-6dd9-44f9-a50e-255658085cb3 tempest-ServerDiagnosticsTest-24419400 tempest-ServerDiagnosticsTest-24419400-project-member] [instance: 317f20e9-6ba1-4b41-b298-5dd844f323ac] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 607.825441] env[62070]: DEBUG oslo_concurrency.lockutils [None req-67d8c644-2517-46bc-9d7f-c0ef47d3d2eb tempest-ImagesOneServerNegativeTestJSON-1867967444 tempest-ImagesOneServerNegativeTestJSON-1867967444-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 23.015s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 607.831613] env[62070]: INFO nova.compute.claims [None req-67d8c644-2517-46bc-9d7f-c0ef47d3d2eb tempest-ImagesOneServerNegativeTestJSON-1867967444 tempest-ImagesOneServerNegativeTestJSON-1867967444-project-member] [instance: d8478b63-3a62-4afa-950b-edf9774e8ea8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 607.839955] env[62070]: DEBUG nova.compute.manager [None req-235f2fca-0298-44f0-b867-2cd232992478 tempest-TenantUsagesTestJSON-193686055 tempest-TenantUsagesTestJSON-193686055-project-member] [instance: 2226072d-16f2-4ea1-a56c-d866554c7379] Start spawning the instance on the hypervisor. 
{{(pid=62070) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 607.888540] env[62070]: DEBUG nova.virt.hardware [None req-235f2fca-0298-44f0-b867-2cd232992478 tempest-TenantUsagesTestJSON-193686055 tempest-TenantUsagesTestJSON-193686055-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T09:21:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T09:21:17Z,direct_url=,disk_format='vmdk',id=43ea607c-7ece-4601-9b11-75c6a16aa7dd,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9d42cb2bbadf40d6b35f237f71234611',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T09:21:18Z,virtual_size=,visibility=), allow threads: False {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 607.888677] env[62070]: DEBUG nova.virt.hardware [None req-235f2fca-0298-44f0-b867-2cd232992478 tempest-TenantUsagesTestJSON-193686055 tempest-TenantUsagesTestJSON-193686055-project-member] Flavor limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 607.889078] env[62070]: DEBUG nova.virt.hardware [None req-235f2fca-0298-44f0-b867-2cd232992478 tempest-TenantUsagesTestJSON-193686055 tempest-TenantUsagesTestJSON-193686055-project-member] Image limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 607.889078] env[62070]: DEBUG nova.virt.hardware [None req-235f2fca-0298-44f0-b867-2cd232992478 tempest-TenantUsagesTestJSON-193686055 tempest-TenantUsagesTestJSON-193686055-project-member] Flavor pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 607.889959] env[62070]: DEBUG nova.virt.hardware [None req-235f2fca-0298-44f0-b867-2cd232992478 tempest-TenantUsagesTestJSON-193686055 tempest-TenantUsagesTestJSON-193686055-project-member] Image pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 607.890118] env[62070]: DEBUG nova.virt.hardware [None req-235f2fca-0298-44f0-b867-2cd232992478 tempest-TenantUsagesTestJSON-193686055 tempest-TenantUsagesTestJSON-193686055-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 607.890703] env[62070]: DEBUG nova.virt.hardware [None req-235f2fca-0298-44f0-b867-2cd232992478 tempest-TenantUsagesTestJSON-193686055 tempest-TenantUsagesTestJSON-193686055-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 607.890703] env[62070]: DEBUG nova.virt.hardware [None req-235f2fca-0298-44f0-b867-2cd232992478 tempest-TenantUsagesTestJSON-193686055 tempest-TenantUsagesTestJSON-193686055-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 607.890703] env[62070]: DEBUG nova.virt.hardware [None req-235f2fca-0298-44f0-b867-2cd232992478 
tempest-TenantUsagesTestJSON-193686055 tempest-TenantUsagesTestJSON-193686055-project-member] Got 1 possible topologies {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 607.890703] env[62070]: DEBUG nova.virt.hardware [None req-235f2fca-0298-44f0-b867-2cd232992478 tempest-TenantUsagesTestJSON-193686055 tempest-TenantUsagesTestJSON-193686055-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 607.890912] env[62070]: DEBUG nova.virt.hardware [None req-235f2fca-0298-44f0-b867-2cd232992478 tempest-TenantUsagesTestJSON-193686055 tempest-TenantUsagesTestJSON-193686055-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 607.896606] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-445697c0-7515-44c4-9729-5479eed9121e {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.902959] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73454a57-3199-4c99-bb68-76c5069d8e4b {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.937324] env[62070]: DEBUG oslo_concurrency.lockutils [req-44151e7c-d317-403d-807c-f23dddcc18b7 req-755bc149-7596-4aad-8262-35190c6e069a service nova] Releasing lock "refresh_cache-7d8d4cdc-2b25-46f3-8bc0-1de22fdd005a" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 607.937680] env[62070]: DEBUG oslo_concurrency.lockutils [None req-d154b157-ec97-49c4-b004-fc679da21516 tempest-ServersWithSpecificFlavorTestJSON-1618728433 tempest-ServersWithSpecificFlavorTestJSON-1618728433-project-member] Acquired lock "refresh_cache-7d8d4cdc-2b25-46f3-8bc0-1de22fdd005a" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 607.937680] env[62070]: DEBUG nova.network.neutron [None req-d154b157-ec97-49c4-b004-fc679da21516 tempest-ServersWithSpecificFlavorTestJSON-1618728433 tempest-ServersWithSpecificFlavorTestJSON-1618728433-project-member] [instance: 7d8d4cdc-2b25-46f3-8bc0-1de22fdd005a] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 608.354020] env[62070]: DEBUG nova.network.neutron [None req-7301dc1c-6dd9-44f9-a50e-255658085cb3 tempest-ServerDiagnosticsTest-24419400 tempest-ServerDiagnosticsTest-24419400-project-member] [instance: 317f20e9-6ba1-4b41-b298-5dd844f323ac] Instance cache missing network info. {{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 608.483134] env[62070]: DEBUG nova.network.neutron [None req-d154b157-ec97-49c4-b004-fc679da21516 tempest-ServersWithSpecificFlavorTestJSON-1618728433 tempest-ServersWithSpecificFlavorTestJSON-1618728433-project-member] [instance: 7d8d4cdc-2b25-46f3-8bc0-1de22fdd005a] Instance cache missing network info. 
{{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 608.494159] env[62070]: DEBUG nova.network.neutron [None req-7301dc1c-6dd9-44f9-a50e-255658085cb3 tempest-ServerDiagnosticsTest-24419400 tempest-ServerDiagnosticsTest-24419400-project-member] [instance: 317f20e9-6ba1-4b41-b298-5dd844f323ac] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 608.716946] env[62070]: DEBUG nova.network.neutron [None req-d154b157-ec97-49c4-b004-fc679da21516 tempest-ServersWithSpecificFlavorTestJSON-1618728433 tempest-ServersWithSpecificFlavorTestJSON-1618728433-project-member] [instance: 7d8d4cdc-2b25-46f3-8bc0-1de22fdd005a] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 608.787617] env[62070]: DEBUG nova.compute.manager [req-b79ad472-b066-4d3c-9ada-4ecdae700477 req-ad87d70e-717e-4d9d-a637-5dd8d8ba2cc2 service nova] [instance: 7d8d4cdc-2b25-46f3-8bc0-1de22fdd005a] Received event network-vif-deleted-de7507a7-5aad-4849-8609-8ff49b3f040e {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 608.996095] env[62070]: DEBUG oslo_concurrency.lockutils [None req-7301dc1c-6dd9-44f9-a50e-255658085cb3 tempest-ServerDiagnosticsTest-24419400 tempest-ServerDiagnosticsTest-24419400-project-member] Releasing lock "refresh_cache-317f20e9-6ba1-4b41-b298-5dd844f323ac" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 608.996560] env[62070]: DEBUG nova.compute.manager [None req-7301dc1c-6dd9-44f9-a50e-255658085cb3 tempest-ServerDiagnosticsTest-24419400 tempest-ServerDiagnosticsTest-24419400-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62070) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 608.999344] env[62070]: DEBUG nova.compute.manager [None req-7301dc1c-6dd9-44f9-a50e-255658085cb3 tempest-ServerDiagnosticsTest-24419400 tempest-ServerDiagnosticsTest-24419400-project-member] [instance: 317f20e9-6ba1-4b41-b298-5dd844f323ac] Deallocating network for instance {{(pid=62070) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 608.999344] env[62070]: DEBUG nova.network.neutron [None req-7301dc1c-6dd9-44f9-a50e-255658085cb3 tempest-ServerDiagnosticsTest-24419400 tempest-ServerDiagnosticsTest-24419400-project-member] [instance: 317f20e9-6ba1-4b41-b298-5dd844f323ac] deallocate_for_instance() {{(pid=62070) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 609.035831] env[62070]: DEBUG nova.network.neutron [None req-7301dc1c-6dd9-44f9-a50e-255658085cb3 tempest-ServerDiagnosticsTest-24419400 tempest-ServerDiagnosticsTest-24419400-project-member] [instance: 317f20e9-6ba1-4b41-b298-5dd844f323ac] Instance cache missing network info. 
{{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 609.224337] env[62070]: DEBUG oslo_concurrency.lockutils [None req-d154b157-ec97-49c4-b004-fc679da21516 tempest-ServersWithSpecificFlavorTestJSON-1618728433 tempest-ServersWithSpecificFlavorTestJSON-1618728433-project-member] Releasing lock "refresh_cache-7d8d4cdc-2b25-46f3-8bc0-1de22fdd005a" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 609.224785] env[62070]: DEBUG nova.compute.manager [None req-d154b157-ec97-49c4-b004-fc679da21516 tempest-ServersWithSpecificFlavorTestJSON-1618728433 tempest-ServersWithSpecificFlavorTestJSON-1618728433-project-member] [instance: 7d8d4cdc-2b25-46f3-8bc0-1de22fdd005a] Start destroying the instance on the hypervisor. {{(pid=62070) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 609.224974] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-d154b157-ec97-49c4-b004-fc679da21516 tempest-ServersWithSpecificFlavorTestJSON-1618728433 tempest-ServersWithSpecificFlavorTestJSON-1618728433-project-member] [instance: 7d8d4cdc-2b25-46f3-8bc0-1de22fdd005a] Destroying instance {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 609.228688] env[62070]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-7613169e-cb10-4a42-a201-d7b4ad1bb384 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.242969] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a911097-f75f-47c1-a44e-dd9b6c3d8e31 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.269091] env[62070]: WARNING nova.virt.vmwareapi.vmops [None req-d154b157-ec97-49c4-b004-fc679da21516 tempest-ServersWithSpecificFlavorTestJSON-1618728433 tempest-ServersWithSpecificFlavorTestJSON-1618728433-project-member] [instance: 7d8d4cdc-2b25-46f3-8bc0-1de22fdd005a] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 7d8d4cdc-2b25-46f3-8bc0-1de22fdd005a could not be found. [ 609.269091] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-d154b157-ec97-49c4-b004-fc679da21516 tempest-ServersWithSpecificFlavorTestJSON-1618728433 tempest-ServersWithSpecificFlavorTestJSON-1618728433-project-member] [instance: 7d8d4cdc-2b25-46f3-8bc0-1de22fdd005a] Instance destroyed {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 609.269264] env[62070]: INFO nova.compute.manager [None req-d154b157-ec97-49c4-b004-fc679da21516 tempest-ServersWithSpecificFlavorTestJSON-1618728433 tempest-ServersWithSpecificFlavorTestJSON-1618728433-project-member] [instance: 7d8d4cdc-2b25-46f3-8bc0-1de22fdd005a] Took 0.04 seconds to destroy the instance on the hypervisor. [ 609.269584] env[62070]: DEBUG oslo.service.loopingcall [None req-d154b157-ec97-49c4-b004-fc679da21516 tempest-ServersWithSpecificFlavorTestJSON-1618728433 tempest-ServersWithSpecificFlavorTestJSON-1618728433-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 609.269845] env[62070]: DEBUG nova.compute.manager [-] [instance: 7d8d4cdc-2b25-46f3-8bc0-1de22fdd005a] Deallocating network for instance {{(pid=62070) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 609.269954] env[62070]: DEBUG nova.network.neutron [-] [instance: 7d8d4cdc-2b25-46f3-8bc0-1de22fdd005a] deallocate_for_instance() {{(pid=62070) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 609.302047] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07d1515d-96e9-4bf9-a951-b1228e1b3046 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.311026] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0c29578-bfc0-4e52-b01c-a462a2816d0a {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.316157] env[62070]: DEBUG nova.network.neutron [-] [instance: 7d8d4cdc-2b25-46f3-8bc0-1de22fdd005a] Instance cache missing network info. {{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 609.350419] env[62070]: ERROR nova.compute.manager [None req-235f2fca-0298-44f0-b867-2cd232992478 tempest-TenantUsagesTestJSON-193686055 tempest-TenantUsagesTestJSON-193686055-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 9037323c-72d6-4bfa-ad3e-0a3d37347560, please check neutron logs for more information. [ 609.350419] env[62070]: ERROR nova.compute.manager Traceback (most recent call last): [ 609.350419] env[62070]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 609.350419] env[62070]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 609.350419] env[62070]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 609.350419] env[62070]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 609.350419] env[62070]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 609.350419] env[62070]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 609.350419] env[62070]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 609.350419] env[62070]: ERROR nova.compute.manager self.force_reraise() [ 609.350419] env[62070]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 609.350419] env[62070]: ERROR nova.compute.manager raise self.value [ 609.350419] env[62070]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 609.350419] env[62070]: ERROR nova.compute.manager updated_port = self._update_port( [ 609.350419] env[62070]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 609.350419] env[62070]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 609.351057] env[62070]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 609.351057] env[62070]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 609.351057] env[62070]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 9037323c-72d6-4bfa-ad3e-0a3d37347560, please check neutron logs for more information. [ 609.351057] env[62070]: ERROR nova.compute.manager [ 609.351324] env[62070]: Traceback (most recent call last): [ 609.351324] env[62070]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 609.351324] env[62070]: listener.cb(fileno) [ 609.351324] env[62070]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 609.351324] env[62070]: result = function(*args, **kwargs) [ 609.351324] env[62070]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 609.351324] env[62070]: return func(*args, **kwargs) [ 609.351324] env[62070]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 609.351324] env[62070]: raise e [ 609.351324] env[62070]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 609.351324] env[62070]: nwinfo = self.network_api.allocate_for_instance( [ 609.351324] env[62070]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 609.351324] env[62070]: created_port_ids = self._update_ports_for_instance( [ 609.351324] env[62070]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 609.351324] env[62070]: with excutils.save_and_reraise_exception(): [ 609.351324] env[62070]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 609.351324] env[62070]: self.force_reraise() [ 609.351324] env[62070]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 609.351324] env[62070]: raise self.value [ 609.351324] env[62070]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 609.351324] env[62070]: updated_port = self._update_port( [ 609.351324] env[62070]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 609.351324] env[62070]: _ensure_no_port_binding_failure(port) [ 609.351324] env[62070]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 609.351324] env[62070]: raise exception.PortBindingFailed(port_id=port['id']) [ 609.351324] env[62070]: nova.exception.PortBindingFailed: Binding failed for port 9037323c-72d6-4bfa-ad3e-0a3d37347560, please check neutron logs for more information. [ 609.351324] env[62070]: Removing descriptor: 16 [ 609.353874] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9423e861-52be-47bd-ab71-77755a9aecb1 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.356981] env[62070]: ERROR nova.compute.manager [None req-235f2fca-0298-44f0-b867-2cd232992478 tempest-TenantUsagesTestJSON-193686055 tempest-TenantUsagesTestJSON-193686055-project-member] [instance: 2226072d-16f2-4ea1-a56c-d866554c7379] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 9037323c-72d6-4bfa-ad3e-0a3d37347560, please check neutron logs for more information. 
[ 609.356981] env[62070]: ERROR nova.compute.manager [instance: 2226072d-16f2-4ea1-a56c-d866554c7379] Traceback (most recent call last): [ 609.356981] env[62070]: ERROR nova.compute.manager [instance: 2226072d-16f2-4ea1-a56c-d866554c7379] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 609.356981] env[62070]: ERROR nova.compute.manager [instance: 2226072d-16f2-4ea1-a56c-d866554c7379] yield resources [ 609.356981] env[62070]: ERROR nova.compute.manager [instance: 2226072d-16f2-4ea1-a56c-d866554c7379] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 609.356981] env[62070]: ERROR nova.compute.manager [instance: 2226072d-16f2-4ea1-a56c-d866554c7379] self.driver.spawn(context, instance, image_meta, [ 609.356981] env[62070]: ERROR nova.compute.manager [instance: 2226072d-16f2-4ea1-a56c-d866554c7379] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 609.356981] env[62070]: ERROR nova.compute.manager [instance: 2226072d-16f2-4ea1-a56c-d866554c7379] self._vmops.spawn(context, instance, image_meta, injected_files, [ 609.356981] env[62070]: ERROR nova.compute.manager [instance: 2226072d-16f2-4ea1-a56c-d866554c7379] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 609.356981] env[62070]: ERROR nova.compute.manager [instance: 2226072d-16f2-4ea1-a56c-d866554c7379] vm_ref = self.build_virtual_machine(instance, [ 609.356981] env[62070]: ERROR nova.compute.manager [instance: 2226072d-16f2-4ea1-a56c-d866554c7379] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 609.357400] env[62070]: ERROR nova.compute.manager [instance: 2226072d-16f2-4ea1-a56c-d866554c7379] vif_infos = vmwarevif.get_vif_info(self._session, [ 609.357400] env[62070]: ERROR nova.compute.manager [instance: 2226072d-16f2-4ea1-a56c-d866554c7379] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 609.357400] env[62070]: ERROR nova.compute.manager [instance: 2226072d-16f2-4ea1-a56c-d866554c7379] for vif in network_info: [ 609.357400] env[62070]: ERROR nova.compute.manager [instance: 2226072d-16f2-4ea1-a56c-d866554c7379] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 609.357400] env[62070]: ERROR nova.compute.manager [instance: 2226072d-16f2-4ea1-a56c-d866554c7379] return self._sync_wrapper(fn, *args, **kwargs) [ 609.357400] env[62070]: ERROR nova.compute.manager [instance: 2226072d-16f2-4ea1-a56c-d866554c7379] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 609.357400] env[62070]: ERROR nova.compute.manager [instance: 2226072d-16f2-4ea1-a56c-d866554c7379] self.wait() [ 609.357400] env[62070]: ERROR nova.compute.manager [instance: 2226072d-16f2-4ea1-a56c-d866554c7379] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 609.357400] env[62070]: ERROR nova.compute.manager [instance: 2226072d-16f2-4ea1-a56c-d866554c7379] self[:] = self._gt.wait() [ 609.357400] env[62070]: ERROR nova.compute.manager [instance: 2226072d-16f2-4ea1-a56c-d866554c7379] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 609.357400] env[62070]: ERROR nova.compute.manager [instance: 2226072d-16f2-4ea1-a56c-d866554c7379] return self._exit_event.wait() [ 609.357400] env[62070]: ERROR nova.compute.manager [instance: 2226072d-16f2-4ea1-a56c-d866554c7379] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 609.357400] env[62070]: ERROR 
nova.compute.manager [instance: 2226072d-16f2-4ea1-a56c-d866554c7379] result = hub.switch() [ 609.357893] env[62070]: ERROR nova.compute.manager [instance: 2226072d-16f2-4ea1-a56c-d866554c7379] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 609.357893] env[62070]: ERROR nova.compute.manager [instance: 2226072d-16f2-4ea1-a56c-d866554c7379] return self.greenlet.switch() [ 609.357893] env[62070]: ERROR nova.compute.manager [instance: 2226072d-16f2-4ea1-a56c-d866554c7379] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 609.357893] env[62070]: ERROR nova.compute.manager [instance: 2226072d-16f2-4ea1-a56c-d866554c7379] result = function(*args, **kwargs) [ 609.357893] env[62070]: ERROR nova.compute.manager [instance: 2226072d-16f2-4ea1-a56c-d866554c7379] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 609.357893] env[62070]: ERROR nova.compute.manager [instance: 2226072d-16f2-4ea1-a56c-d866554c7379] return func(*args, **kwargs) [ 609.357893] env[62070]: ERROR nova.compute.manager [instance: 2226072d-16f2-4ea1-a56c-d866554c7379] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 609.357893] env[62070]: ERROR nova.compute.manager [instance: 2226072d-16f2-4ea1-a56c-d866554c7379] raise e [ 609.357893] env[62070]: ERROR nova.compute.manager [instance: 2226072d-16f2-4ea1-a56c-d866554c7379] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 609.357893] env[62070]: ERROR nova.compute.manager [instance: 2226072d-16f2-4ea1-a56c-d866554c7379] nwinfo = self.network_api.allocate_for_instance( [ 609.357893] env[62070]: ERROR nova.compute.manager [instance: 2226072d-16f2-4ea1-a56c-d866554c7379] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 609.357893] env[62070]: ERROR nova.compute.manager [instance: 2226072d-16f2-4ea1-a56c-d866554c7379] created_port_ids = self._update_ports_for_instance( [ 609.357893] env[62070]: ERROR nova.compute.manager [instance: 2226072d-16f2-4ea1-a56c-d866554c7379] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 609.358345] env[62070]: ERROR nova.compute.manager [instance: 2226072d-16f2-4ea1-a56c-d866554c7379] with excutils.save_and_reraise_exception(): [ 609.358345] env[62070]: ERROR nova.compute.manager [instance: 2226072d-16f2-4ea1-a56c-d866554c7379] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 609.358345] env[62070]: ERROR nova.compute.manager [instance: 2226072d-16f2-4ea1-a56c-d866554c7379] self.force_reraise() [ 609.358345] env[62070]: ERROR nova.compute.manager [instance: 2226072d-16f2-4ea1-a56c-d866554c7379] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 609.358345] env[62070]: ERROR nova.compute.manager [instance: 2226072d-16f2-4ea1-a56c-d866554c7379] raise self.value [ 609.358345] env[62070]: ERROR nova.compute.manager [instance: 2226072d-16f2-4ea1-a56c-d866554c7379] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 609.358345] env[62070]: ERROR nova.compute.manager [instance: 2226072d-16f2-4ea1-a56c-d866554c7379] updated_port = self._update_port( [ 609.358345] env[62070]: ERROR nova.compute.manager [instance: 2226072d-16f2-4ea1-a56c-d866554c7379] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 609.358345] 
env[62070]: ERROR nova.compute.manager [instance: 2226072d-16f2-4ea1-a56c-d866554c7379] _ensure_no_port_binding_failure(port) [ 609.358345] env[62070]: ERROR nova.compute.manager [instance: 2226072d-16f2-4ea1-a56c-d866554c7379] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 609.358345] env[62070]: ERROR nova.compute.manager [instance: 2226072d-16f2-4ea1-a56c-d866554c7379] raise exception.PortBindingFailed(port_id=port['id']) [ 609.358345] env[62070]: ERROR nova.compute.manager [instance: 2226072d-16f2-4ea1-a56c-d866554c7379] nova.exception.PortBindingFailed: Binding failed for port 9037323c-72d6-4bfa-ad3e-0a3d37347560, please check neutron logs for more information. [ 609.358345] env[62070]: ERROR nova.compute.manager [instance: 2226072d-16f2-4ea1-a56c-d866554c7379] [ 609.358799] env[62070]: INFO nova.compute.manager [None req-235f2fca-0298-44f0-b867-2cd232992478 tempest-TenantUsagesTestJSON-193686055 tempest-TenantUsagesTestJSON-193686055-project-member] [instance: 2226072d-16f2-4ea1-a56c-d866554c7379] Terminating instance [ 609.362337] env[62070]: DEBUG oslo_concurrency.lockutils [None req-235f2fca-0298-44f0-b867-2cd232992478 tempest-TenantUsagesTestJSON-193686055 tempest-TenantUsagesTestJSON-193686055-project-member] Acquiring lock "refresh_cache-2226072d-16f2-4ea1-a56c-d866554c7379" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 609.362337] env[62070]: DEBUG oslo_concurrency.lockutils [None req-235f2fca-0298-44f0-b867-2cd232992478 tempest-TenantUsagesTestJSON-193686055 tempest-TenantUsagesTestJSON-193686055-project-member] Acquired lock "refresh_cache-2226072d-16f2-4ea1-a56c-d866554c7379" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 609.362337] env[62070]: DEBUG nova.network.neutron [None req-235f2fca-0298-44f0-b867-2cd232992478 tempest-TenantUsagesTestJSON-193686055 tempest-TenantUsagesTestJSON-193686055-project-member] [instance: 2226072d-16f2-4ea1-a56c-d866554c7379] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 609.367793] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b85e6901-95db-418d-b47c-49603f825fba {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.384923] env[62070]: DEBUG nova.compute.provider_tree [None req-67d8c644-2517-46bc-9d7f-c0ef47d3d2eb tempest-ImagesOneServerNegativeTestJSON-1867967444 tempest-ImagesOneServerNegativeTestJSON-1867967444-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 609.540816] env[62070]: DEBUG nova.network.neutron [None req-7301dc1c-6dd9-44f9-a50e-255658085cb3 tempest-ServerDiagnosticsTest-24419400 tempest-ServerDiagnosticsTest-24419400-project-member] [instance: 317f20e9-6ba1-4b41-b298-5dd844f323ac] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 609.820110] env[62070]: DEBUG nova.network.neutron [-] [instance: 7d8d4cdc-2b25-46f3-8bc0-1de22fdd005a] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 609.885267] env[62070]: DEBUG 
nova.network.neutron [None req-235f2fca-0298-44f0-b867-2cd232992478 tempest-TenantUsagesTestJSON-193686055 tempest-TenantUsagesTestJSON-193686055-project-member] [instance: 2226072d-16f2-4ea1-a56c-d866554c7379] Instance cache missing network info. {{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 609.894271] env[62070]: DEBUG nova.scheduler.client.report [None req-67d8c644-2517-46bc-9d7f-c0ef47d3d2eb tempest-ImagesOneServerNegativeTestJSON-1867967444 tempest-ImagesOneServerNegativeTestJSON-1867967444-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 609.988541] env[62070]: DEBUG nova.network.neutron [None req-235f2fca-0298-44f0-b867-2cd232992478 tempest-TenantUsagesTestJSON-193686055 tempest-TenantUsagesTestJSON-193686055-project-member] [instance: 2226072d-16f2-4ea1-a56c-d866554c7379] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 610.048482] env[62070]: INFO nova.compute.manager [None req-7301dc1c-6dd9-44f9-a50e-255658085cb3 tempest-ServerDiagnosticsTest-24419400 tempest-ServerDiagnosticsTest-24419400-project-member] [instance: 317f20e9-6ba1-4b41-b298-5dd844f323ac] Took 1.05 seconds to deallocate network for instance. [ 610.325571] env[62070]: INFO nova.compute.manager [-] [instance: 7d8d4cdc-2b25-46f3-8bc0-1de22fdd005a] Took 1.06 seconds to deallocate network for instance. [ 610.328821] env[62070]: DEBUG nova.compute.claims [None req-d154b157-ec97-49c4-b004-fc679da21516 tempest-ServersWithSpecificFlavorTestJSON-1618728433 tempest-ServersWithSpecificFlavorTestJSON-1618728433-project-member] [instance: 7d8d4cdc-2b25-46f3-8bc0-1de22fdd005a] Aborting claim: {{(pid=62070) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 610.329279] env[62070]: DEBUG oslo_concurrency.lockutils [None req-d154b157-ec97-49c4-b004-fc679da21516 tempest-ServersWithSpecificFlavorTestJSON-1618728433 tempest-ServersWithSpecificFlavorTestJSON-1618728433-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 610.399143] env[62070]: DEBUG oslo_concurrency.lockutils [None req-67d8c644-2517-46bc-9d7f-c0ef47d3d2eb tempest-ImagesOneServerNegativeTestJSON-1867967444 tempest-ImagesOneServerNegativeTestJSON-1867967444-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.574s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 610.399618] env[62070]: DEBUG nova.compute.manager [None req-67d8c644-2517-46bc-9d7f-c0ef47d3d2eb tempest-ImagesOneServerNegativeTestJSON-1867967444 tempest-ImagesOneServerNegativeTestJSON-1867967444-project-member] [instance: d8478b63-3a62-4afa-950b-edf9774e8ea8] Start building networks asynchronously for instance. 
{{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 610.407906] env[62070]: DEBUG oslo_concurrency.lockutils [None req-fceeb460-143a-4ad7-bde1-61f1f32cf593 tempest-VolumesAssistedSnapshotsTest-120562102 tempest-VolumesAssistedSnapshotsTest-120562102-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 24.135s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 610.410028] env[62070]: INFO nova.compute.claims [None req-fceeb460-143a-4ad7-bde1-61f1f32cf593 tempest-VolumesAssistedSnapshotsTest-120562102 tempest-VolumesAssistedSnapshotsTest-120562102-project-member] [instance: d7a90be3-d3d6-4626-944b-b907cf7fb64d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 610.496160] env[62070]: DEBUG oslo_concurrency.lockutils [None req-235f2fca-0298-44f0-b867-2cd232992478 tempest-TenantUsagesTestJSON-193686055 tempest-TenantUsagesTestJSON-193686055-project-member] Releasing lock "refresh_cache-2226072d-16f2-4ea1-a56c-d866554c7379" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 610.496160] env[62070]: DEBUG nova.compute.manager [None req-235f2fca-0298-44f0-b867-2cd232992478 tempest-TenantUsagesTestJSON-193686055 tempest-TenantUsagesTestJSON-193686055-project-member] [instance: 2226072d-16f2-4ea1-a56c-d866554c7379] Start destroying the instance on the hypervisor. {{(pid=62070) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 610.497179] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-235f2fca-0298-44f0-b867-2cd232992478 tempest-TenantUsagesTestJSON-193686055 tempest-TenantUsagesTestJSON-193686055-project-member] [instance: 2226072d-16f2-4ea1-a56c-d866554c7379] Destroying instance {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 610.497179] env[62070]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-7798ec10-27d3-4d12-957d-c34dd429e8d5 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.509920] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51ee3fc9-1630-4939-a99c-826c2de2b574 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.540788] env[62070]: WARNING nova.virt.vmwareapi.vmops [None req-235f2fca-0298-44f0-b867-2cd232992478 tempest-TenantUsagesTestJSON-193686055 tempest-TenantUsagesTestJSON-193686055-project-member] [instance: 2226072d-16f2-4ea1-a56c-d866554c7379] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 2226072d-16f2-4ea1-a56c-d866554c7379 could not be found. [ 610.540965] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-235f2fca-0298-44f0-b867-2cd232992478 tempest-TenantUsagesTestJSON-193686055 tempest-TenantUsagesTestJSON-193686055-project-member] [instance: 2226072d-16f2-4ea1-a56c-d866554c7379] Instance destroyed {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 610.540965] env[62070]: INFO nova.compute.manager [None req-235f2fca-0298-44f0-b867-2cd232992478 tempest-TenantUsagesTestJSON-193686055 tempest-TenantUsagesTestJSON-193686055-project-member] [instance: 2226072d-16f2-4ea1-a56c-d866554c7379] Took 0.04 seconds to destroy the instance on the hypervisor. 
[ 610.542242] env[62070]: DEBUG oslo.service.loopingcall [None req-235f2fca-0298-44f0-b867-2cd232992478 tempest-TenantUsagesTestJSON-193686055 tempest-TenantUsagesTestJSON-193686055-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 610.542242] env[62070]: DEBUG nova.compute.manager [-] [instance: 2226072d-16f2-4ea1-a56c-d866554c7379] Deallocating network for instance {{(pid=62070) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 610.542358] env[62070]: DEBUG nova.network.neutron [-] [instance: 2226072d-16f2-4ea1-a56c-d866554c7379] deallocate_for_instance() {{(pid=62070) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 610.730777] env[62070]: DEBUG nova.network.neutron [-] [instance: 2226072d-16f2-4ea1-a56c-d866554c7379] Instance cache missing network info. {{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 610.919996] env[62070]: DEBUG nova.compute.utils [None req-67d8c644-2517-46bc-9d7f-c0ef47d3d2eb tempest-ImagesOneServerNegativeTestJSON-1867967444 tempest-ImagesOneServerNegativeTestJSON-1867967444-project-member] Using /dev/sd instead of None {{(pid=62070) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 610.925432] env[62070]: DEBUG nova.compute.manager [None req-67d8c644-2517-46bc-9d7f-c0ef47d3d2eb tempest-ImagesOneServerNegativeTestJSON-1867967444 tempest-ImagesOneServerNegativeTestJSON-1867967444-project-member] [instance: d8478b63-3a62-4afa-950b-edf9774e8ea8] Allocating IP information in the background. {{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 610.925432] env[62070]: DEBUG nova.network.neutron [None req-67d8c644-2517-46bc-9d7f-c0ef47d3d2eb tempest-ImagesOneServerNegativeTestJSON-1867967444 tempest-ImagesOneServerNegativeTestJSON-1867967444-project-member] [instance: d8478b63-3a62-4afa-950b-edf9774e8ea8] allocate_for_instance() {{(pid=62070) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 611.061850] env[62070]: DEBUG nova.policy [None req-67d8c644-2517-46bc-9d7f-c0ef47d3d2eb tempest-ImagesOneServerNegativeTestJSON-1867967444 tempest-ImagesOneServerNegativeTestJSON-1867967444-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8b2d0e3dbb964adca5dfe5305067dc52', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ef201583058f4348947c3ff51bdd9e45', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62070) authorize /opt/stack/nova/nova/policy.py:201}} [ 611.102285] env[62070]: INFO nova.scheduler.client.report [None req-7301dc1c-6dd9-44f9-a50e-255658085cb3 tempest-ServerDiagnosticsTest-24419400 tempest-ServerDiagnosticsTest-24419400-project-member] Deleted allocations for instance 317f20e9-6ba1-4b41-b298-5dd844f323ac [ 611.234534] env[62070]: DEBUG nova.network.neutron [-] [instance: 2226072d-16f2-4ea1-a56c-d866554c7379] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 611.236068] env[62070]: DEBUG oslo_concurrency.lockutils [None 
req-22f6b4d0-98d5-439d-a2a8-8f5bb53172a4 tempest-ServerTagsTestJSON-107248279 tempest-ServerTagsTestJSON-107248279-project-member] Acquiring lock "30d782e4-30c7-41f6-b30d-95a9a59cf83c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 611.236068] env[62070]: DEBUG oslo_concurrency.lockutils [None req-22f6b4d0-98d5-439d-a2a8-8f5bb53172a4 tempest-ServerTagsTestJSON-107248279 tempest-ServerTagsTestJSON-107248279-project-member] Lock "30d782e4-30c7-41f6-b30d-95a9a59cf83c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 611.338213] env[62070]: DEBUG nova.compute.manager [req-5c80f8f1-01a7-4b57-9de0-7c5f7f79cb41 req-63e131c5-232e-4329-9a88-2b7c9721db34 service nova] [instance: 2226072d-16f2-4ea1-a56c-d866554c7379] Received event network-changed-9037323c-72d6-4bfa-ad3e-0a3d37347560 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 611.338213] env[62070]: DEBUG nova.compute.manager [req-5c80f8f1-01a7-4b57-9de0-7c5f7f79cb41 req-63e131c5-232e-4329-9a88-2b7c9721db34 service nova] [instance: 2226072d-16f2-4ea1-a56c-d866554c7379] Refreshing instance network info cache due to event network-changed-9037323c-72d6-4bfa-ad3e-0a3d37347560. {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 611.338213] env[62070]: DEBUG oslo_concurrency.lockutils [req-5c80f8f1-01a7-4b57-9de0-7c5f7f79cb41 req-63e131c5-232e-4329-9a88-2b7c9721db34 service nova] Acquiring lock "refresh_cache-2226072d-16f2-4ea1-a56c-d866554c7379" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 611.338213] env[62070]: DEBUG oslo_concurrency.lockutils [req-5c80f8f1-01a7-4b57-9de0-7c5f7f79cb41 req-63e131c5-232e-4329-9a88-2b7c9721db34 service nova] Acquired lock "refresh_cache-2226072d-16f2-4ea1-a56c-d866554c7379" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 611.338213] env[62070]: DEBUG nova.network.neutron [req-5c80f8f1-01a7-4b57-9de0-7c5f7f79cb41 req-63e131c5-232e-4329-9a88-2b7c9721db34 service nova] [instance: 2226072d-16f2-4ea1-a56c-d866554c7379] Refreshing network info cache for port 9037323c-72d6-4bfa-ad3e-0a3d37347560 {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 611.426053] env[62070]: DEBUG nova.compute.manager [None req-67d8c644-2517-46bc-9d7f-c0ef47d3d2eb tempest-ImagesOneServerNegativeTestJSON-1867967444 tempest-ImagesOneServerNegativeTestJSON-1867967444-project-member] [instance: d8478b63-3a62-4afa-950b-edf9774e8ea8] Start building block device mappings for instance. 
{{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 611.613187] env[62070]: DEBUG oslo_concurrency.lockutils [None req-7301dc1c-6dd9-44f9-a50e-255658085cb3 tempest-ServerDiagnosticsTest-24419400 tempest-ServerDiagnosticsTest-24419400-project-member] Lock "317f20e9-6ba1-4b41-b298-5dd844f323ac" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 53.226s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 611.719151] env[62070]: DEBUG oslo_concurrency.lockutils [None req-9903219f-0926-4eb2-b4c0-4bfda725a93d tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Acquiring lock "67e99ada-a8e6-4034-b19b-5b2cb883b735" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 611.719576] env[62070]: DEBUG oslo_concurrency.lockutils [None req-9903219f-0926-4eb2-b4c0-4bfda725a93d tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Lock "67e99ada-a8e6-4034-b19b-5b2cb883b735" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 611.728551] env[62070]: DEBUG nova.network.neutron [None req-67d8c644-2517-46bc-9d7f-c0ef47d3d2eb tempest-ImagesOneServerNegativeTestJSON-1867967444 tempest-ImagesOneServerNegativeTestJSON-1867967444-project-member] [instance: d8478b63-3a62-4afa-950b-edf9774e8ea8] Successfully created port: 8016a15a-fbb1-4dfd-8dba-234cee68c43c {{(pid=62070) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 611.740146] env[62070]: INFO nova.compute.manager [-] [instance: 2226072d-16f2-4ea1-a56c-d866554c7379] Took 1.20 seconds to deallocate network for instance. [ 611.742136] env[62070]: DEBUG nova.compute.claims [None req-235f2fca-0298-44f0-b867-2cd232992478 tempest-TenantUsagesTestJSON-193686055 tempest-TenantUsagesTestJSON-193686055-project-member] [instance: 2226072d-16f2-4ea1-a56c-d866554c7379] Aborting claim: {{(pid=62070) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 611.742855] env[62070]: DEBUG oslo_concurrency.lockutils [None req-235f2fca-0298-44f0-b867-2cd232992478 tempest-TenantUsagesTestJSON-193686055 tempest-TenantUsagesTestJSON-193686055-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 611.858437] env[62070]: DEBUG nova.network.neutron [req-5c80f8f1-01a7-4b57-9de0-7c5f7f79cb41 req-63e131c5-232e-4329-9a88-2b7c9721db34 service nova] [instance: 2226072d-16f2-4ea1-a56c-d866554c7379] Instance cache missing network info. 
{{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 611.908138] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60c5cdd5-ec80-455e-8e89-9aa6ed566603 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.922480] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-098d4cec-d82e-4367-bc14-b93894a09689 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.936637] env[62070]: DEBUG nova.network.neutron [req-5c80f8f1-01a7-4b57-9de0-7c5f7f79cb41 req-63e131c5-232e-4329-9a88-2b7c9721db34 service nova] [instance: 2226072d-16f2-4ea1-a56c-d866554c7379] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 611.968313] env[62070]: DEBUG oslo_concurrency.lockutils [req-5c80f8f1-01a7-4b57-9de0-7c5f7f79cb41 req-63e131c5-232e-4329-9a88-2b7c9721db34 service nova] Releasing lock "refresh_cache-2226072d-16f2-4ea1-a56c-d866554c7379" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 611.968587] env[62070]: DEBUG nova.compute.manager [req-5c80f8f1-01a7-4b57-9de0-7c5f7f79cb41 req-63e131c5-232e-4329-9a88-2b7c9721db34 service nova] [instance: 2226072d-16f2-4ea1-a56c-d866554c7379] Received event network-vif-deleted-9037323c-72d6-4bfa-ad3e-0a3d37347560 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 611.969480] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b419e8f8-fc3f-442f-9856-022bc71ff078 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.981381] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-342e180d-6d1e-4ed5-bc58-a32408db1357 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.996961] env[62070]: DEBUG nova.compute.provider_tree [None req-fceeb460-143a-4ad7-bde1-61f1f32cf593 tempest-VolumesAssistedSnapshotsTest-120562102 tempest-VolumesAssistedSnapshotsTest-120562102-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 612.119699] env[62070]: DEBUG nova.compute.manager [None req-defc523d-479c-483f-91ae-c073714ce1fe tempest-FloatingIPsAssociationNegativeTestJSON-743310595 tempest-FloatingIPsAssociationNegativeTestJSON-743310595-project-member] [instance: 73cb4a44-a4d9-4c8c-8472-f99b5d449cec] Starting instance... {{(pid=62070) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 612.469348] env[62070]: DEBUG nova.compute.manager [None req-67d8c644-2517-46bc-9d7f-c0ef47d3d2eb tempest-ImagesOneServerNegativeTestJSON-1867967444 tempest-ImagesOneServerNegativeTestJSON-1867967444-project-member] [instance: d8478b63-3a62-4afa-950b-edf9774e8ea8] Start spawning the instance on the hypervisor. 
{{(pid=62070) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 612.503207] env[62070]: DEBUG nova.scheduler.client.report [None req-fceeb460-143a-4ad7-bde1-61f1f32cf593 tempest-VolumesAssistedSnapshotsTest-120562102 tempest-VolumesAssistedSnapshotsTest-120562102-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 612.517149] env[62070]: DEBUG nova.virt.hardware [None req-67d8c644-2517-46bc-9d7f-c0ef47d3d2eb tempest-ImagesOneServerNegativeTestJSON-1867967444 tempest-ImagesOneServerNegativeTestJSON-1867967444-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T09:21:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T09:21:17Z,direct_url=,disk_format='vmdk',id=43ea607c-7ece-4601-9b11-75c6a16aa7dd,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9d42cb2bbadf40d6b35f237f71234611',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T09:21:18Z,virtual_size=,visibility=), allow threads: False {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 612.517149] env[62070]: DEBUG nova.virt.hardware [None req-67d8c644-2517-46bc-9d7f-c0ef47d3d2eb tempest-ImagesOneServerNegativeTestJSON-1867967444 tempest-ImagesOneServerNegativeTestJSON-1867967444-project-member] Flavor limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 612.517149] env[62070]: DEBUG nova.virt.hardware [None req-67d8c644-2517-46bc-9d7f-c0ef47d3d2eb tempest-ImagesOneServerNegativeTestJSON-1867967444 tempest-ImagesOneServerNegativeTestJSON-1867967444-project-member] Image limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 612.517329] env[62070]: DEBUG nova.virt.hardware [None req-67d8c644-2517-46bc-9d7f-c0ef47d3d2eb tempest-ImagesOneServerNegativeTestJSON-1867967444 tempest-ImagesOneServerNegativeTestJSON-1867967444-project-member] Flavor pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 612.517329] env[62070]: DEBUG nova.virt.hardware [None req-67d8c644-2517-46bc-9d7f-c0ef47d3d2eb tempest-ImagesOneServerNegativeTestJSON-1867967444 tempest-ImagesOneServerNegativeTestJSON-1867967444-project-member] Image pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 612.517329] env[62070]: DEBUG nova.virt.hardware [None req-67d8c644-2517-46bc-9d7f-c0ef47d3d2eb tempest-ImagesOneServerNegativeTestJSON-1867967444 tempest-ImagesOneServerNegativeTestJSON-1867967444-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 
{{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 612.517329] env[62070]: DEBUG nova.virt.hardware [None req-67d8c644-2517-46bc-9d7f-c0ef47d3d2eb tempest-ImagesOneServerNegativeTestJSON-1867967444 tempest-ImagesOneServerNegativeTestJSON-1867967444-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 612.517329] env[62070]: DEBUG nova.virt.hardware [None req-67d8c644-2517-46bc-9d7f-c0ef47d3d2eb tempest-ImagesOneServerNegativeTestJSON-1867967444 tempest-ImagesOneServerNegativeTestJSON-1867967444-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 612.517586] env[62070]: DEBUG nova.virt.hardware [None req-67d8c644-2517-46bc-9d7f-c0ef47d3d2eb tempest-ImagesOneServerNegativeTestJSON-1867967444 tempest-ImagesOneServerNegativeTestJSON-1867967444-project-member] Got 1 possible topologies {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 612.517586] env[62070]: DEBUG nova.virt.hardware [None req-67d8c644-2517-46bc-9d7f-c0ef47d3d2eb tempest-ImagesOneServerNegativeTestJSON-1867967444 tempest-ImagesOneServerNegativeTestJSON-1867967444-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 612.517586] env[62070]: DEBUG nova.virt.hardware [None req-67d8c644-2517-46bc-9d7f-c0ef47d3d2eb tempest-ImagesOneServerNegativeTestJSON-1867967444 tempest-ImagesOneServerNegativeTestJSON-1867967444-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 612.518989] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fc99804-f8f4-4bf2-870f-3a0b6fd4f554 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.531987] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70e07494-01f7-4983-b193-b1fd5e38e39e {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.643134] env[62070]: DEBUG oslo_concurrency.lockutils [None req-defc523d-479c-483f-91ae-c073714ce1fe tempest-FloatingIPsAssociationNegativeTestJSON-743310595 tempest-FloatingIPsAssociationNegativeTestJSON-743310595-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 613.008232] env[62070]: DEBUG oslo_concurrency.lockutils [None req-fceeb460-143a-4ad7-bde1-61f1f32cf593 tempest-VolumesAssistedSnapshotsTest-120562102 tempest-VolumesAssistedSnapshotsTest-120562102-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.606s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 613.009630] env[62070]: DEBUG nova.compute.manager [None req-fceeb460-143a-4ad7-bde1-61f1f32cf593 tempest-VolumesAssistedSnapshotsTest-120562102 
tempest-VolumesAssistedSnapshotsTest-120562102-project-member] [instance: d7a90be3-d3d6-4626-944b-b907cf7fb64d] Start building networks asynchronously for instance. {{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 613.012399] env[62070]: DEBUG oslo_concurrency.lockutils [None req-df68df2e-4fc9-4497-809c-48ba87337e66 tempest-AttachInterfacesV270Test-562938472 tempest-AttachInterfacesV270Test-562938472-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 24.899s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 613.014416] env[62070]: INFO nova.compute.claims [None req-df68df2e-4fc9-4497-809c-48ba87337e66 tempest-AttachInterfacesV270Test-562938472 tempest-AttachInterfacesV270Test-562938472-project-member] [instance: ad0dd218-5e45-4d22-9d94-5c25ba8b22ec] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 613.519623] env[62070]: DEBUG nova.compute.utils [None req-fceeb460-143a-4ad7-bde1-61f1f32cf593 tempest-VolumesAssistedSnapshotsTest-120562102 tempest-VolumesAssistedSnapshotsTest-120562102-project-member] Using /dev/sd instead of None {{(pid=62070) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 613.524135] env[62070]: DEBUG nova.compute.manager [None req-fceeb460-143a-4ad7-bde1-61f1f32cf593 tempest-VolumesAssistedSnapshotsTest-120562102 tempest-VolumesAssistedSnapshotsTest-120562102-project-member] [instance: d7a90be3-d3d6-4626-944b-b907cf7fb64d] Allocating IP information in the background. {{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 613.524474] env[62070]: DEBUG nova.network.neutron [None req-fceeb460-143a-4ad7-bde1-61f1f32cf593 tempest-VolumesAssistedSnapshotsTest-120562102 tempest-VolumesAssistedSnapshotsTest-120562102-project-member] [instance: d7a90be3-d3d6-4626-944b-b907cf7fb64d] allocate_for_instance() {{(pid=62070) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 613.557191] env[62070]: ERROR nova.compute.manager [None req-67d8c644-2517-46bc-9d7f-c0ef47d3d2eb tempest-ImagesOneServerNegativeTestJSON-1867967444 tempest-ImagesOneServerNegativeTestJSON-1867967444-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 8016a15a-fbb1-4dfd-8dba-234cee68c43c, please check neutron logs for more information. 
[ 613.557191] env[62070]: ERROR nova.compute.manager Traceback (most recent call last): [ 613.557191] env[62070]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 613.557191] env[62070]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 613.557191] env[62070]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 613.557191] env[62070]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 613.557191] env[62070]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 613.557191] env[62070]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 613.557191] env[62070]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 613.557191] env[62070]: ERROR nova.compute.manager self.force_reraise() [ 613.557191] env[62070]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 613.557191] env[62070]: ERROR nova.compute.manager raise self.value [ 613.557191] env[62070]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 613.557191] env[62070]: ERROR nova.compute.manager updated_port = self._update_port( [ 613.557191] env[62070]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 613.557191] env[62070]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 613.557789] env[62070]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 613.557789] env[62070]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 613.557789] env[62070]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 8016a15a-fbb1-4dfd-8dba-234cee68c43c, please check neutron logs for more information. 
[ 613.557789] env[62070]: ERROR nova.compute.manager [ 613.557789] env[62070]: Traceback (most recent call last): [ 613.557789] env[62070]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 613.557789] env[62070]: listener.cb(fileno) [ 613.557789] env[62070]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 613.557789] env[62070]: result = function(*args, **kwargs) [ 613.557789] env[62070]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 613.557789] env[62070]: return func(*args, **kwargs) [ 613.557789] env[62070]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 613.557789] env[62070]: raise e [ 613.557789] env[62070]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 613.557789] env[62070]: nwinfo = self.network_api.allocate_for_instance( [ 613.557789] env[62070]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 613.557789] env[62070]: created_port_ids = self._update_ports_for_instance( [ 613.557789] env[62070]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 613.557789] env[62070]: with excutils.save_and_reraise_exception(): [ 613.557789] env[62070]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 613.557789] env[62070]: self.force_reraise() [ 613.557789] env[62070]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 613.557789] env[62070]: raise self.value [ 613.557789] env[62070]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 613.557789] env[62070]: updated_port = self._update_port( [ 613.557789] env[62070]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 613.557789] env[62070]: _ensure_no_port_binding_failure(port) [ 613.557789] env[62070]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 613.557789] env[62070]: raise exception.PortBindingFailed(port_id=port['id']) [ 613.558644] env[62070]: nova.exception.PortBindingFailed: Binding failed for port 8016a15a-fbb1-4dfd-8dba-234cee68c43c, please check neutron logs for more information. [ 613.558644] env[62070]: Removing descriptor: 16 [ 613.558644] env[62070]: ERROR nova.compute.manager [None req-67d8c644-2517-46bc-9d7f-c0ef47d3d2eb tempest-ImagesOneServerNegativeTestJSON-1867967444 tempest-ImagesOneServerNegativeTestJSON-1867967444-project-member] [instance: d8478b63-3a62-4afa-950b-edf9774e8ea8] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 8016a15a-fbb1-4dfd-8dba-234cee68c43c, please check neutron logs for more information. 
[ 613.558644] env[62070]: ERROR nova.compute.manager [instance: d8478b63-3a62-4afa-950b-edf9774e8ea8] Traceback (most recent call last): [ 613.558644] env[62070]: ERROR nova.compute.manager [instance: d8478b63-3a62-4afa-950b-edf9774e8ea8] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 613.558644] env[62070]: ERROR nova.compute.manager [instance: d8478b63-3a62-4afa-950b-edf9774e8ea8] yield resources [ 613.558644] env[62070]: ERROR nova.compute.manager [instance: d8478b63-3a62-4afa-950b-edf9774e8ea8] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 613.558644] env[62070]: ERROR nova.compute.manager [instance: d8478b63-3a62-4afa-950b-edf9774e8ea8] self.driver.spawn(context, instance, image_meta, [ 613.558644] env[62070]: ERROR nova.compute.manager [instance: d8478b63-3a62-4afa-950b-edf9774e8ea8] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 613.558644] env[62070]: ERROR nova.compute.manager [instance: d8478b63-3a62-4afa-950b-edf9774e8ea8] self._vmops.spawn(context, instance, image_meta, injected_files, [ 613.558644] env[62070]: ERROR nova.compute.manager [instance: d8478b63-3a62-4afa-950b-edf9774e8ea8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 613.558644] env[62070]: ERROR nova.compute.manager [instance: d8478b63-3a62-4afa-950b-edf9774e8ea8] vm_ref = self.build_virtual_machine(instance, [ 613.558996] env[62070]: ERROR nova.compute.manager [instance: d8478b63-3a62-4afa-950b-edf9774e8ea8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 613.558996] env[62070]: ERROR nova.compute.manager [instance: d8478b63-3a62-4afa-950b-edf9774e8ea8] vif_infos = vmwarevif.get_vif_info(self._session, [ 613.558996] env[62070]: ERROR nova.compute.manager [instance: d8478b63-3a62-4afa-950b-edf9774e8ea8] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 613.558996] env[62070]: ERROR nova.compute.manager [instance: d8478b63-3a62-4afa-950b-edf9774e8ea8] for vif in network_info: [ 613.558996] env[62070]: ERROR nova.compute.manager [instance: d8478b63-3a62-4afa-950b-edf9774e8ea8] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 613.558996] env[62070]: ERROR nova.compute.manager [instance: d8478b63-3a62-4afa-950b-edf9774e8ea8] return self._sync_wrapper(fn, *args, **kwargs) [ 613.558996] env[62070]: ERROR nova.compute.manager [instance: d8478b63-3a62-4afa-950b-edf9774e8ea8] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 613.558996] env[62070]: ERROR nova.compute.manager [instance: d8478b63-3a62-4afa-950b-edf9774e8ea8] self.wait() [ 613.558996] env[62070]: ERROR nova.compute.manager [instance: d8478b63-3a62-4afa-950b-edf9774e8ea8] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 613.558996] env[62070]: ERROR nova.compute.manager [instance: d8478b63-3a62-4afa-950b-edf9774e8ea8] self[:] = self._gt.wait() [ 613.558996] env[62070]: ERROR nova.compute.manager [instance: d8478b63-3a62-4afa-950b-edf9774e8ea8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 613.558996] env[62070]: ERROR nova.compute.manager [instance: d8478b63-3a62-4afa-950b-edf9774e8ea8] return self._exit_event.wait() [ 613.558996] env[62070]: ERROR nova.compute.manager [instance: d8478b63-3a62-4afa-950b-edf9774e8ea8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 613.559408] env[62070]: ERROR 
nova.compute.manager [instance: d8478b63-3a62-4afa-950b-edf9774e8ea8] result = hub.switch() [ 613.559408] env[62070]: ERROR nova.compute.manager [instance: d8478b63-3a62-4afa-950b-edf9774e8ea8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 613.559408] env[62070]: ERROR nova.compute.manager [instance: d8478b63-3a62-4afa-950b-edf9774e8ea8] return self.greenlet.switch() [ 613.559408] env[62070]: ERROR nova.compute.manager [instance: d8478b63-3a62-4afa-950b-edf9774e8ea8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 613.559408] env[62070]: ERROR nova.compute.manager [instance: d8478b63-3a62-4afa-950b-edf9774e8ea8] result = function(*args, **kwargs) [ 613.559408] env[62070]: ERROR nova.compute.manager [instance: d8478b63-3a62-4afa-950b-edf9774e8ea8] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 613.559408] env[62070]: ERROR nova.compute.manager [instance: d8478b63-3a62-4afa-950b-edf9774e8ea8] return func(*args, **kwargs) [ 613.559408] env[62070]: ERROR nova.compute.manager [instance: d8478b63-3a62-4afa-950b-edf9774e8ea8] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 613.559408] env[62070]: ERROR nova.compute.manager [instance: d8478b63-3a62-4afa-950b-edf9774e8ea8] raise e [ 613.559408] env[62070]: ERROR nova.compute.manager [instance: d8478b63-3a62-4afa-950b-edf9774e8ea8] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 613.559408] env[62070]: ERROR nova.compute.manager [instance: d8478b63-3a62-4afa-950b-edf9774e8ea8] nwinfo = self.network_api.allocate_for_instance( [ 613.559408] env[62070]: ERROR nova.compute.manager [instance: d8478b63-3a62-4afa-950b-edf9774e8ea8] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 613.559408] env[62070]: ERROR nova.compute.manager [instance: d8478b63-3a62-4afa-950b-edf9774e8ea8] created_port_ids = self._update_ports_for_instance( [ 613.559867] env[62070]: ERROR nova.compute.manager [instance: d8478b63-3a62-4afa-950b-edf9774e8ea8] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 613.559867] env[62070]: ERROR nova.compute.manager [instance: d8478b63-3a62-4afa-950b-edf9774e8ea8] with excutils.save_and_reraise_exception(): [ 613.559867] env[62070]: ERROR nova.compute.manager [instance: d8478b63-3a62-4afa-950b-edf9774e8ea8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 613.559867] env[62070]: ERROR nova.compute.manager [instance: d8478b63-3a62-4afa-950b-edf9774e8ea8] self.force_reraise() [ 613.559867] env[62070]: ERROR nova.compute.manager [instance: d8478b63-3a62-4afa-950b-edf9774e8ea8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 613.559867] env[62070]: ERROR nova.compute.manager [instance: d8478b63-3a62-4afa-950b-edf9774e8ea8] raise self.value [ 613.559867] env[62070]: ERROR nova.compute.manager [instance: d8478b63-3a62-4afa-950b-edf9774e8ea8] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 613.559867] env[62070]: ERROR nova.compute.manager [instance: d8478b63-3a62-4afa-950b-edf9774e8ea8] updated_port = self._update_port( [ 613.559867] env[62070]: ERROR nova.compute.manager [instance: d8478b63-3a62-4afa-950b-edf9774e8ea8] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 613.559867] 
env[62070]: ERROR nova.compute.manager [instance: d8478b63-3a62-4afa-950b-edf9774e8ea8] _ensure_no_port_binding_failure(port) [ 613.559867] env[62070]: ERROR nova.compute.manager [instance: d8478b63-3a62-4afa-950b-edf9774e8ea8] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 613.559867] env[62070]: ERROR nova.compute.manager [instance: d8478b63-3a62-4afa-950b-edf9774e8ea8] raise exception.PortBindingFailed(port_id=port['id']) [ 613.560252] env[62070]: ERROR nova.compute.manager [instance: d8478b63-3a62-4afa-950b-edf9774e8ea8] nova.exception.PortBindingFailed: Binding failed for port 8016a15a-fbb1-4dfd-8dba-234cee68c43c, please check neutron logs for more information. [ 613.560252] env[62070]: ERROR nova.compute.manager [instance: d8478b63-3a62-4afa-950b-edf9774e8ea8] [ 613.560252] env[62070]: INFO nova.compute.manager [None req-67d8c644-2517-46bc-9d7f-c0ef47d3d2eb tempest-ImagesOneServerNegativeTestJSON-1867967444 tempest-ImagesOneServerNegativeTestJSON-1867967444-project-member] [instance: d8478b63-3a62-4afa-950b-edf9774e8ea8] Terminating instance [ 613.560882] env[62070]: DEBUG oslo_concurrency.lockutils [None req-67d8c644-2517-46bc-9d7f-c0ef47d3d2eb tempest-ImagesOneServerNegativeTestJSON-1867967444 tempest-ImagesOneServerNegativeTestJSON-1867967444-project-member] Acquiring lock "refresh_cache-d8478b63-3a62-4afa-950b-edf9774e8ea8" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 613.560882] env[62070]: DEBUG oslo_concurrency.lockutils [None req-67d8c644-2517-46bc-9d7f-c0ef47d3d2eb tempest-ImagesOneServerNegativeTestJSON-1867967444 tempest-ImagesOneServerNegativeTestJSON-1867967444-project-member] Acquired lock "refresh_cache-d8478b63-3a62-4afa-950b-edf9774e8ea8" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 613.561534] env[62070]: DEBUG nova.network.neutron [None req-67d8c644-2517-46bc-9d7f-c0ef47d3d2eb tempest-ImagesOneServerNegativeTestJSON-1867967444 tempest-ImagesOneServerNegativeTestJSON-1867967444-project-member] [instance: d8478b63-3a62-4afa-950b-edf9774e8ea8] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 613.581908] env[62070]: DEBUG nova.policy [None req-fceeb460-143a-4ad7-bde1-61f1f32cf593 tempest-VolumesAssistedSnapshotsTest-120562102 tempest-VolumesAssistedSnapshotsTest-120562102-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'fdc419e70e7042cf9f68ea3283e84fed', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6d33039a6e2146e78dbb0eb74a94bacd', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62070) authorize /opt/stack/nova/nova/policy.py:201}} [ 613.667155] env[62070]: DEBUG nova.compute.manager [req-fc23f212-2190-47e8-946b-9aa73decc2e2 req-6bd3ce60-286b-47ff-8dbd-2dab011447e6 service nova] [instance: d8478b63-3a62-4afa-950b-edf9774e8ea8] Received event network-changed-8016a15a-fbb1-4dfd-8dba-234cee68c43c {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 613.667446] env[62070]: DEBUG nova.compute.manager [req-fc23f212-2190-47e8-946b-9aa73decc2e2 req-6bd3ce60-286b-47ff-8dbd-2dab011447e6 service nova] 
[instance: d8478b63-3a62-4afa-950b-edf9774e8ea8] Refreshing instance network info cache due to event network-changed-8016a15a-fbb1-4dfd-8dba-234cee68c43c. {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 613.667623] env[62070]: DEBUG oslo_concurrency.lockutils [req-fc23f212-2190-47e8-946b-9aa73decc2e2 req-6bd3ce60-286b-47ff-8dbd-2dab011447e6 service nova] Acquiring lock "refresh_cache-d8478b63-3a62-4afa-950b-edf9774e8ea8" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 614.029227] env[62070]: DEBUG nova.compute.manager [None req-fceeb460-143a-4ad7-bde1-61f1f32cf593 tempest-VolumesAssistedSnapshotsTest-120562102 tempest-VolumesAssistedSnapshotsTest-120562102-project-member] [instance: d7a90be3-d3d6-4626-944b-b907cf7fb64d] Start building block device mappings for instance. {{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 614.074385] env[62070]: DEBUG nova.network.neutron [None req-fceeb460-143a-4ad7-bde1-61f1f32cf593 tempest-VolumesAssistedSnapshotsTest-120562102 tempest-VolumesAssistedSnapshotsTest-120562102-project-member] [instance: d7a90be3-d3d6-4626-944b-b907cf7fb64d] Successfully created port: df0d0694-d2de-4148-8d6d-2700c2ec9a24 {{(pid=62070) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 614.090756] env[62070]: DEBUG nova.network.neutron [None req-67d8c644-2517-46bc-9d7f-c0ef47d3d2eb tempest-ImagesOneServerNegativeTestJSON-1867967444 tempest-ImagesOneServerNegativeTestJSON-1867967444-project-member] [instance: d8478b63-3a62-4afa-950b-edf9774e8ea8] Instance cache missing network info. {{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 614.234831] env[62070]: DEBUG nova.network.neutron [None req-67d8c644-2517-46bc-9d7f-c0ef47d3d2eb tempest-ImagesOneServerNegativeTestJSON-1867967444 tempest-ImagesOneServerNegativeTestJSON-1867967444-project-member] [instance: d8478b63-3a62-4afa-950b-edf9774e8ea8] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 614.245259] env[62070]: DEBUG oslo_concurrency.lockutils [None req-87135f2c-55d6-41ab-9b21-58289e78f833 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Acquiring lock "963feecc-ff58-4cbb-8d6f-3f9035337087" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 614.245614] env[62070]: DEBUG oslo_concurrency.lockutils [None req-87135f2c-55d6-41ab-9b21-58289e78f833 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Lock "963feecc-ff58-4cbb-8d6f-3f9035337087" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 614.492344] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4bf6d3d-3490-4a1b-91ab-6d66264cd569 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.502427] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37b41e85-b73f-4a43-bc0b-a67d26a42059 {{(pid=62070) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.543757] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-925d9f47-5fe8-4f72-9648-f52232bda412 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.553528] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12208d49-5a33-4528-a745-6b77b940b7f4 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.569849] env[62070]: DEBUG nova.compute.provider_tree [None req-df68df2e-4fc9-4497-809c-48ba87337e66 tempest-AttachInterfacesV270Test-562938472 tempest-AttachInterfacesV270Test-562938472-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 614.737581] env[62070]: DEBUG oslo_concurrency.lockutils [None req-67d8c644-2517-46bc-9d7f-c0ef47d3d2eb tempest-ImagesOneServerNegativeTestJSON-1867967444 tempest-ImagesOneServerNegativeTestJSON-1867967444-project-member] Releasing lock "refresh_cache-d8478b63-3a62-4afa-950b-edf9774e8ea8" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 614.738255] env[62070]: DEBUG nova.compute.manager [None req-67d8c644-2517-46bc-9d7f-c0ef47d3d2eb tempest-ImagesOneServerNegativeTestJSON-1867967444 tempest-ImagesOneServerNegativeTestJSON-1867967444-project-member] [instance: d8478b63-3a62-4afa-950b-edf9774e8ea8] Start destroying the instance on the hypervisor. {{(pid=62070) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 614.738569] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-67d8c644-2517-46bc-9d7f-c0ef47d3d2eb tempest-ImagesOneServerNegativeTestJSON-1867967444 tempest-ImagesOneServerNegativeTestJSON-1867967444-project-member] [instance: d8478b63-3a62-4afa-950b-edf9774e8ea8] Destroying instance {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 614.739469] env[62070]: DEBUG oslo_concurrency.lockutils [req-fc23f212-2190-47e8-946b-9aa73decc2e2 req-6bd3ce60-286b-47ff-8dbd-2dab011447e6 service nova] Acquired lock "refresh_cache-d8478b63-3a62-4afa-950b-edf9774e8ea8" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 614.739760] env[62070]: DEBUG nova.network.neutron [req-fc23f212-2190-47e8-946b-9aa73decc2e2 req-6bd3ce60-286b-47ff-8dbd-2dab011447e6 service nova] [instance: d8478b63-3a62-4afa-950b-edf9774e8ea8] Refreshing network info cache for port 8016a15a-fbb1-4dfd-8dba-234cee68c43c {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 614.741242] env[62070]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3ffe0571-00ad-4a38-8d98-77fff335bdb7 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.750626] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a4652a7-9c16-4130-9408-6e71843ca3a3 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.772692] env[62070]: WARNING nova.virt.vmwareapi.vmops [None req-67d8c644-2517-46bc-9d7f-c0ef47d3d2eb 
tempest-ImagesOneServerNegativeTestJSON-1867967444 tempest-ImagesOneServerNegativeTestJSON-1867967444-project-member] [instance: d8478b63-3a62-4afa-950b-edf9774e8ea8] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance d8478b63-3a62-4afa-950b-edf9774e8ea8 could not be found. [ 614.772920] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-67d8c644-2517-46bc-9d7f-c0ef47d3d2eb tempest-ImagesOneServerNegativeTestJSON-1867967444 tempest-ImagesOneServerNegativeTestJSON-1867967444-project-member] [instance: d8478b63-3a62-4afa-950b-edf9774e8ea8] Instance destroyed {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 614.773113] env[62070]: INFO nova.compute.manager [None req-67d8c644-2517-46bc-9d7f-c0ef47d3d2eb tempest-ImagesOneServerNegativeTestJSON-1867967444 tempest-ImagesOneServerNegativeTestJSON-1867967444-project-member] [instance: d8478b63-3a62-4afa-950b-edf9774e8ea8] Took 0.03 seconds to destroy the instance on the hypervisor. [ 614.773355] env[62070]: DEBUG oslo.service.loopingcall [None req-67d8c644-2517-46bc-9d7f-c0ef47d3d2eb tempest-ImagesOneServerNegativeTestJSON-1867967444 tempest-ImagesOneServerNegativeTestJSON-1867967444-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 614.773574] env[62070]: DEBUG nova.compute.manager [-] [instance: d8478b63-3a62-4afa-950b-edf9774e8ea8] Deallocating network for instance {{(pid=62070) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 614.773667] env[62070]: DEBUG nova.network.neutron [-] [instance: d8478b63-3a62-4afa-950b-edf9774e8ea8] deallocate_for_instance() {{(pid=62070) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 614.801546] env[62070]: DEBUG nova.network.neutron [-] [instance: d8478b63-3a62-4afa-950b-edf9774e8ea8] Instance cache missing network info. {{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 615.050239] env[62070]: DEBUG nova.compute.manager [None req-fceeb460-143a-4ad7-bde1-61f1f32cf593 tempest-VolumesAssistedSnapshotsTest-120562102 tempest-VolumesAssistedSnapshotsTest-120562102-project-member] [instance: d7a90be3-d3d6-4626-944b-b907cf7fb64d] Start spawning the instance on the hypervisor. 
{{(pid=62070) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 615.073937] env[62070]: DEBUG nova.scheduler.client.report [None req-df68df2e-4fc9-4497-809c-48ba87337e66 tempest-AttachInterfacesV270Test-562938472 tempest-AttachInterfacesV270Test-562938472-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 615.091808] env[62070]: DEBUG nova.virt.hardware [None req-fceeb460-143a-4ad7-bde1-61f1f32cf593 tempest-VolumesAssistedSnapshotsTest-120562102 tempest-VolumesAssistedSnapshotsTest-120562102-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T09:21:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T09:21:17Z,direct_url=,disk_format='vmdk',id=43ea607c-7ece-4601-9b11-75c6a16aa7dd,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9d42cb2bbadf40d6b35f237f71234611',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T09:21:18Z,virtual_size=,visibility=), allow threads: False {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 615.092115] env[62070]: DEBUG nova.virt.hardware [None req-fceeb460-143a-4ad7-bde1-61f1f32cf593 tempest-VolumesAssistedSnapshotsTest-120562102 tempest-VolumesAssistedSnapshotsTest-120562102-project-member] Flavor limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 615.092419] env[62070]: DEBUG nova.virt.hardware [None req-fceeb460-143a-4ad7-bde1-61f1f32cf593 tempest-VolumesAssistedSnapshotsTest-120562102 tempest-VolumesAssistedSnapshotsTest-120562102-project-member] Image limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 615.093417] env[62070]: DEBUG nova.virt.hardware [None req-fceeb460-143a-4ad7-bde1-61f1f32cf593 tempest-VolumesAssistedSnapshotsTest-120562102 tempest-VolumesAssistedSnapshotsTest-120562102-project-member] Flavor pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 615.093417] env[62070]: DEBUG nova.virt.hardware [None req-fceeb460-143a-4ad7-bde1-61f1f32cf593 tempest-VolumesAssistedSnapshotsTest-120562102 tempest-VolumesAssistedSnapshotsTest-120562102-project-member] Image pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 615.093417] env[62070]: DEBUG nova.virt.hardware [None req-fceeb460-143a-4ad7-bde1-61f1f32cf593 tempest-VolumesAssistedSnapshotsTest-120562102 tempest-VolumesAssistedSnapshotsTest-120562102-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62070) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:430}} [ 615.093417] env[62070]: DEBUG nova.virt.hardware [None req-fceeb460-143a-4ad7-bde1-61f1f32cf593 tempest-VolumesAssistedSnapshotsTest-120562102 tempest-VolumesAssistedSnapshotsTest-120562102-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 615.093417] env[62070]: DEBUG nova.virt.hardware [None req-fceeb460-143a-4ad7-bde1-61f1f32cf593 tempest-VolumesAssistedSnapshotsTest-120562102 tempest-VolumesAssistedSnapshotsTest-120562102-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 615.093683] env[62070]: DEBUG nova.virt.hardware [None req-fceeb460-143a-4ad7-bde1-61f1f32cf593 tempest-VolumesAssistedSnapshotsTest-120562102 tempest-VolumesAssistedSnapshotsTest-120562102-project-member] Got 1 possible topologies {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 615.093884] env[62070]: DEBUG nova.virt.hardware [None req-fceeb460-143a-4ad7-bde1-61f1f32cf593 tempest-VolumesAssistedSnapshotsTest-120562102 tempest-VolumesAssistedSnapshotsTest-120562102-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 615.094184] env[62070]: DEBUG nova.virt.hardware [None req-fceeb460-143a-4ad7-bde1-61f1f32cf593 tempest-VolumesAssistedSnapshotsTest-120562102 tempest-VolumesAssistedSnapshotsTest-120562102-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 615.095297] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71a554d1-5b06-44af-9797-fc376d6f3183 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.104646] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9284ecb1-c9db-4739-a055-f984756ad314 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.259701] env[62070]: DEBUG nova.network.neutron [req-fc23f212-2190-47e8-946b-9aa73decc2e2 req-6bd3ce60-286b-47ff-8dbd-2dab011447e6 service nova] [instance: d8478b63-3a62-4afa-950b-edf9774e8ea8] Instance cache missing network info. 
{{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 615.303759] env[62070]: DEBUG nova.network.neutron [-] [instance: d8478b63-3a62-4afa-950b-edf9774e8ea8] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 615.350586] env[62070]: DEBUG nova.network.neutron [req-fc23f212-2190-47e8-946b-9aa73decc2e2 req-6bd3ce60-286b-47ff-8dbd-2dab011447e6 service nova] [instance: d8478b63-3a62-4afa-950b-edf9774e8ea8] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 615.580590] env[62070]: DEBUG oslo_concurrency.lockutils [None req-df68df2e-4fc9-4497-809c-48ba87337e66 tempest-AttachInterfacesV270Test-562938472 tempest-AttachInterfacesV270Test-562938472-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.568s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 615.581152] env[62070]: DEBUG nova.compute.manager [None req-df68df2e-4fc9-4497-809c-48ba87337e66 tempest-AttachInterfacesV270Test-562938472 tempest-AttachInterfacesV270Test-562938472-project-member] [instance: ad0dd218-5e45-4d22-9d94-5c25ba8b22ec] Start building networks asynchronously for instance. {{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 615.590908] env[62070]: DEBUG oslo_concurrency.lockutils [None req-1f16b43e-5915-487b-ba0a-4bf16e720213 tempest-ImagesNegativeTestJSON-1253036663 tempest-ImagesNegativeTestJSON-1253036663-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 24.427s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 615.701618] env[62070]: DEBUG nova.compute.manager [req-a10ad6a7-ff1d-4f00-87f3-d0cbafb9204a req-06904f61-d92a-4932-b088-c8c558a8785a service nova] [instance: d8478b63-3a62-4afa-950b-edf9774e8ea8] Received event network-vif-deleted-8016a15a-fbb1-4dfd-8dba-234cee68c43c {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 615.707898] env[62070]: DEBUG nova.compute.manager [req-5583cfdd-9330-425d-9f17-8474cf26088d req-b7de5b9c-d2ef-40f6-9f3b-8c5e8619c672 service nova] [instance: d7a90be3-d3d6-4626-944b-b907cf7fb64d] Received event network-changed-df0d0694-d2de-4148-8d6d-2700c2ec9a24 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 615.708114] env[62070]: DEBUG nova.compute.manager [req-5583cfdd-9330-425d-9f17-8474cf26088d req-b7de5b9c-d2ef-40f6-9f3b-8c5e8619c672 service nova] [instance: d7a90be3-d3d6-4626-944b-b907cf7fb64d] Refreshing instance network info cache due to event network-changed-df0d0694-d2de-4148-8d6d-2700c2ec9a24. 
{{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 615.708333] env[62070]: DEBUG oslo_concurrency.lockutils [req-5583cfdd-9330-425d-9f17-8474cf26088d req-b7de5b9c-d2ef-40f6-9f3b-8c5e8619c672 service nova] Acquiring lock "refresh_cache-d7a90be3-d3d6-4626-944b-b907cf7fb64d" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 615.708477] env[62070]: DEBUG oslo_concurrency.lockutils [req-5583cfdd-9330-425d-9f17-8474cf26088d req-b7de5b9c-d2ef-40f6-9f3b-8c5e8619c672 service nova] Acquired lock "refresh_cache-d7a90be3-d3d6-4626-944b-b907cf7fb64d" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 615.708636] env[62070]: DEBUG nova.network.neutron [req-5583cfdd-9330-425d-9f17-8474cf26088d req-b7de5b9c-d2ef-40f6-9f3b-8c5e8619c672 service nova] [instance: d7a90be3-d3d6-4626-944b-b907cf7fb64d] Refreshing network info cache for port df0d0694-d2de-4148-8d6d-2700c2ec9a24 {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 615.807515] env[62070]: INFO nova.compute.manager [-] [instance: d8478b63-3a62-4afa-950b-edf9774e8ea8] Took 1.03 seconds to deallocate network for instance. [ 615.810433] env[62070]: DEBUG nova.compute.claims [None req-67d8c644-2517-46bc-9d7f-c0ef47d3d2eb tempest-ImagesOneServerNegativeTestJSON-1867967444 tempest-ImagesOneServerNegativeTestJSON-1867967444-project-member] [instance: d8478b63-3a62-4afa-950b-edf9774e8ea8] Aborting claim: {{(pid=62070) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 615.810624] env[62070]: DEBUG oslo_concurrency.lockutils [None req-67d8c644-2517-46bc-9d7f-c0ef47d3d2eb tempest-ImagesOneServerNegativeTestJSON-1867967444 tempest-ImagesOneServerNegativeTestJSON-1867967444-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 615.853557] env[62070]: DEBUG oslo_concurrency.lockutils [req-fc23f212-2190-47e8-946b-9aa73decc2e2 req-6bd3ce60-286b-47ff-8dbd-2dab011447e6 service nova] Releasing lock "refresh_cache-d8478b63-3a62-4afa-950b-edf9774e8ea8" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 616.067933] env[62070]: ERROR nova.compute.manager [None req-fceeb460-143a-4ad7-bde1-61f1f32cf593 tempest-VolumesAssistedSnapshotsTest-120562102 tempest-VolumesAssistedSnapshotsTest-120562102-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port df0d0694-d2de-4148-8d6d-2700c2ec9a24, please check neutron logs for more information. 
[ 616.067933] env[62070]: ERROR nova.compute.manager Traceback (most recent call last): [ 616.067933] env[62070]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 616.067933] env[62070]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 616.067933] env[62070]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 616.067933] env[62070]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 616.067933] env[62070]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 616.067933] env[62070]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 616.067933] env[62070]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 616.067933] env[62070]: ERROR nova.compute.manager self.force_reraise() [ 616.067933] env[62070]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 616.067933] env[62070]: ERROR nova.compute.manager raise self.value [ 616.067933] env[62070]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 616.067933] env[62070]: ERROR nova.compute.manager updated_port = self._update_port( [ 616.067933] env[62070]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 616.067933] env[62070]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 616.068523] env[62070]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 616.068523] env[62070]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 616.068523] env[62070]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port df0d0694-d2de-4148-8d6d-2700c2ec9a24, please check neutron logs for more information. 
[ 616.068523] env[62070]: ERROR nova.compute.manager [ 616.068523] env[62070]: Traceback (most recent call last): [ 616.068523] env[62070]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 616.068523] env[62070]: listener.cb(fileno) [ 616.068523] env[62070]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 616.068523] env[62070]: result = function(*args, **kwargs) [ 616.068523] env[62070]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 616.068523] env[62070]: return func(*args, **kwargs) [ 616.068523] env[62070]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 616.068523] env[62070]: raise e [ 616.068523] env[62070]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 616.068523] env[62070]: nwinfo = self.network_api.allocate_for_instance( [ 616.068523] env[62070]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 616.068523] env[62070]: created_port_ids = self._update_ports_for_instance( [ 616.068523] env[62070]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 616.068523] env[62070]: with excutils.save_and_reraise_exception(): [ 616.068523] env[62070]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 616.068523] env[62070]: self.force_reraise() [ 616.068523] env[62070]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 616.068523] env[62070]: raise self.value [ 616.068523] env[62070]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 616.068523] env[62070]: updated_port = self._update_port( [ 616.068523] env[62070]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 616.068523] env[62070]: _ensure_no_port_binding_failure(port) [ 616.068523] env[62070]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 616.068523] env[62070]: raise exception.PortBindingFailed(port_id=port['id']) [ 616.069557] env[62070]: nova.exception.PortBindingFailed: Binding failed for port df0d0694-d2de-4148-8d6d-2700c2ec9a24, please check neutron logs for more information. [ 616.069557] env[62070]: Removing descriptor: 14 [ 616.069557] env[62070]: ERROR nova.compute.manager [None req-fceeb460-143a-4ad7-bde1-61f1f32cf593 tempest-VolumesAssistedSnapshotsTest-120562102 tempest-VolumesAssistedSnapshotsTest-120562102-project-member] [instance: d7a90be3-d3d6-4626-944b-b907cf7fb64d] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port df0d0694-d2de-4148-8d6d-2700c2ec9a24, please check neutron logs for more information. 
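Editor's note: every traceback in this section passes through the same cleanup construct at nova/network/neutron.py:1365 before PortBindingFailed reaches the compute manager (the per-instance copy of this traceback for d7a90be3-d3d6-4626-944b-b907cf7fb64d follows below). What follows is a minimal sketch, not Nova's code, of that oslo_utils.excutils.save_and_reraise_exception() pattern: cleanup runs inside the context manager, then the original exception is re-raised on exit. It assumes oslo.utils is installed; update_port() is a hypothetical stand-in for the real port-update call.

    # Sketch of the pattern at neutron.py:1365 in the dumps above (assumption:
    # update_port() stands in for Nova's self._update_port()).
    from oslo_utils import excutils

    def update_ports(ports, update_port):
        updated = []
        for port in ports:
            try:
                updated.append(update_port(port))
            except Exception:
                with excutils.save_and_reraise_exception():
                    # Roll back or log here; unless .reraise is set to False,
                    # the caught exception propagates with its original traceback.
                    print("update failed; already updated: %s" % updated)
        return updated
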
[ 616.069557] env[62070]: ERROR nova.compute.manager [instance: d7a90be3-d3d6-4626-944b-b907cf7fb64d] Traceback (most recent call last): [ 616.069557] env[62070]: ERROR nova.compute.manager [instance: d7a90be3-d3d6-4626-944b-b907cf7fb64d] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 616.069557] env[62070]: ERROR nova.compute.manager [instance: d7a90be3-d3d6-4626-944b-b907cf7fb64d] yield resources [ 616.069557] env[62070]: ERROR nova.compute.manager [instance: d7a90be3-d3d6-4626-944b-b907cf7fb64d] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 616.069557] env[62070]: ERROR nova.compute.manager [instance: d7a90be3-d3d6-4626-944b-b907cf7fb64d] self.driver.spawn(context, instance, image_meta, [ 616.069557] env[62070]: ERROR nova.compute.manager [instance: d7a90be3-d3d6-4626-944b-b907cf7fb64d] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 616.069557] env[62070]: ERROR nova.compute.manager [instance: d7a90be3-d3d6-4626-944b-b907cf7fb64d] self._vmops.spawn(context, instance, image_meta, injected_files, [ 616.069557] env[62070]: ERROR nova.compute.manager [instance: d7a90be3-d3d6-4626-944b-b907cf7fb64d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 616.069557] env[62070]: ERROR nova.compute.manager [instance: d7a90be3-d3d6-4626-944b-b907cf7fb64d] vm_ref = self.build_virtual_machine(instance, [ 616.070051] env[62070]: ERROR nova.compute.manager [instance: d7a90be3-d3d6-4626-944b-b907cf7fb64d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 616.070051] env[62070]: ERROR nova.compute.manager [instance: d7a90be3-d3d6-4626-944b-b907cf7fb64d] vif_infos = vmwarevif.get_vif_info(self._session, [ 616.070051] env[62070]: ERROR nova.compute.manager [instance: d7a90be3-d3d6-4626-944b-b907cf7fb64d] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 616.070051] env[62070]: ERROR nova.compute.manager [instance: d7a90be3-d3d6-4626-944b-b907cf7fb64d] for vif in network_info: [ 616.070051] env[62070]: ERROR nova.compute.manager [instance: d7a90be3-d3d6-4626-944b-b907cf7fb64d] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 616.070051] env[62070]: ERROR nova.compute.manager [instance: d7a90be3-d3d6-4626-944b-b907cf7fb64d] return self._sync_wrapper(fn, *args, **kwargs) [ 616.070051] env[62070]: ERROR nova.compute.manager [instance: d7a90be3-d3d6-4626-944b-b907cf7fb64d] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 616.070051] env[62070]: ERROR nova.compute.manager [instance: d7a90be3-d3d6-4626-944b-b907cf7fb64d] self.wait() [ 616.070051] env[62070]: ERROR nova.compute.manager [instance: d7a90be3-d3d6-4626-944b-b907cf7fb64d] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 616.070051] env[62070]: ERROR nova.compute.manager [instance: d7a90be3-d3d6-4626-944b-b907cf7fb64d] self[:] = self._gt.wait() [ 616.070051] env[62070]: ERROR nova.compute.manager [instance: d7a90be3-d3d6-4626-944b-b907cf7fb64d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 616.070051] env[62070]: ERROR nova.compute.manager [instance: d7a90be3-d3d6-4626-944b-b907cf7fb64d] return self._exit_event.wait() [ 616.070051] env[62070]: ERROR nova.compute.manager [instance: d7a90be3-d3d6-4626-944b-b907cf7fb64d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 616.070467] env[62070]: ERROR 
nova.compute.manager [instance: d7a90be3-d3d6-4626-944b-b907cf7fb64d] result = hub.switch() [ 616.070467] env[62070]: ERROR nova.compute.manager [instance: d7a90be3-d3d6-4626-944b-b907cf7fb64d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 616.070467] env[62070]: ERROR nova.compute.manager [instance: d7a90be3-d3d6-4626-944b-b907cf7fb64d] return self.greenlet.switch() [ 616.070467] env[62070]: ERROR nova.compute.manager [instance: d7a90be3-d3d6-4626-944b-b907cf7fb64d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 616.070467] env[62070]: ERROR nova.compute.manager [instance: d7a90be3-d3d6-4626-944b-b907cf7fb64d] result = function(*args, **kwargs) [ 616.070467] env[62070]: ERROR nova.compute.manager [instance: d7a90be3-d3d6-4626-944b-b907cf7fb64d] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 616.070467] env[62070]: ERROR nova.compute.manager [instance: d7a90be3-d3d6-4626-944b-b907cf7fb64d] return func(*args, **kwargs) [ 616.070467] env[62070]: ERROR nova.compute.manager [instance: d7a90be3-d3d6-4626-944b-b907cf7fb64d] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 616.070467] env[62070]: ERROR nova.compute.manager [instance: d7a90be3-d3d6-4626-944b-b907cf7fb64d] raise e [ 616.070467] env[62070]: ERROR nova.compute.manager [instance: d7a90be3-d3d6-4626-944b-b907cf7fb64d] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 616.070467] env[62070]: ERROR nova.compute.manager [instance: d7a90be3-d3d6-4626-944b-b907cf7fb64d] nwinfo = self.network_api.allocate_for_instance( [ 616.070467] env[62070]: ERROR nova.compute.manager [instance: d7a90be3-d3d6-4626-944b-b907cf7fb64d] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 616.070467] env[62070]: ERROR nova.compute.manager [instance: d7a90be3-d3d6-4626-944b-b907cf7fb64d] created_port_ids = self._update_ports_for_instance( [ 616.070893] env[62070]: ERROR nova.compute.manager [instance: d7a90be3-d3d6-4626-944b-b907cf7fb64d] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 616.070893] env[62070]: ERROR nova.compute.manager [instance: d7a90be3-d3d6-4626-944b-b907cf7fb64d] with excutils.save_and_reraise_exception(): [ 616.070893] env[62070]: ERROR nova.compute.manager [instance: d7a90be3-d3d6-4626-944b-b907cf7fb64d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 616.070893] env[62070]: ERROR nova.compute.manager [instance: d7a90be3-d3d6-4626-944b-b907cf7fb64d] self.force_reraise() [ 616.070893] env[62070]: ERROR nova.compute.manager [instance: d7a90be3-d3d6-4626-944b-b907cf7fb64d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 616.070893] env[62070]: ERROR nova.compute.manager [instance: d7a90be3-d3d6-4626-944b-b907cf7fb64d] raise self.value [ 616.070893] env[62070]: ERROR nova.compute.manager [instance: d7a90be3-d3d6-4626-944b-b907cf7fb64d] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 616.070893] env[62070]: ERROR nova.compute.manager [instance: d7a90be3-d3d6-4626-944b-b907cf7fb64d] updated_port = self._update_port( [ 616.070893] env[62070]: ERROR nova.compute.manager [instance: d7a90be3-d3d6-4626-944b-b907cf7fb64d] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 616.070893] 
env[62070]: ERROR nova.compute.manager [instance: d7a90be3-d3d6-4626-944b-b907cf7fb64d] _ensure_no_port_binding_failure(port) [ 616.070893] env[62070]: ERROR nova.compute.manager [instance: d7a90be3-d3d6-4626-944b-b907cf7fb64d] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 616.070893] env[62070]: ERROR nova.compute.manager [instance: d7a90be3-d3d6-4626-944b-b907cf7fb64d] raise exception.PortBindingFailed(port_id=port['id']) [ 616.074494] env[62070]: ERROR nova.compute.manager [instance: d7a90be3-d3d6-4626-944b-b907cf7fb64d] nova.exception.PortBindingFailed: Binding failed for port df0d0694-d2de-4148-8d6d-2700c2ec9a24, please check neutron logs for more information. [ 616.074494] env[62070]: ERROR nova.compute.manager [instance: d7a90be3-d3d6-4626-944b-b907cf7fb64d] [ 616.074494] env[62070]: INFO nova.compute.manager [None req-fceeb460-143a-4ad7-bde1-61f1f32cf593 tempest-VolumesAssistedSnapshotsTest-120562102 tempest-VolumesAssistedSnapshotsTest-120562102-project-member] [instance: d7a90be3-d3d6-4626-944b-b907cf7fb64d] Terminating instance [ 616.074494] env[62070]: DEBUG oslo_concurrency.lockutils [None req-fceeb460-143a-4ad7-bde1-61f1f32cf593 tempest-VolumesAssistedSnapshotsTest-120562102 tempest-VolumesAssistedSnapshotsTest-120562102-project-member] Acquiring lock "refresh_cache-d7a90be3-d3d6-4626-944b-b907cf7fb64d" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 616.092946] env[62070]: DEBUG nova.compute.utils [None req-df68df2e-4fc9-4497-809c-48ba87337e66 tempest-AttachInterfacesV270Test-562938472 tempest-AttachInterfacesV270Test-562938472-project-member] Using /dev/sd instead of None {{(pid=62070) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 616.094316] env[62070]: DEBUG nova.compute.manager [None req-df68df2e-4fc9-4497-809c-48ba87337e66 tempest-AttachInterfacesV270Test-562938472 tempest-AttachInterfacesV270Test-562938472-project-member] [instance: ad0dd218-5e45-4d22-9d94-5c25ba8b22ec] Allocating IP information in the background. {{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 616.094506] env[62070]: DEBUG nova.network.neutron [None req-df68df2e-4fc9-4497-809c-48ba87337e66 tempest-AttachInterfacesV270Test-562938472 tempest-AttachInterfacesV270Test-562938472-project-member] [instance: ad0dd218-5e45-4d22-9d94-5c25ba8b22ec] allocate_for_instance() {{(pid=62070) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 616.182320] env[62070]: DEBUG nova.policy [None req-df68df2e-4fc9-4497-809c-48ba87337e66 tempest-AttachInterfacesV270Test-562938472 tempest-AttachInterfacesV270Test-562938472-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'eb1a43a0e3c0432882886062fd96d238', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a94b093a153b4209bfa14ccc036e8e66', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62070) authorize /opt/stack/nova/nova/policy.py:201}} [ 616.238831] env[62070]: DEBUG nova.network.neutron [req-5583cfdd-9330-425d-9f17-8474cf26088d req-b7de5b9c-d2ef-40f6-9f3b-8c5e8619c672 service nova] [instance: d7a90be3-d3d6-4626-944b-b907cf7fb64d] Instance cache missing network info. 
{{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 616.409399] env[62070]: DEBUG nova.network.neutron [req-5583cfdd-9330-425d-9f17-8474cf26088d req-b7de5b9c-d2ef-40f6-9f3b-8c5e8619c672 service nova] [instance: d7a90be3-d3d6-4626-944b-b907cf7fb64d] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 616.486756] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-201d0f34-8909-4275-ac68-9bb309183bc8 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.495095] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20892be9-15b2-4287-ae70-c5199c6b397e {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.531232] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3be8ce7-4ac0-4ac3-bfe1-4ef9c8f01def {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.539017] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7dafadc2-d396-47bd-b5f9-7d31d029d47b {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.553104] env[62070]: DEBUG nova.compute.provider_tree [None req-1f16b43e-5915-487b-ba0a-4bf16e720213 tempest-ImagesNegativeTestJSON-1253036663 tempest-ImagesNegativeTestJSON-1253036663-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 616.602334] env[62070]: DEBUG nova.compute.manager [None req-df68df2e-4fc9-4497-809c-48ba87337e66 tempest-AttachInterfacesV270Test-562938472 tempest-AttachInterfacesV270Test-562938472-project-member] [instance: ad0dd218-5e45-4d22-9d94-5c25ba8b22ec] Start building block device mappings for instance. 
{{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 616.694522] env[62070]: DEBUG nova.network.neutron [None req-df68df2e-4fc9-4497-809c-48ba87337e66 tempest-AttachInterfacesV270Test-562938472 tempest-AttachInterfacesV270Test-562938472-project-member] [instance: ad0dd218-5e45-4d22-9d94-5c25ba8b22ec] Successfully created port: 1cce7a44-4a4c-4293-a7af-31a45ca20632 {{(pid=62070) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 616.913362] env[62070]: DEBUG oslo_concurrency.lockutils [req-5583cfdd-9330-425d-9f17-8474cf26088d req-b7de5b9c-d2ef-40f6-9f3b-8c5e8619c672 service nova] Releasing lock "refresh_cache-d7a90be3-d3d6-4626-944b-b907cf7fb64d" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 616.914101] env[62070]: DEBUG oslo_concurrency.lockutils [None req-fceeb460-143a-4ad7-bde1-61f1f32cf593 tempest-VolumesAssistedSnapshotsTest-120562102 tempest-VolumesAssistedSnapshotsTest-120562102-project-member] Acquired lock "refresh_cache-d7a90be3-d3d6-4626-944b-b907cf7fb64d" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 616.914301] env[62070]: DEBUG nova.network.neutron [None req-fceeb460-143a-4ad7-bde1-61f1f32cf593 tempest-VolumesAssistedSnapshotsTest-120562102 tempest-VolumesAssistedSnapshotsTest-120562102-project-member] [instance: d7a90be3-d3d6-4626-944b-b907cf7fb64d] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 617.057067] env[62070]: DEBUG nova.scheduler.client.report [None req-1f16b43e-5915-487b-ba0a-4bf16e720213 tempest-ImagesNegativeTestJSON-1253036663 tempest-ImagesNegativeTestJSON-1253036663-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 617.436432] env[62070]: DEBUG nova.network.neutron [None req-fceeb460-143a-4ad7-bde1-61f1f32cf593 tempest-VolumesAssistedSnapshotsTest-120562102 tempest-VolumesAssistedSnapshotsTest-120562102-project-member] [instance: d7a90be3-d3d6-4626-944b-b907cf7fb64d] Instance cache missing network info. 
{{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 617.564026] env[62070]: DEBUG oslo_concurrency.lockutils [None req-1f16b43e-5915-487b-ba0a-4bf16e720213 tempest-ImagesNegativeTestJSON-1253036663 tempest-ImagesNegativeTestJSON-1253036663-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.975s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 617.564765] env[62070]: ERROR nova.compute.manager [None req-1f16b43e-5915-487b-ba0a-4bf16e720213 tempest-ImagesNegativeTestJSON-1253036663 tempest-ImagesNegativeTestJSON-1253036663-project-member] [instance: 88251634-8add-4216-b789-dfee77a1ae09] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 00c97619-997b-46ca-bd58-aa05d59eb0f7, please check neutron logs for more information. [ 617.564765] env[62070]: ERROR nova.compute.manager [instance: 88251634-8add-4216-b789-dfee77a1ae09] Traceback (most recent call last): [ 617.564765] env[62070]: ERROR nova.compute.manager [instance: 88251634-8add-4216-b789-dfee77a1ae09] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 617.564765] env[62070]: ERROR nova.compute.manager [instance: 88251634-8add-4216-b789-dfee77a1ae09] self.driver.spawn(context, instance, image_meta, [ 617.564765] env[62070]: ERROR nova.compute.manager [instance: 88251634-8add-4216-b789-dfee77a1ae09] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 617.564765] env[62070]: ERROR nova.compute.manager [instance: 88251634-8add-4216-b789-dfee77a1ae09] self._vmops.spawn(context, instance, image_meta, injected_files, [ 617.564765] env[62070]: ERROR nova.compute.manager [instance: 88251634-8add-4216-b789-dfee77a1ae09] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 617.564765] env[62070]: ERROR nova.compute.manager [instance: 88251634-8add-4216-b789-dfee77a1ae09] vm_ref = self.build_virtual_machine(instance, [ 617.564765] env[62070]: ERROR nova.compute.manager [instance: 88251634-8add-4216-b789-dfee77a1ae09] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 617.564765] env[62070]: ERROR nova.compute.manager [instance: 88251634-8add-4216-b789-dfee77a1ae09] vif_infos = vmwarevif.get_vif_info(self._session, [ 617.564765] env[62070]: ERROR nova.compute.manager [instance: 88251634-8add-4216-b789-dfee77a1ae09] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 617.565217] env[62070]: ERROR nova.compute.manager [instance: 88251634-8add-4216-b789-dfee77a1ae09] for vif in network_info: [ 617.565217] env[62070]: ERROR nova.compute.manager [instance: 88251634-8add-4216-b789-dfee77a1ae09] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 617.565217] env[62070]: ERROR nova.compute.manager [instance: 88251634-8add-4216-b789-dfee77a1ae09] return self._sync_wrapper(fn, *args, **kwargs) [ 617.565217] env[62070]: ERROR nova.compute.manager [instance: 88251634-8add-4216-b789-dfee77a1ae09] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 617.565217] env[62070]: ERROR nova.compute.manager [instance: 88251634-8add-4216-b789-dfee77a1ae09] self.wait() [ 617.565217] env[62070]: ERROR nova.compute.manager [instance: 88251634-8add-4216-b789-dfee77a1ae09] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 617.565217] env[62070]: ERROR 
nova.compute.manager [instance: 88251634-8add-4216-b789-dfee77a1ae09] self[:] = self._gt.wait() [ 617.565217] env[62070]: ERROR nova.compute.manager [instance: 88251634-8add-4216-b789-dfee77a1ae09] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 617.565217] env[62070]: ERROR nova.compute.manager [instance: 88251634-8add-4216-b789-dfee77a1ae09] return self._exit_event.wait() [ 617.565217] env[62070]: ERROR nova.compute.manager [instance: 88251634-8add-4216-b789-dfee77a1ae09] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 617.565217] env[62070]: ERROR nova.compute.manager [instance: 88251634-8add-4216-b789-dfee77a1ae09] result = hub.switch() [ 617.565217] env[62070]: ERROR nova.compute.manager [instance: 88251634-8add-4216-b789-dfee77a1ae09] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 617.565217] env[62070]: ERROR nova.compute.manager [instance: 88251634-8add-4216-b789-dfee77a1ae09] return self.greenlet.switch() [ 617.565666] env[62070]: ERROR nova.compute.manager [instance: 88251634-8add-4216-b789-dfee77a1ae09] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 617.565666] env[62070]: ERROR nova.compute.manager [instance: 88251634-8add-4216-b789-dfee77a1ae09] result = function(*args, **kwargs) [ 617.565666] env[62070]: ERROR nova.compute.manager [instance: 88251634-8add-4216-b789-dfee77a1ae09] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 617.565666] env[62070]: ERROR nova.compute.manager [instance: 88251634-8add-4216-b789-dfee77a1ae09] return func(*args, **kwargs) [ 617.565666] env[62070]: ERROR nova.compute.manager [instance: 88251634-8add-4216-b789-dfee77a1ae09] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 617.565666] env[62070]: ERROR nova.compute.manager [instance: 88251634-8add-4216-b789-dfee77a1ae09] raise e [ 617.565666] env[62070]: ERROR nova.compute.manager [instance: 88251634-8add-4216-b789-dfee77a1ae09] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 617.565666] env[62070]: ERROR nova.compute.manager [instance: 88251634-8add-4216-b789-dfee77a1ae09] nwinfo = self.network_api.allocate_for_instance( [ 617.565666] env[62070]: ERROR nova.compute.manager [instance: 88251634-8add-4216-b789-dfee77a1ae09] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 617.565666] env[62070]: ERROR nova.compute.manager [instance: 88251634-8add-4216-b789-dfee77a1ae09] created_port_ids = self._update_ports_for_instance( [ 617.565666] env[62070]: ERROR nova.compute.manager [instance: 88251634-8add-4216-b789-dfee77a1ae09] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 617.565666] env[62070]: ERROR nova.compute.manager [instance: 88251634-8add-4216-b789-dfee77a1ae09] with excutils.save_and_reraise_exception(): [ 617.565666] env[62070]: ERROR nova.compute.manager [instance: 88251634-8add-4216-b789-dfee77a1ae09] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 617.566112] env[62070]: ERROR nova.compute.manager [instance: 88251634-8add-4216-b789-dfee77a1ae09] self.force_reraise() [ 617.566112] env[62070]: ERROR nova.compute.manager [instance: 88251634-8add-4216-b789-dfee77a1ae09] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 
200, in force_reraise [ 617.566112] env[62070]: ERROR nova.compute.manager [instance: 88251634-8add-4216-b789-dfee77a1ae09] raise self.value [ 617.566112] env[62070]: ERROR nova.compute.manager [instance: 88251634-8add-4216-b789-dfee77a1ae09] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 617.566112] env[62070]: ERROR nova.compute.manager [instance: 88251634-8add-4216-b789-dfee77a1ae09] updated_port = self._update_port( [ 617.566112] env[62070]: ERROR nova.compute.manager [instance: 88251634-8add-4216-b789-dfee77a1ae09] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 617.566112] env[62070]: ERROR nova.compute.manager [instance: 88251634-8add-4216-b789-dfee77a1ae09] _ensure_no_port_binding_failure(port) [ 617.566112] env[62070]: ERROR nova.compute.manager [instance: 88251634-8add-4216-b789-dfee77a1ae09] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 617.566112] env[62070]: ERROR nova.compute.manager [instance: 88251634-8add-4216-b789-dfee77a1ae09] raise exception.PortBindingFailed(port_id=port['id']) [ 617.566112] env[62070]: ERROR nova.compute.manager [instance: 88251634-8add-4216-b789-dfee77a1ae09] nova.exception.PortBindingFailed: Binding failed for port 00c97619-997b-46ca-bd58-aa05d59eb0f7, please check neutron logs for more information. [ 617.566112] env[62070]: ERROR nova.compute.manager [instance: 88251634-8add-4216-b789-dfee77a1ae09] [ 617.566546] env[62070]: DEBUG nova.compute.utils [None req-1f16b43e-5915-487b-ba0a-4bf16e720213 tempest-ImagesNegativeTestJSON-1253036663 tempest-ImagesNegativeTestJSON-1253036663-project-member] [instance: 88251634-8add-4216-b789-dfee77a1ae09] Binding failed for port 00c97619-997b-46ca-bd58-aa05d59eb0f7, please check neutron logs for more information. {{(pid=62070) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 617.567716] env[62070]: DEBUG oslo_concurrency.lockutils [None req-ae386fad-6b3a-4fae-9284-28954bd3da74 tempest-ServersTestJSON-1024696343 tempest-ServersTestJSON-1024696343-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.446s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 617.571120] env[62070]: INFO nova.compute.claims [None req-ae386fad-6b3a-4fae-9284-28954bd3da74 tempest-ServersTestJSON-1024696343 tempest-ServersTestJSON-1024696343-project-member] [instance: 3ee4e051-f51d-4840-a918-fdedad020557] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 617.574325] env[62070]: DEBUG nova.network.neutron [None req-fceeb460-143a-4ad7-bde1-61f1f32cf593 tempest-VolumesAssistedSnapshotsTest-120562102 tempest-VolumesAssistedSnapshotsTest-120562102-project-member] [instance: d7a90be3-d3d6-4626-944b-b907cf7fb64d] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 617.575733] env[62070]: DEBUG nova.compute.manager [None req-1f16b43e-5915-487b-ba0a-4bf16e720213 tempest-ImagesNegativeTestJSON-1253036663 tempest-ImagesNegativeTestJSON-1253036663-project-member] [instance: 88251634-8add-4216-b789-dfee77a1ae09] Build of instance 88251634-8add-4216-b789-dfee77a1ae09 was re-scheduled: Binding failed for port 00c97619-997b-46ca-bd58-aa05d59eb0f7, please check neutron logs for more information. 
{{(pid=62070) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 617.580250] env[62070]: DEBUG nova.compute.manager [None req-1f16b43e-5915-487b-ba0a-4bf16e720213 tempest-ImagesNegativeTestJSON-1253036663 tempest-ImagesNegativeTestJSON-1253036663-project-member] [instance: 88251634-8add-4216-b789-dfee77a1ae09] Unplugging VIFs for instance {{(pid=62070) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 617.580485] env[62070]: DEBUG oslo_concurrency.lockutils [None req-1f16b43e-5915-487b-ba0a-4bf16e720213 tempest-ImagesNegativeTestJSON-1253036663 tempest-ImagesNegativeTestJSON-1253036663-project-member] Acquiring lock "refresh_cache-88251634-8add-4216-b789-dfee77a1ae09" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 617.580630] env[62070]: DEBUG oslo_concurrency.lockutils [None req-1f16b43e-5915-487b-ba0a-4bf16e720213 tempest-ImagesNegativeTestJSON-1253036663 tempest-ImagesNegativeTestJSON-1253036663-project-member] Acquired lock "refresh_cache-88251634-8add-4216-b789-dfee77a1ae09" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 617.580825] env[62070]: DEBUG nova.network.neutron [None req-1f16b43e-5915-487b-ba0a-4bf16e720213 tempest-ImagesNegativeTestJSON-1253036663 tempest-ImagesNegativeTestJSON-1253036663-project-member] [instance: 88251634-8add-4216-b789-dfee77a1ae09] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 617.617669] env[62070]: DEBUG nova.compute.manager [None req-df68df2e-4fc9-4497-809c-48ba87337e66 tempest-AttachInterfacesV270Test-562938472 tempest-AttachInterfacesV270Test-562938472-project-member] [instance: ad0dd218-5e45-4d22-9d94-5c25ba8b22ec] Start spawning the instance on the hypervisor. 
{{(pid=62070) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 617.642813] env[62070]: DEBUG nova.virt.hardware [None req-df68df2e-4fc9-4497-809c-48ba87337e66 tempest-AttachInterfacesV270Test-562938472 tempest-AttachInterfacesV270Test-562938472-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T09:21:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T09:21:17Z,direct_url=,disk_format='vmdk',id=43ea607c-7ece-4601-9b11-75c6a16aa7dd,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9d42cb2bbadf40d6b35f237f71234611',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T09:21:18Z,virtual_size=,visibility=), allow threads: False {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 617.643099] env[62070]: DEBUG nova.virt.hardware [None req-df68df2e-4fc9-4497-809c-48ba87337e66 tempest-AttachInterfacesV270Test-562938472 tempest-AttachInterfacesV270Test-562938472-project-member] Flavor limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 617.643266] env[62070]: DEBUG nova.virt.hardware [None req-df68df2e-4fc9-4497-809c-48ba87337e66 tempest-AttachInterfacesV270Test-562938472 tempest-AttachInterfacesV270Test-562938472-project-member] Image limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 617.643449] env[62070]: DEBUG nova.virt.hardware [None req-df68df2e-4fc9-4497-809c-48ba87337e66 tempest-AttachInterfacesV270Test-562938472 tempest-AttachInterfacesV270Test-562938472-project-member] Flavor pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 617.643594] env[62070]: DEBUG nova.virt.hardware [None req-df68df2e-4fc9-4497-809c-48ba87337e66 tempest-AttachInterfacesV270Test-562938472 tempest-AttachInterfacesV270Test-562938472-project-member] Image pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 617.643734] env[62070]: DEBUG nova.virt.hardware [None req-df68df2e-4fc9-4497-809c-48ba87337e66 tempest-AttachInterfacesV270Test-562938472 tempest-AttachInterfacesV270Test-562938472-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 617.643936] env[62070]: DEBUG nova.virt.hardware [None req-df68df2e-4fc9-4497-809c-48ba87337e66 tempest-AttachInterfacesV270Test-562938472 tempest-AttachInterfacesV270Test-562938472-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 617.644183] env[62070]: DEBUG nova.virt.hardware [None req-df68df2e-4fc9-4497-809c-48ba87337e66 tempest-AttachInterfacesV270Test-562938472 tempest-AttachInterfacesV270Test-562938472-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 617.644390] env[62070]: DEBUG 
nova.virt.hardware [None req-df68df2e-4fc9-4497-809c-48ba87337e66 tempest-AttachInterfacesV270Test-562938472 tempest-AttachInterfacesV270Test-562938472-project-member] Got 1 possible topologies {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 617.644765] env[62070]: DEBUG nova.virt.hardware [None req-df68df2e-4fc9-4497-809c-48ba87337e66 tempest-AttachInterfacesV270Test-562938472 tempest-AttachInterfacesV270Test-562938472-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 617.644850] env[62070]: DEBUG nova.virt.hardware [None req-df68df2e-4fc9-4497-809c-48ba87337e66 tempest-AttachInterfacesV270Test-562938472 tempest-AttachInterfacesV270Test-562938472-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 617.645974] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c15bc67-afd7-4c40-b103-a68f021074a9 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.656505] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71c3454f-6d0a-4f91-a5d9-323460a5e54b {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.848699] env[62070]: DEBUG nova.compute.manager [req-7f6fa6d0-d61f-4f7c-8bfa-81ada6c9d0da req-e5065a00-06d6-47e9-8336-b63daa00a718 service nova] [instance: d7a90be3-d3d6-4626-944b-b907cf7fb64d] Received event network-vif-deleted-df0d0694-d2de-4148-8d6d-2700c2ec9a24 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 618.087528] env[62070]: DEBUG oslo_concurrency.lockutils [None req-fceeb460-143a-4ad7-bde1-61f1f32cf593 tempest-VolumesAssistedSnapshotsTest-120562102 tempest-VolumesAssistedSnapshotsTest-120562102-project-member] Releasing lock "refresh_cache-d7a90be3-d3d6-4626-944b-b907cf7fb64d" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 618.087528] env[62070]: DEBUG nova.compute.manager [None req-fceeb460-143a-4ad7-bde1-61f1f32cf593 tempest-VolumesAssistedSnapshotsTest-120562102 tempest-VolumesAssistedSnapshotsTest-120562102-project-member] [instance: d7a90be3-d3d6-4626-944b-b907cf7fb64d] Start destroying the instance on the hypervisor. 
{{(pid=62070) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 618.087528] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-fceeb460-143a-4ad7-bde1-61f1f32cf593 tempest-VolumesAssistedSnapshotsTest-120562102 tempest-VolumesAssistedSnapshotsTest-120562102-project-member] [instance: d7a90be3-d3d6-4626-944b-b907cf7fb64d] Destroying instance {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 618.089095] env[62070]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-82fc0791-4136-4d4c-b2c5-beaf5df85d7f {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.101404] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d8a6e7f-6bae-44f1-86f4-8097a43071b9 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.119173] env[62070]: ERROR nova.compute.manager [None req-df68df2e-4fc9-4497-809c-48ba87337e66 tempest-AttachInterfacesV270Test-562938472 tempest-AttachInterfacesV270Test-562938472-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 1cce7a44-4a4c-4293-a7af-31a45ca20632, please check neutron logs for more information. [ 618.119173] env[62070]: ERROR nova.compute.manager Traceback (most recent call last): [ 618.119173] env[62070]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 618.119173] env[62070]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 618.119173] env[62070]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 618.119173] env[62070]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 618.119173] env[62070]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 618.119173] env[62070]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 618.119173] env[62070]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 618.119173] env[62070]: ERROR nova.compute.manager self.force_reraise() [ 618.119173] env[62070]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 618.119173] env[62070]: ERROR nova.compute.manager raise self.value [ 618.119173] env[62070]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 618.119173] env[62070]: ERROR nova.compute.manager updated_port = self._update_port( [ 618.119173] env[62070]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 618.119173] env[62070]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 618.119713] env[62070]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 618.119713] env[62070]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 618.119713] env[62070]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 1cce7a44-4a4c-4293-a7af-31a45ca20632, please check neutron logs for more information. 
[ 618.119713] env[62070]: ERROR nova.compute.manager [ 618.119713] env[62070]: Traceback (most recent call last): [ 618.119713] env[62070]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 618.119713] env[62070]: listener.cb(fileno) [ 618.119713] env[62070]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 618.119713] env[62070]: result = function(*args, **kwargs) [ 618.119713] env[62070]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 618.119713] env[62070]: return func(*args, **kwargs) [ 618.119713] env[62070]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 618.119713] env[62070]: raise e [ 618.119713] env[62070]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 618.119713] env[62070]: nwinfo = self.network_api.allocate_for_instance( [ 618.119713] env[62070]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 618.119713] env[62070]: created_port_ids = self._update_ports_for_instance( [ 618.119713] env[62070]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 618.119713] env[62070]: with excutils.save_and_reraise_exception(): [ 618.119713] env[62070]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 618.119713] env[62070]: self.force_reraise() [ 618.119713] env[62070]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 618.119713] env[62070]: raise self.value [ 618.119713] env[62070]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 618.119713] env[62070]: updated_port = self._update_port( [ 618.119713] env[62070]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 618.119713] env[62070]: _ensure_no_port_binding_failure(port) [ 618.119713] env[62070]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 618.119713] env[62070]: raise exception.PortBindingFailed(port_id=port['id']) [ 618.120553] env[62070]: nova.exception.PortBindingFailed: Binding failed for port 1cce7a44-4a4c-4293-a7af-31a45ca20632, please check neutron logs for more information. [ 618.120553] env[62070]: Removing descriptor: 14 [ 618.120553] env[62070]: ERROR nova.compute.manager [None req-df68df2e-4fc9-4497-809c-48ba87337e66 tempest-AttachInterfacesV270Test-562938472 tempest-AttachInterfacesV270Test-562938472-project-member] [instance: ad0dd218-5e45-4d22-9d94-5c25ba8b22ec] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 1cce7a44-4a4c-4293-a7af-31a45ca20632, please check neutron logs for more information. 
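Note on the middle frames of the traceback above (eventlet/hubs/poll.py, eventlet/greenthread.py, nova/utils.py context_wrapper, _allocate_network_async): port allocation runs in an eventlet greenthread, so the failure only surfaces when the spawn path first waits on the network_info result. The sketch below is only an illustration of that pattern under stated assumptions; allocate_ports() is a made-up stand-in, not Nova code.

    import eventlet


    def allocate_ports():
        # Stand-in for the background allocation; the real code raises
        # nova.exception.PortBindingFailed when Neutron reports a failed binding.
        raise RuntimeError(
            "Binding failed for port 1cce7a44-4a4c-4293-a7af-31a45ca20632")


    # Allocation is kicked off in the background while spawn() keeps working.
    gt = eventlet.spawn(allocate_ports)

    # Later, waiting on the result re-raises whatever the greenthread raised,
    # which is what produces the second copy of the traceback above.
    try:
        network_info = gt.wait()
    except RuntimeError as exc:
        print(f"Instance failed to spawn: {exc}")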
[ 618.120553] env[62070]: ERROR nova.compute.manager [instance: ad0dd218-5e45-4d22-9d94-5c25ba8b22ec] Traceback (most recent call last): [ 618.120553] env[62070]: ERROR nova.compute.manager [instance: ad0dd218-5e45-4d22-9d94-5c25ba8b22ec] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 618.120553] env[62070]: ERROR nova.compute.manager [instance: ad0dd218-5e45-4d22-9d94-5c25ba8b22ec] yield resources [ 618.120553] env[62070]: ERROR nova.compute.manager [instance: ad0dd218-5e45-4d22-9d94-5c25ba8b22ec] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 618.120553] env[62070]: ERROR nova.compute.manager [instance: ad0dd218-5e45-4d22-9d94-5c25ba8b22ec] self.driver.spawn(context, instance, image_meta, [ 618.120553] env[62070]: ERROR nova.compute.manager [instance: ad0dd218-5e45-4d22-9d94-5c25ba8b22ec] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 618.120553] env[62070]: ERROR nova.compute.manager [instance: ad0dd218-5e45-4d22-9d94-5c25ba8b22ec] self._vmops.spawn(context, instance, image_meta, injected_files, [ 618.120553] env[62070]: ERROR nova.compute.manager [instance: ad0dd218-5e45-4d22-9d94-5c25ba8b22ec] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 618.120553] env[62070]: ERROR nova.compute.manager [instance: ad0dd218-5e45-4d22-9d94-5c25ba8b22ec] vm_ref = self.build_virtual_machine(instance, [ 618.120968] env[62070]: ERROR nova.compute.manager [instance: ad0dd218-5e45-4d22-9d94-5c25ba8b22ec] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 618.120968] env[62070]: ERROR nova.compute.manager [instance: ad0dd218-5e45-4d22-9d94-5c25ba8b22ec] vif_infos = vmwarevif.get_vif_info(self._session, [ 618.120968] env[62070]: ERROR nova.compute.manager [instance: ad0dd218-5e45-4d22-9d94-5c25ba8b22ec] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 618.120968] env[62070]: ERROR nova.compute.manager [instance: ad0dd218-5e45-4d22-9d94-5c25ba8b22ec] for vif in network_info: [ 618.120968] env[62070]: ERROR nova.compute.manager [instance: ad0dd218-5e45-4d22-9d94-5c25ba8b22ec] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 618.120968] env[62070]: ERROR nova.compute.manager [instance: ad0dd218-5e45-4d22-9d94-5c25ba8b22ec] return self._sync_wrapper(fn, *args, **kwargs) [ 618.120968] env[62070]: ERROR nova.compute.manager [instance: ad0dd218-5e45-4d22-9d94-5c25ba8b22ec] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 618.120968] env[62070]: ERROR nova.compute.manager [instance: ad0dd218-5e45-4d22-9d94-5c25ba8b22ec] self.wait() [ 618.120968] env[62070]: ERROR nova.compute.manager [instance: ad0dd218-5e45-4d22-9d94-5c25ba8b22ec] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 618.120968] env[62070]: ERROR nova.compute.manager [instance: ad0dd218-5e45-4d22-9d94-5c25ba8b22ec] self[:] = self._gt.wait() [ 618.120968] env[62070]: ERROR nova.compute.manager [instance: ad0dd218-5e45-4d22-9d94-5c25ba8b22ec] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 618.120968] env[62070]: ERROR nova.compute.manager [instance: ad0dd218-5e45-4d22-9d94-5c25ba8b22ec] return self._exit_event.wait() [ 618.120968] env[62070]: ERROR nova.compute.manager [instance: ad0dd218-5e45-4d22-9d94-5c25ba8b22ec] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 618.121396] env[62070]: ERROR 
nova.compute.manager [instance: ad0dd218-5e45-4d22-9d94-5c25ba8b22ec] result = hub.switch() [ 618.121396] env[62070]: ERROR nova.compute.manager [instance: ad0dd218-5e45-4d22-9d94-5c25ba8b22ec] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 618.121396] env[62070]: ERROR nova.compute.manager [instance: ad0dd218-5e45-4d22-9d94-5c25ba8b22ec] return self.greenlet.switch() [ 618.121396] env[62070]: ERROR nova.compute.manager [instance: ad0dd218-5e45-4d22-9d94-5c25ba8b22ec] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 618.121396] env[62070]: ERROR nova.compute.manager [instance: ad0dd218-5e45-4d22-9d94-5c25ba8b22ec] result = function(*args, **kwargs) [ 618.121396] env[62070]: ERROR nova.compute.manager [instance: ad0dd218-5e45-4d22-9d94-5c25ba8b22ec] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 618.121396] env[62070]: ERROR nova.compute.manager [instance: ad0dd218-5e45-4d22-9d94-5c25ba8b22ec] return func(*args, **kwargs) [ 618.121396] env[62070]: ERROR nova.compute.manager [instance: ad0dd218-5e45-4d22-9d94-5c25ba8b22ec] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 618.121396] env[62070]: ERROR nova.compute.manager [instance: ad0dd218-5e45-4d22-9d94-5c25ba8b22ec] raise e [ 618.121396] env[62070]: ERROR nova.compute.manager [instance: ad0dd218-5e45-4d22-9d94-5c25ba8b22ec] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 618.121396] env[62070]: ERROR nova.compute.manager [instance: ad0dd218-5e45-4d22-9d94-5c25ba8b22ec] nwinfo = self.network_api.allocate_for_instance( [ 618.121396] env[62070]: ERROR nova.compute.manager [instance: ad0dd218-5e45-4d22-9d94-5c25ba8b22ec] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 618.121396] env[62070]: ERROR nova.compute.manager [instance: ad0dd218-5e45-4d22-9d94-5c25ba8b22ec] created_port_ids = self._update_ports_for_instance( [ 618.121784] env[62070]: ERROR nova.compute.manager [instance: ad0dd218-5e45-4d22-9d94-5c25ba8b22ec] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 618.121784] env[62070]: ERROR nova.compute.manager [instance: ad0dd218-5e45-4d22-9d94-5c25ba8b22ec] with excutils.save_and_reraise_exception(): [ 618.121784] env[62070]: ERROR nova.compute.manager [instance: ad0dd218-5e45-4d22-9d94-5c25ba8b22ec] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 618.121784] env[62070]: ERROR nova.compute.manager [instance: ad0dd218-5e45-4d22-9d94-5c25ba8b22ec] self.force_reraise() [ 618.121784] env[62070]: ERROR nova.compute.manager [instance: ad0dd218-5e45-4d22-9d94-5c25ba8b22ec] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 618.121784] env[62070]: ERROR nova.compute.manager [instance: ad0dd218-5e45-4d22-9d94-5c25ba8b22ec] raise self.value [ 618.121784] env[62070]: ERROR nova.compute.manager [instance: ad0dd218-5e45-4d22-9d94-5c25ba8b22ec] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 618.121784] env[62070]: ERROR nova.compute.manager [instance: ad0dd218-5e45-4d22-9d94-5c25ba8b22ec] updated_port = self._update_port( [ 618.121784] env[62070]: ERROR nova.compute.manager [instance: ad0dd218-5e45-4d22-9d94-5c25ba8b22ec] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 618.121784] 
env[62070]: ERROR nova.compute.manager [instance: ad0dd218-5e45-4d22-9d94-5c25ba8b22ec] _ensure_no_port_binding_failure(port) [ 618.121784] env[62070]: ERROR nova.compute.manager [instance: ad0dd218-5e45-4d22-9d94-5c25ba8b22ec] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 618.121784] env[62070]: ERROR nova.compute.manager [instance: ad0dd218-5e45-4d22-9d94-5c25ba8b22ec] raise exception.PortBindingFailed(port_id=port['id']) [ 618.122152] env[62070]: ERROR nova.compute.manager [instance: ad0dd218-5e45-4d22-9d94-5c25ba8b22ec] nova.exception.PortBindingFailed: Binding failed for port 1cce7a44-4a4c-4293-a7af-31a45ca20632, please check neutron logs for more information. [ 618.122152] env[62070]: ERROR nova.compute.manager [instance: ad0dd218-5e45-4d22-9d94-5c25ba8b22ec] [ 618.122152] env[62070]: INFO nova.compute.manager [None req-df68df2e-4fc9-4497-809c-48ba87337e66 tempest-AttachInterfacesV270Test-562938472 tempest-AttachInterfacesV270Test-562938472-project-member] [instance: ad0dd218-5e45-4d22-9d94-5c25ba8b22ec] Terminating instance [ 618.123251] env[62070]: DEBUG oslo_concurrency.lockutils [None req-df68df2e-4fc9-4497-809c-48ba87337e66 tempest-AttachInterfacesV270Test-562938472 tempest-AttachInterfacesV270Test-562938472-project-member] Acquiring lock "refresh_cache-ad0dd218-5e45-4d22-9d94-5c25ba8b22ec" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 618.123251] env[62070]: DEBUG oslo_concurrency.lockutils [None req-df68df2e-4fc9-4497-809c-48ba87337e66 tempest-AttachInterfacesV270Test-562938472 tempest-AttachInterfacesV270Test-562938472-project-member] Acquired lock "refresh_cache-ad0dd218-5e45-4d22-9d94-5c25ba8b22ec" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 618.123415] env[62070]: DEBUG nova.network.neutron [None req-df68df2e-4fc9-4497-809c-48ba87337e66 tempest-AttachInterfacesV270Test-562938472 tempest-AttachInterfacesV270Test-562938472-project-member] [instance: ad0dd218-5e45-4d22-9d94-5c25ba8b22ec] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 618.128965] env[62070]: WARNING nova.virt.vmwareapi.vmops [None req-fceeb460-143a-4ad7-bde1-61f1f32cf593 tempest-VolumesAssistedSnapshotsTest-120562102 tempest-VolumesAssistedSnapshotsTest-120562102-project-member] [instance: d7a90be3-d3d6-4626-944b-b907cf7fb64d] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance d7a90be3-d3d6-4626-944b-b907cf7fb64d could not be found. [ 618.129173] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-fceeb460-143a-4ad7-bde1-61f1f32cf593 tempest-VolumesAssistedSnapshotsTest-120562102 tempest-VolumesAssistedSnapshotsTest-120562102-project-member] [instance: d7a90be3-d3d6-4626-944b-b907cf7fb64d] Instance destroyed {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 618.129345] env[62070]: INFO nova.compute.manager [None req-fceeb460-143a-4ad7-bde1-61f1f32cf593 tempest-VolumesAssistedSnapshotsTest-120562102 tempest-VolumesAssistedSnapshotsTest-120562102-project-member] [instance: d7a90be3-d3d6-4626-944b-b907cf7fb64d] Took 0.04 seconds to destroy the instance on the hypervisor. 
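The innermost frame in those tracebacks is _ensure_no_port_binding_failure in nova/network/neutron.py. A minimal sketch of that check, reconstructed only from the frames and the logged message; the exception body and the 'binding_failed' vif-type sentinel below are assumptions for illustration, not Nova source.

    class PortBindingFailed(Exception):
        """Raised when Neutron reports that a port's binding failed."""

        def __init__(self, port_id):
            super().__init__(
                f"Binding failed for port {port_id}, "
                "please check neutron logs for more information.")
            self.port_id = port_id


    VIF_TYPE_BINDING_FAILED = 'binding_failed'  # assumed sentinel value


    def _ensure_no_port_binding_failure(port):
        # Mirrors the last frame of the tracebacks above: if the port's
        # binding is marked as failed, raise PortBindingFailed for that port.
        if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
            raise PortBindingFailed(port_id=port['id'])


    # Example with a port dict shaped like a Neutron response after a
    # failed binding; prints the same message seen in the log records.
    try:
        _ensure_no_port_binding_failure({
            'id': '1cce7a44-4a4c-4293-a7af-31a45ca20632',
            'binding:vif_type': VIF_TYPE_BINDING_FAILED,
        })
    except PortBindingFailed as exc:
        print(exc)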
[ 618.129583] env[62070]: DEBUG oslo.service.loopingcall [None req-fceeb460-143a-4ad7-bde1-61f1f32cf593 tempest-VolumesAssistedSnapshotsTest-120562102 tempest-VolumesAssistedSnapshotsTest-120562102-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 618.131047] env[62070]: DEBUG nova.compute.manager [-] [instance: d7a90be3-d3d6-4626-944b-b907cf7fb64d] Deallocating network for instance {{(pid=62070) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 618.131575] env[62070]: DEBUG nova.network.neutron [-] [instance: d7a90be3-d3d6-4626-944b-b907cf7fb64d] deallocate_for_instance() {{(pid=62070) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 618.139913] env[62070]: DEBUG nova.network.neutron [None req-1f16b43e-5915-487b-ba0a-4bf16e720213 tempest-ImagesNegativeTestJSON-1253036663 tempest-ImagesNegativeTestJSON-1253036663-project-member] [instance: 88251634-8add-4216-b789-dfee77a1ae09] Instance cache missing network info. {{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 618.154059] env[62070]: DEBUG nova.network.neutron [-] [instance: d7a90be3-d3d6-4626-944b-b907cf7fb64d] Instance cache missing network info. {{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 618.273995] env[62070]: DEBUG nova.network.neutron [None req-1f16b43e-5915-487b-ba0a-4bf16e720213 tempest-ImagesNegativeTestJSON-1253036663 tempest-ImagesNegativeTestJSON-1253036663-project-member] [instance: 88251634-8add-4216-b789-dfee77a1ae09] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 618.657641] env[62070]: DEBUG nova.network.neutron [None req-df68df2e-4fc9-4497-809c-48ba87337e66 tempest-AttachInterfacesV270Test-562938472 tempest-AttachInterfacesV270Test-562938472-project-member] [instance: ad0dd218-5e45-4d22-9d94-5c25ba8b22ec] Instance cache missing network info. 
{{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 618.659461] env[62070]: DEBUG nova.network.neutron [-] [instance: d7a90be3-d3d6-4626-944b-b907cf7fb64d] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 618.775027] env[62070]: DEBUG nova.network.neutron [None req-df68df2e-4fc9-4497-809c-48ba87337e66 tempest-AttachInterfacesV270Test-562938472 tempest-AttachInterfacesV270Test-562938472-project-member] [instance: ad0dd218-5e45-4d22-9d94-5c25ba8b22ec] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 618.777820] env[62070]: DEBUG oslo_concurrency.lockutils [None req-1f16b43e-5915-487b-ba0a-4bf16e720213 tempest-ImagesNegativeTestJSON-1253036663 tempest-ImagesNegativeTestJSON-1253036663-project-member] Releasing lock "refresh_cache-88251634-8add-4216-b789-dfee77a1ae09" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 618.778058] env[62070]: DEBUG nova.compute.manager [None req-1f16b43e-5915-487b-ba0a-4bf16e720213 tempest-ImagesNegativeTestJSON-1253036663 tempest-ImagesNegativeTestJSON-1253036663-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62070) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 618.778246] env[62070]: DEBUG nova.compute.manager [None req-1f16b43e-5915-487b-ba0a-4bf16e720213 tempest-ImagesNegativeTestJSON-1253036663 tempest-ImagesNegativeTestJSON-1253036663-project-member] [instance: 88251634-8add-4216-b789-dfee77a1ae09] Deallocating network for instance {{(pid=62070) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 618.778410] env[62070]: DEBUG nova.network.neutron [None req-1f16b43e-5915-487b-ba0a-4bf16e720213 tempest-ImagesNegativeTestJSON-1253036663 tempest-ImagesNegativeTestJSON-1253036663-project-member] [instance: 88251634-8add-4216-b789-dfee77a1ae09] deallocate_for_instance() {{(pid=62070) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 618.813932] env[62070]: DEBUG nova.network.neutron [None req-1f16b43e-5915-487b-ba0a-4bf16e720213 tempest-ImagesNegativeTestJSON-1253036663 tempest-ImagesNegativeTestJSON-1253036663-project-member] [instance: 88251634-8add-4216-b789-dfee77a1ae09] Instance cache missing network info. 
{{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 619.056252] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f17e28ca-0d6a-4c35-a8bc-b4b77a15854d {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.064128] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59fdb9cc-c148-4bde-bdb1-10d70183e830 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.100210] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6c8d88b-7890-4fd1-a969-2cd000d87e46 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.109199] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27743499-88dd-4438-93df-d48128121a31 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.122337] env[62070]: DEBUG nova.compute.provider_tree [None req-ae386fad-6b3a-4fae-9284-28954bd3da74 tempest-ServersTestJSON-1024696343 tempest-ServersTestJSON-1024696343-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 619.167095] env[62070]: INFO nova.compute.manager [-] [instance: d7a90be3-d3d6-4626-944b-b907cf7fb64d] Took 1.04 seconds to deallocate network for instance. [ 619.170698] env[62070]: DEBUG nova.compute.claims [None req-fceeb460-143a-4ad7-bde1-61f1f32cf593 tempest-VolumesAssistedSnapshotsTest-120562102 tempest-VolumesAssistedSnapshotsTest-120562102-project-member] [instance: d7a90be3-d3d6-4626-944b-b907cf7fb64d] Aborting claim: {{(pid=62070) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 619.170835] env[62070]: DEBUG oslo_concurrency.lockutils [None req-fceeb460-143a-4ad7-bde1-61f1f32cf593 tempest-VolumesAssistedSnapshotsTest-120562102 tempest-VolumesAssistedSnapshotsTest-120562102-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 619.280061] env[62070]: DEBUG oslo_concurrency.lockutils [None req-df68df2e-4fc9-4497-809c-48ba87337e66 tempest-AttachInterfacesV270Test-562938472 tempest-AttachInterfacesV270Test-562938472-project-member] Releasing lock "refresh_cache-ad0dd218-5e45-4d22-9d94-5c25ba8b22ec" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 619.280061] env[62070]: DEBUG nova.compute.manager [None req-df68df2e-4fc9-4497-809c-48ba87337e66 tempest-AttachInterfacesV270Test-562938472 tempest-AttachInterfacesV270Test-562938472-project-member] [instance: ad0dd218-5e45-4d22-9d94-5c25ba8b22ec] Start destroying the instance on the hypervisor. 
{{(pid=62070) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 619.280061] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-df68df2e-4fc9-4497-809c-48ba87337e66 tempest-AttachInterfacesV270Test-562938472 tempest-AttachInterfacesV270Test-562938472-project-member] [instance: ad0dd218-5e45-4d22-9d94-5c25ba8b22ec] Destroying instance {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 619.280061] env[62070]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6f3d99cd-98bd-4d3b-9dcb-545765e2fd8b {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.289578] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0430eeeb-34f9-409c-8345-cec810a932f5 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.312868] env[62070]: WARNING nova.virt.vmwareapi.vmops [None req-df68df2e-4fc9-4497-809c-48ba87337e66 tempest-AttachInterfacesV270Test-562938472 tempest-AttachInterfacesV270Test-562938472-project-member] [instance: ad0dd218-5e45-4d22-9d94-5c25ba8b22ec] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance ad0dd218-5e45-4d22-9d94-5c25ba8b22ec could not be found. [ 619.313112] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-df68df2e-4fc9-4497-809c-48ba87337e66 tempest-AttachInterfacesV270Test-562938472 tempest-AttachInterfacesV270Test-562938472-project-member] [instance: ad0dd218-5e45-4d22-9d94-5c25ba8b22ec] Instance destroyed {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 619.313295] env[62070]: INFO nova.compute.manager [None req-df68df2e-4fc9-4497-809c-48ba87337e66 tempest-AttachInterfacesV270Test-562938472 tempest-AttachInterfacesV270Test-562938472-project-member] [instance: ad0dd218-5e45-4d22-9d94-5c25ba8b22ec] Took 0.03 seconds to destroy the instance on the hypervisor. [ 619.313648] env[62070]: DEBUG oslo.service.loopingcall [None req-df68df2e-4fc9-4497-809c-48ba87337e66 tempest-AttachInterfacesV270Test-562938472 tempest-AttachInterfacesV270Test-562938472-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 619.313881] env[62070]: DEBUG nova.compute.manager [-] [instance: ad0dd218-5e45-4d22-9d94-5c25ba8b22ec] Deallocating network for instance {{(pid=62070) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 619.313881] env[62070]: DEBUG nova.network.neutron [-] [instance: ad0dd218-5e45-4d22-9d94-5c25ba8b22ec] deallocate_for_instance() {{(pid=62070) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 619.320177] env[62070]: DEBUG nova.network.neutron [None req-1f16b43e-5915-487b-ba0a-4bf16e720213 tempest-ImagesNegativeTestJSON-1253036663 tempest-ImagesNegativeTestJSON-1253036663-project-member] [instance: 88251634-8add-4216-b789-dfee77a1ae09] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 619.339274] env[62070]: DEBUG nova.network.neutron [-] [instance: ad0dd218-5e45-4d22-9d94-5c25ba8b22ec] Instance cache missing network info. 
{{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 619.627907] env[62070]: DEBUG nova.scheduler.client.report [None req-ae386fad-6b3a-4fae-9284-28954bd3da74 tempest-ServersTestJSON-1024696343 tempest-ServersTestJSON-1024696343-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 619.823378] env[62070]: INFO nova.compute.manager [None req-1f16b43e-5915-487b-ba0a-4bf16e720213 tempest-ImagesNegativeTestJSON-1253036663 tempest-ImagesNegativeTestJSON-1253036663-project-member] [instance: 88251634-8add-4216-b789-dfee77a1ae09] Took 1.04 seconds to deallocate network for instance. [ 619.841998] env[62070]: DEBUG nova.network.neutron [-] [instance: ad0dd218-5e45-4d22-9d94-5c25ba8b22ec] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 620.010773] env[62070]: DEBUG nova.compute.manager [req-20b63f2f-48c0-45c1-b951-66886638f53f req-87f2e399-b91f-446e-a464-1928423d4380 service nova] [instance: ad0dd218-5e45-4d22-9d94-5c25ba8b22ec] Received event network-changed-1cce7a44-4a4c-4293-a7af-31a45ca20632 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 620.010956] env[62070]: DEBUG nova.compute.manager [req-20b63f2f-48c0-45c1-b951-66886638f53f req-87f2e399-b91f-446e-a464-1928423d4380 service nova] [instance: ad0dd218-5e45-4d22-9d94-5c25ba8b22ec] Refreshing instance network info cache due to event network-changed-1cce7a44-4a4c-4293-a7af-31a45ca20632. 
{{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 620.011187] env[62070]: DEBUG oslo_concurrency.lockutils [req-20b63f2f-48c0-45c1-b951-66886638f53f req-87f2e399-b91f-446e-a464-1928423d4380 service nova] Acquiring lock "refresh_cache-ad0dd218-5e45-4d22-9d94-5c25ba8b22ec" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 620.011321] env[62070]: DEBUG oslo_concurrency.lockutils [req-20b63f2f-48c0-45c1-b951-66886638f53f req-87f2e399-b91f-446e-a464-1928423d4380 service nova] Acquired lock "refresh_cache-ad0dd218-5e45-4d22-9d94-5c25ba8b22ec" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 620.011499] env[62070]: DEBUG nova.network.neutron [req-20b63f2f-48c0-45c1-b951-66886638f53f req-87f2e399-b91f-446e-a464-1928423d4380 service nova] [instance: ad0dd218-5e45-4d22-9d94-5c25ba8b22ec] Refreshing network info cache for port 1cce7a44-4a4c-4293-a7af-31a45ca20632 {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 620.132504] env[62070]: DEBUG oslo_concurrency.lockutils [None req-ae386fad-6b3a-4fae-9284-28954bd3da74 tempest-ServersTestJSON-1024696343 tempest-ServersTestJSON-1024696343-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.565s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 620.133049] env[62070]: DEBUG nova.compute.manager [None req-ae386fad-6b3a-4fae-9284-28954bd3da74 tempest-ServersTestJSON-1024696343 tempest-ServersTestJSON-1024696343-project-member] [instance: 3ee4e051-f51d-4840-a918-fdedad020557] Start building networks asynchronously for instance. {{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 620.135877] env[62070]: DEBUG oslo_concurrency.lockutils [None req-3399fbf7-c899-4cd9-8ca2-2d4fdce15ab9 tempest-InstanceActionsNegativeTestJSON-1068104952 tempest-InstanceActionsNegativeTestJSON-1068104952-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 16.899s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 620.346802] env[62070]: INFO nova.compute.manager [-] [instance: ad0dd218-5e45-4d22-9d94-5c25ba8b22ec] Took 1.03 seconds to deallocate network for instance. [ 620.350875] env[62070]: DEBUG nova.compute.claims [None req-df68df2e-4fc9-4497-809c-48ba87337e66 tempest-AttachInterfacesV270Test-562938472 tempest-AttachInterfacesV270Test-562938472-project-member] [instance: ad0dd218-5e45-4d22-9d94-5c25ba8b22ec] Aborting claim: {{(pid=62070) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 620.351316] env[62070]: DEBUG oslo_concurrency.lockutils [None req-df68df2e-4fc9-4497-809c-48ba87337e66 tempest-AttachInterfacesV270Test-562938472 tempest-AttachInterfacesV270Test-562938472-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 620.533275] env[62070]: DEBUG nova.network.neutron [req-20b63f2f-48c0-45c1-b951-66886638f53f req-87f2e399-b91f-446e-a464-1928423d4380 service nova] [instance: ad0dd218-5e45-4d22-9d94-5c25ba8b22ec] Instance cache missing network info. 
{{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 620.647166] env[62070]: DEBUG nova.compute.utils [None req-ae386fad-6b3a-4fae-9284-28954bd3da74 tempest-ServersTestJSON-1024696343 tempest-ServersTestJSON-1024696343-project-member] Using /dev/sd instead of None {{(pid=62070) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 620.659141] env[62070]: DEBUG nova.compute.manager [None req-ae386fad-6b3a-4fae-9284-28954bd3da74 tempest-ServersTestJSON-1024696343 tempest-ServersTestJSON-1024696343-project-member] [instance: 3ee4e051-f51d-4840-a918-fdedad020557] Allocating IP information in the background. {{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 620.659141] env[62070]: DEBUG nova.network.neutron [None req-ae386fad-6b3a-4fae-9284-28954bd3da74 tempest-ServersTestJSON-1024696343 tempest-ServersTestJSON-1024696343-project-member] [instance: 3ee4e051-f51d-4840-a918-fdedad020557] allocate_for_instance() {{(pid=62070) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 620.661859] env[62070]: DEBUG nova.network.neutron [req-20b63f2f-48c0-45c1-b951-66886638f53f req-87f2e399-b91f-446e-a464-1928423d4380 service nova] [instance: ad0dd218-5e45-4d22-9d94-5c25ba8b22ec] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 620.749651] env[62070]: DEBUG nova.policy [None req-ae386fad-6b3a-4fae-9284-28954bd3da74 tempest-ServersTestJSON-1024696343 tempest-ServersTestJSON-1024696343-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8ee63bc0615746e59c53953733e9c500', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2bd021f028d241478d703c6b573de963', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62070) authorize /opt/stack/nova/nova/policy.py:201}} [ 620.872819] env[62070]: INFO nova.scheduler.client.report [None req-1f16b43e-5915-487b-ba0a-4bf16e720213 tempest-ImagesNegativeTestJSON-1253036663 tempest-ImagesNegativeTestJSON-1253036663-project-member] Deleted allocations for instance 88251634-8add-4216-b789-dfee77a1ae09 [ 621.117282] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-660ed994-7b7f-489d-b841-808ede8ef17c {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.126656] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3ccd27a-60a2-4157-959e-0040c7d244f6 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.163671] env[62070]: DEBUG nova.compute.manager [None req-ae386fad-6b3a-4fae-9284-28954bd3da74 tempest-ServersTestJSON-1024696343 tempest-ServersTestJSON-1024696343-project-member] [instance: 3ee4e051-f51d-4840-a918-fdedad020557] Start building block device mappings for instance. 
{{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 621.167543] env[62070]: DEBUG oslo_concurrency.lockutils [req-20b63f2f-48c0-45c1-b951-66886638f53f req-87f2e399-b91f-446e-a464-1928423d4380 service nova] Releasing lock "refresh_cache-ad0dd218-5e45-4d22-9d94-5c25ba8b22ec" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 621.168118] env[62070]: DEBUG nova.compute.manager [req-20b63f2f-48c0-45c1-b951-66886638f53f req-87f2e399-b91f-446e-a464-1928423d4380 service nova] [instance: ad0dd218-5e45-4d22-9d94-5c25ba8b22ec] Received event network-vif-deleted-1cce7a44-4a4c-4293-a7af-31a45ca20632 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 621.168879] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c69b2eef-88be-4a8a-a647-f8a5c18b9add {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.182278] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8eb0775-b183-47d2-8dfc-52a80f98f757 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.201986] env[62070]: DEBUG nova.compute.provider_tree [None req-3399fbf7-c899-4cd9-8ca2-2d4fdce15ab9 tempest-InstanceActionsNegativeTestJSON-1068104952 tempest-InstanceActionsNegativeTestJSON-1068104952-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 621.207329] env[62070]: DEBUG nova.network.neutron [None req-ae386fad-6b3a-4fae-9284-28954bd3da74 tempest-ServersTestJSON-1024696343 tempest-ServersTestJSON-1024696343-project-member] [instance: 3ee4e051-f51d-4840-a918-fdedad020557] Successfully created port: f047973c-4e00-4312-949c-099b3b04f342 {{(pid=62070) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 621.389803] env[62070]: DEBUG oslo_concurrency.lockutils [None req-1f16b43e-5915-487b-ba0a-4bf16e720213 tempest-ImagesNegativeTestJSON-1253036663 tempest-ImagesNegativeTestJSON-1253036663-project-member] Lock "88251634-8add-4216-b789-dfee77a1ae09" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 62.548s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 621.705412] env[62070]: DEBUG nova.scheduler.client.report [None req-3399fbf7-c899-4cd9-8ca2-2d4fdce15ab9 tempest-InstanceActionsNegativeTestJSON-1068104952 tempest-InstanceActionsNegativeTestJSON-1068104952-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 621.895277] env[62070]: DEBUG nova.compute.manager [None req-c926e67b-5387-49dc-b89c-40a189a7f3d7 tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] [instance: 
95edf3d1-a987-4768-93be-1e045d7bfa99] Starting instance... {{(pid=62070) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 622.185045] env[62070]: DEBUG nova.compute.manager [None req-ae386fad-6b3a-4fae-9284-28954bd3da74 tempest-ServersTestJSON-1024696343 tempest-ServersTestJSON-1024696343-project-member] [instance: 3ee4e051-f51d-4840-a918-fdedad020557] Start spawning the instance on the hypervisor. {{(pid=62070) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 622.218674] env[62070]: DEBUG oslo_concurrency.lockutils [None req-3399fbf7-c899-4cd9-8ca2-2d4fdce15ab9 tempest-InstanceActionsNegativeTestJSON-1068104952 tempest-InstanceActionsNegativeTestJSON-1068104952-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.080s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 622.218674] env[62070]: ERROR nova.compute.manager [None req-3399fbf7-c899-4cd9-8ca2-2d4fdce15ab9 tempest-InstanceActionsNegativeTestJSON-1068104952 tempest-InstanceActionsNegativeTestJSON-1068104952-project-member] [instance: 495a15b2-20bd-44d2-8020-816031e89832] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 247acd8a-3707-4fc6-a29f-66b81e3af752, please check neutron logs for more information. [ 622.218674] env[62070]: ERROR nova.compute.manager [instance: 495a15b2-20bd-44d2-8020-816031e89832] Traceback (most recent call last): [ 622.218674] env[62070]: ERROR nova.compute.manager [instance: 495a15b2-20bd-44d2-8020-816031e89832] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 622.218674] env[62070]: ERROR nova.compute.manager [instance: 495a15b2-20bd-44d2-8020-816031e89832] self.driver.spawn(context, instance, image_meta, [ 622.218674] env[62070]: ERROR nova.compute.manager [instance: 495a15b2-20bd-44d2-8020-816031e89832] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 622.218674] env[62070]: ERROR nova.compute.manager [instance: 495a15b2-20bd-44d2-8020-816031e89832] self._vmops.spawn(context, instance, image_meta, injected_files, [ 622.218674] env[62070]: ERROR nova.compute.manager [instance: 495a15b2-20bd-44d2-8020-816031e89832] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 622.218674] env[62070]: ERROR nova.compute.manager [instance: 495a15b2-20bd-44d2-8020-816031e89832] vm_ref = self.build_virtual_machine(instance, [ 622.219209] env[62070]: ERROR nova.compute.manager [instance: 495a15b2-20bd-44d2-8020-816031e89832] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 622.219209] env[62070]: ERROR nova.compute.manager [instance: 495a15b2-20bd-44d2-8020-816031e89832] vif_infos = vmwarevif.get_vif_info(self._session, [ 622.219209] env[62070]: ERROR nova.compute.manager [instance: 495a15b2-20bd-44d2-8020-816031e89832] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 622.219209] env[62070]: ERROR nova.compute.manager [instance: 495a15b2-20bd-44d2-8020-816031e89832] for vif in network_info: [ 622.219209] env[62070]: ERROR nova.compute.manager [instance: 495a15b2-20bd-44d2-8020-816031e89832] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 622.219209] env[62070]: ERROR nova.compute.manager [instance: 495a15b2-20bd-44d2-8020-816031e89832] return self._sync_wrapper(fn, *args, **kwargs) [ 622.219209] env[62070]: ERROR 
nova.compute.manager [instance: 495a15b2-20bd-44d2-8020-816031e89832] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 622.219209] env[62070]: ERROR nova.compute.manager [instance: 495a15b2-20bd-44d2-8020-816031e89832] self.wait() [ 622.219209] env[62070]: ERROR nova.compute.manager [instance: 495a15b2-20bd-44d2-8020-816031e89832] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 622.219209] env[62070]: ERROR nova.compute.manager [instance: 495a15b2-20bd-44d2-8020-816031e89832] self[:] = self._gt.wait() [ 622.219209] env[62070]: ERROR nova.compute.manager [instance: 495a15b2-20bd-44d2-8020-816031e89832] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 622.219209] env[62070]: ERROR nova.compute.manager [instance: 495a15b2-20bd-44d2-8020-816031e89832] return self._exit_event.wait() [ 622.219209] env[62070]: ERROR nova.compute.manager [instance: 495a15b2-20bd-44d2-8020-816031e89832] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 622.220111] env[62070]: ERROR nova.compute.manager [instance: 495a15b2-20bd-44d2-8020-816031e89832] result = hub.switch() [ 622.220111] env[62070]: ERROR nova.compute.manager [instance: 495a15b2-20bd-44d2-8020-816031e89832] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 622.220111] env[62070]: ERROR nova.compute.manager [instance: 495a15b2-20bd-44d2-8020-816031e89832] return self.greenlet.switch() [ 622.220111] env[62070]: ERROR nova.compute.manager [instance: 495a15b2-20bd-44d2-8020-816031e89832] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 622.220111] env[62070]: ERROR nova.compute.manager [instance: 495a15b2-20bd-44d2-8020-816031e89832] result = function(*args, **kwargs) [ 622.220111] env[62070]: ERROR nova.compute.manager [instance: 495a15b2-20bd-44d2-8020-816031e89832] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 622.220111] env[62070]: ERROR nova.compute.manager [instance: 495a15b2-20bd-44d2-8020-816031e89832] return func(*args, **kwargs) [ 622.220111] env[62070]: ERROR nova.compute.manager [instance: 495a15b2-20bd-44d2-8020-816031e89832] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 622.220111] env[62070]: ERROR nova.compute.manager [instance: 495a15b2-20bd-44d2-8020-816031e89832] raise e [ 622.220111] env[62070]: ERROR nova.compute.manager [instance: 495a15b2-20bd-44d2-8020-816031e89832] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 622.220111] env[62070]: ERROR nova.compute.manager [instance: 495a15b2-20bd-44d2-8020-816031e89832] nwinfo = self.network_api.allocate_for_instance( [ 622.220111] env[62070]: ERROR nova.compute.manager [instance: 495a15b2-20bd-44d2-8020-816031e89832] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 622.220111] env[62070]: ERROR nova.compute.manager [instance: 495a15b2-20bd-44d2-8020-816031e89832] created_port_ids = self._update_ports_for_instance( [ 622.220792] env[62070]: ERROR nova.compute.manager [instance: 495a15b2-20bd-44d2-8020-816031e89832] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 622.220792] env[62070]: ERROR nova.compute.manager [instance: 495a15b2-20bd-44d2-8020-816031e89832] with excutils.save_and_reraise_exception(): [ 622.220792] env[62070]: ERROR nova.compute.manager 
[instance: 495a15b2-20bd-44d2-8020-816031e89832] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 622.220792] env[62070]: ERROR nova.compute.manager [instance: 495a15b2-20bd-44d2-8020-816031e89832] self.force_reraise() [ 622.220792] env[62070]: ERROR nova.compute.manager [instance: 495a15b2-20bd-44d2-8020-816031e89832] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 622.220792] env[62070]: ERROR nova.compute.manager [instance: 495a15b2-20bd-44d2-8020-816031e89832] raise self.value [ 622.220792] env[62070]: ERROR nova.compute.manager [instance: 495a15b2-20bd-44d2-8020-816031e89832] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 622.220792] env[62070]: ERROR nova.compute.manager [instance: 495a15b2-20bd-44d2-8020-816031e89832] updated_port = self._update_port( [ 622.220792] env[62070]: ERROR nova.compute.manager [instance: 495a15b2-20bd-44d2-8020-816031e89832] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 622.220792] env[62070]: ERROR nova.compute.manager [instance: 495a15b2-20bd-44d2-8020-816031e89832] _ensure_no_port_binding_failure(port) [ 622.220792] env[62070]: ERROR nova.compute.manager [instance: 495a15b2-20bd-44d2-8020-816031e89832] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 622.220792] env[62070]: ERROR nova.compute.manager [instance: 495a15b2-20bd-44d2-8020-816031e89832] raise exception.PortBindingFailed(port_id=port['id']) [ 622.221171] env[62070]: ERROR nova.compute.manager [instance: 495a15b2-20bd-44d2-8020-816031e89832] nova.exception.PortBindingFailed: Binding failed for port 247acd8a-3707-4fc6-a29f-66b81e3af752, please check neutron logs for more information. [ 622.221171] env[62070]: ERROR nova.compute.manager [instance: 495a15b2-20bd-44d2-8020-816031e89832] [ 622.221171] env[62070]: DEBUG nova.compute.utils [None req-3399fbf7-c899-4cd9-8ca2-2d4fdce15ab9 tempest-InstanceActionsNegativeTestJSON-1068104952 tempest-InstanceActionsNegativeTestJSON-1068104952-project-member] [instance: 495a15b2-20bd-44d2-8020-816031e89832] Binding failed for port 247acd8a-3707-4fc6-a29f-66b81e3af752, please check neutron logs for more information. 
{{(pid=62070) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 622.222869] env[62070]: DEBUG nova.virt.hardware [None req-ae386fad-6b3a-4fae-9284-28954bd3da74 tempest-ServersTestJSON-1024696343 tempest-ServersTestJSON-1024696343-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T09:21:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T09:21:17Z,direct_url=,disk_format='vmdk',id=43ea607c-7ece-4601-9b11-75c6a16aa7dd,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9d42cb2bbadf40d6b35f237f71234611',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T09:21:18Z,virtual_size=,visibility=), allow threads: False {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 622.223142] env[62070]: DEBUG nova.virt.hardware [None req-ae386fad-6b3a-4fae-9284-28954bd3da74 tempest-ServersTestJSON-1024696343 tempest-ServersTestJSON-1024696343-project-member] Flavor limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 622.223468] env[62070]: DEBUG nova.virt.hardware [None req-ae386fad-6b3a-4fae-9284-28954bd3da74 tempest-ServersTestJSON-1024696343 tempest-ServersTestJSON-1024696343-project-member] Image limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 622.223591] env[62070]: DEBUG nova.virt.hardware [None req-ae386fad-6b3a-4fae-9284-28954bd3da74 tempest-ServersTestJSON-1024696343 tempest-ServersTestJSON-1024696343-project-member] Flavor pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 622.223794] env[62070]: DEBUG nova.virt.hardware [None req-ae386fad-6b3a-4fae-9284-28954bd3da74 tempest-ServersTestJSON-1024696343 tempest-ServersTestJSON-1024696343-project-member] Image pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 622.223993] env[62070]: DEBUG nova.virt.hardware [None req-ae386fad-6b3a-4fae-9284-28954bd3da74 tempest-ServersTestJSON-1024696343 tempest-ServersTestJSON-1024696343-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 622.224535] env[62070]: DEBUG nova.virt.hardware [None req-ae386fad-6b3a-4fae-9284-28954bd3da74 tempest-ServersTestJSON-1024696343 tempest-ServersTestJSON-1024696343-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 622.224772] env[62070]: DEBUG nova.virt.hardware [None req-ae386fad-6b3a-4fae-9284-28954bd3da74 tempest-ServersTestJSON-1024696343 tempest-ServersTestJSON-1024696343-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 622.225535] env[62070]: DEBUG nova.virt.hardware [None req-ae386fad-6b3a-4fae-9284-28954bd3da74 tempest-ServersTestJSON-1024696343 
tempest-ServersTestJSON-1024696343-project-member] Got 1 possible topologies {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 622.225535] env[62070]: DEBUG nova.virt.hardware [None req-ae386fad-6b3a-4fae-9284-28954bd3da74 tempest-ServersTestJSON-1024696343 tempest-ServersTestJSON-1024696343-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 622.225535] env[62070]: DEBUG nova.virt.hardware [None req-ae386fad-6b3a-4fae-9284-28954bd3da74 tempest-ServersTestJSON-1024696343 tempest-ServersTestJSON-1024696343-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 622.225761] env[62070]: DEBUG oslo_concurrency.lockutils [None req-96d2c368-fe9f-4ee6-a530-cc95bf35b01a tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.542s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 622.227476] env[62070]: INFO nova.compute.claims [None req-96d2c368-fe9f-4ee6-a530-cc95bf35b01a tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 73ab65b7-32e7-4206-8f31-466085319c71] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 622.234085] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bba0fc2-cb19-40be-ab68-94492b92c093 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.237216] env[62070]: DEBUG nova.compute.manager [None req-3399fbf7-c899-4cd9-8ca2-2d4fdce15ab9 tempest-InstanceActionsNegativeTestJSON-1068104952 tempest-InstanceActionsNegativeTestJSON-1068104952-project-member] [instance: 495a15b2-20bd-44d2-8020-816031e89832] Build of instance 495a15b2-20bd-44d2-8020-816031e89832 was re-scheduled: Binding failed for port 247acd8a-3707-4fc6-a29f-66b81e3af752, please check neutron logs for more information. 
{{(pid=62070) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 622.237675] env[62070]: DEBUG nova.compute.manager [None req-3399fbf7-c899-4cd9-8ca2-2d4fdce15ab9 tempest-InstanceActionsNegativeTestJSON-1068104952 tempest-InstanceActionsNegativeTestJSON-1068104952-project-member] [instance: 495a15b2-20bd-44d2-8020-816031e89832] Unplugging VIFs for instance {{(pid=62070) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 622.237926] env[62070]: DEBUG oslo_concurrency.lockutils [None req-3399fbf7-c899-4cd9-8ca2-2d4fdce15ab9 tempest-InstanceActionsNegativeTestJSON-1068104952 tempest-InstanceActionsNegativeTestJSON-1068104952-project-member] Acquiring lock "refresh_cache-495a15b2-20bd-44d2-8020-816031e89832" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 622.241981] env[62070]: DEBUG oslo_concurrency.lockutils [None req-3399fbf7-c899-4cd9-8ca2-2d4fdce15ab9 tempest-InstanceActionsNegativeTestJSON-1068104952 tempest-InstanceActionsNegativeTestJSON-1068104952-project-member] Acquired lock "refresh_cache-495a15b2-20bd-44d2-8020-816031e89832" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 622.242228] env[62070]: DEBUG nova.network.neutron [None req-3399fbf7-c899-4cd9-8ca2-2d4fdce15ab9 tempest-InstanceActionsNegativeTestJSON-1068104952 tempest-InstanceActionsNegativeTestJSON-1068104952-project-member] [instance: 495a15b2-20bd-44d2-8020-816031e89832] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 622.249158] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8494a4f0-ee1a-4b0b-b995-768bc47388f5 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.393858] env[62070]: DEBUG nova.compute.manager [req-aa5d5512-fb51-4fa2-a73f-f6ba969f1999 req-cdef1ea4-58e6-4482-b086-fd9829709dd3 service nova] [instance: 3ee4e051-f51d-4840-a918-fdedad020557] Received event network-changed-f047973c-4e00-4312-949c-099b3b04f342 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 622.394067] env[62070]: DEBUG nova.compute.manager [req-aa5d5512-fb51-4fa2-a73f-f6ba969f1999 req-cdef1ea4-58e6-4482-b086-fd9829709dd3 service nova] [instance: 3ee4e051-f51d-4840-a918-fdedad020557] Refreshing instance network info cache due to event network-changed-f047973c-4e00-4312-949c-099b3b04f342. 
{{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 622.394420] env[62070]: DEBUG oslo_concurrency.lockutils [req-aa5d5512-fb51-4fa2-a73f-f6ba969f1999 req-cdef1ea4-58e6-4482-b086-fd9829709dd3 service nova] Acquiring lock "refresh_cache-3ee4e051-f51d-4840-a918-fdedad020557" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 622.394503] env[62070]: DEBUG oslo_concurrency.lockutils [req-aa5d5512-fb51-4fa2-a73f-f6ba969f1999 req-cdef1ea4-58e6-4482-b086-fd9829709dd3 service nova] Acquired lock "refresh_cache-3ee4e051-f51d-4840-a918-fdedad020557" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 622.394637] env[62070]: DEBUG nova.network.neutron [req-aa5d5512-fb51-4fa2-a73f-f6ba969f1999 req-cdef1ea4-58e6-4482-b086-fd9829709dd3 service nova] [instance: 3ee4e051-f51d-4840-a918-fdedad020557] Refreshing network info cache for port f047973c-4e00-4312-949c-099b3b04f342 {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 622.417614] env[62070]: DEBUG oslo_concurrency.lockutils [None req-c926e67b-5387-49dc-b89c-40a189a7f3d7 tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 622.502102] env[62070]: ERROR nova.compute.manager [None req-ae386fad-6b3a-4fae-9284-28954bd3da74 tempest-ServersTestJSON-1024696343 tempest-ServersTestJSON-1024696343-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port f047973c-4e00-4312-949c-099b3b04f342, please check neutron logs for more information. 
[ 622.502102] env[62070]: ERROR nova.compute.manager Traceback (most recent call last): [ 622.502102] env[62070]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 622.502102] env[62070]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 622.502102] env[62070]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 622.502102] env[62070]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 622.502102] env[62070]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 622.502102] env[62070]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 622.502102] env[62070]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 622.502102] env[62070]: ERROR nova.compute.manager self.force_reraise() [ 622.502102] env[62070]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 622.502102] env[62070]: ERROR nova.compute.manager raise self.value [ 622.502102] env[62070]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 622.502102] env[62070]: ERROR nova.compute.manager updated_port = self._update_port( [ 622.502102] env[62070]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 622.502102] env[62070]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 622.502701] env[62070]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 622.502701] env[62070]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 622.502701] env[62070]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port f047973c-4e00-4312-949c-099b3b04f342, please check neutron logs for more information. 
[ 622.502701] env[62070]: ERROR nova.compute.manager [ 622.502701] env[62070]: Traceback (most recent call last): [ 622.502701] env[62070]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 622.502701] env[62070]: listener.cb(fileno) [ 622.502701] env[62070]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 622.502701] env[62070]: result = function(*args, **kwargs) [ 622.502701] env[62070]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 622.502701] env[62070]: return func(*args, **kwargs) [ 622.502701] env[62070]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 622.502701] env[62070]: raise e [ 622.502701] env[62070]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 622.502701] env[62070]: nwinfo = self.network_api.allocate_for_instance( [ 622.502701] env[62070]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 622.502701] env[62070]: created_port_ids = self._update_ports_for_instance( [ 622.502701] env[62070]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 622.502701] env[62070]: with excutils.save_and_reraise_exception(): [ 622.502701] env[62070]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 622.502701] env[62070]: self.force_reraise() [ 622.502701] env[62070]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 622.502701] env[62070]: raise self.value [ 622.502701] env[62070]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 622.502701] env[62070]: updated_port = self._update_port( [ 622.502701] env[62070]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 622.502701] env[62070]: _ensure_no_port_binding_failure(port) [ 622.502701] env[62070]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 622.502701] env[62070]: raise exception.PortBindingFailed(port_id=port['id']) [ 622.503680] env[62070]: nova.exception.PortBindingFailed: Binding failed for port f047973c-4e00-4312-949c-099b3b04f342, please check neutron logs for more information. [ 622.503680] env[62070]: Removing descriptor: 14 [ 622.503680] env[62070]: ERROR nova.compute.manager [None req-ae386fad-6b3a-4fae-9284-28954bd3da74 tempest-ServersTestJSON-1024696343 tempest-ServersTestJSON-1024696343-project-member] [instance: 3ee4e051-f51d-4840-a918-fdedad020557] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port f047973c-4e00-4312-949c-099b3b04f342, please check neutron logs for more information. 
[ 622.503680] env[62070]: ERROR nova.compute.manager [instance: 3ee4e051-f51d-4840-a918-fdedad020557] Traceback (most recent call last): [ 622.503680] env[62070]: ERROR nova.compute.manager [instance: 3ee4e051-f51d-4840-a918-fdedad020557] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 622.503680] env[62070]: ERROR nova.compute.manager [instance: 3ee4e051-f51d-4840-a918-fdedad020557] yield resources [ 622.503680] env[62070]: ERROR nova.compute.manager [instance: 3ee4e051-f51d-4840-a918-fdedad020557] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 622.503680] env[62070]: ERROR nova.compute.manager [instance: 3ee4e051-f51d-4840-a918-fdedad020557] self.driver.spawn(context, instance, image_meta, [ 622.503680] env[62070]: ERROR nova.compute.manager [instance: 3ee4e051-f51d-4840-a918-fdedad020557] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 622.503680] env[62070]: ERROR nova.compute.manager [instance: 3ee4e051-f51d-4840-a918-fdedad020557] self._vmops.spawn(context, instance, image_meta, injected_files, [ 622.503680] env[62070]: ERROR nova.compute.manager [instance: 3ee4e051-f51d-4840-a918-fdedad020557] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 622.503680] env[62070]: ERROR nova.compute.manager [instance: 3ee4e051-f51d-4840-a918-fdedad020557] vm_ref = self.build_virtual_machine(instance, [ 622.504104] env[62070]: ERROR nova.compute.manager [instance: 3ee4e051-f51d-4840-a918-fdedad020557] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 622.504104] env[62070]: ERROR nova.compute.manager [instance: 3ee4e051-f51d-4840-a918-fdedad020557] vif_infos = vmwarevif.get_vif_info(self._session, [ 622.504104] env[62070]: ERROR nova.compute.manager [instance: 3ee4e051-f51d-4840-a918-fdedad020557] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 622.504104] env[62070]: ERROR nova.compute.manager [instance: 3ee4e051-f51d-4840-a918-fdedad020557] for vif in network_info: [ 622.504104] env[62070]: ERROR nova.compute.manager [instance: 3ee4e051-f51d-4840-a918-fdedad020557] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 622.504104] env[62070]: ERROR nova.compute.manager [instance: 3ee4e051-f51d-4840-a918-fdedad020557] return self._sync_wrapper(fn, *args, **kwargs) [ 622.504104] env[62070]: ERROR nova.compute.manager [instance: 3ee4e051-f51d-4840-a918-fdedad020557] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 622.504104] env[62070]: ERROR nova.compute.manager [instance: 3ee4e051-f51d-4840-a918-fdedad020557] self.wait() [ 622.504104] env[62070]: ERROR nova.compute.manager [instance: 3ee4e051-f51d-4840-a918-fdedad020557] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 622.504104] env[62070]: ERROR nova.compute.manager [instance: 3ee4e051-f51d-4840-a918-fdedad020557] self[:] = self._gt.wait() [ 622.504104] env[62070]: ERROR nova.compute.manager [instance: 3ee4e051-f51d-4840-a918-fdedad020557] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 622.504104] env[62070]: ERROR nova.compute.manager [instance: 3ee4e051-f51d-4840-a918-fdedad020557] return self._exit_event.wait() [ 622.504104] env[62070]: ERROR nova.compute.manager [instance: 3ee4e051-f51d-4840-a918-fdedad020557] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 622.504630] env[62070]: ERROR 
nova.compute.manager [instance: 3ee4e051-f51d-4840-a918-fdedad020557] result = hub.switch() [ 622.504630] env[62070]: ERROR nova.compute.manager [instance: 3ee4e051-f51d-4840-a918-fdedad020557] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 622.504630] env[62070]: ERROR nova.compute.manager [instance: 3ee4e051-f51d-4840-a918-fdedad020557] return self.greenlet.switch() [ 622.504630] env[62070]: ERROR nova.compute.manager [instance: 3ee4e051-f51d-4840-a918-fdedad020557] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 622.504630] env[62070]: ERROR nova.compute.manager [instance: 3ee4e051-f51d-4840-a918-fdedad020557] result = function(*args, **kwargs) [ 622.504630] env[62070]: ERROR nova.compute.manager [instance: 3ee4e051-f51d-4840-a918-fdedad020557] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 622.504630] env[62070]: ERROR nova.compute.manager [instance: 3ee4e051-f51d-4840-a918-fdedad020557] return func(*args, **kwargs) [ 622.504630] env[62070]: ERROR nova.compute.manager [instance: 3ee4e051-f51d-4840-a918-fdedad020557] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 622.504630] env[62070]: ERROR nova.compute.manager [instance: 3ee4e051-f51d-4840-a918-fdedad020557] raise e [ 622.504630] env[62070]: ERROR nova.compute.manager [instance: 3ee4e051-f51d-4840-a918-fdedad020557] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 622.504630] env[62070]: ERROR nova.compute.manager [instance: 3ee4e051-f51d-4840-a918-fdedad020557] nwinfo = self.network_api.allocate_for_instance( [ 622.504630] env[62070]: ERROR nova.compute.manager [instance: 3ee4e051-f51d-4840-a918-fdedad020557] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 622.504630] env[62070]: ERROR nova.compute.manager [instance: 3ee4e051-f51d-4840-a918-fdedad020557] created_port_ids = self._update_ports_for_instance( [ 622.505100] env[62070]: ERROR nova.compute.manager [instance: 3ee4e051-f51d-4840-a918-fdedad020557] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 622.505100] env[62070]: ERROR nova.compute.manager [instance: 3ee4e051-f51d-4840-a918-fdedad020557] with excutils.save_and_reraise_exception(): [ 622.505100] env[62070]: ERROR nova.compute.manager [instance: 3ee4e051-f51d-4840-a918-fdedad020557] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 622.505100] env[62070]: ERROR nova.compute.manager [instance: 3ee4e051-f51d-4840-a918-fdedad020557] self.force_reraise() [ 622.505100] env[62070]: ERROR nova.compute.manager [instance: 3ee4e051-f51d-4840-a918-fdedad020557] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 622.505100] env[62070]: ERROR nova.compute.manager [instance: 3ee4e051-f51d-4840-a918-fdedad020557] raise self.value [ 622.505100] env[62070]: ERROR nova.compute.manager [instance: 3ee4e051-f51d-4840-a918-fdedad020557] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 622.505100] env[62070]: ERROR nova.compute.manager [instance: 3ee4e051-f51d-4840-a918-fdedad020557] updated_port = self._update_port( [ 622.505100] env[62070]: ERROR nova.compute.manager [instance: 3ee4e051-f51d-4840-a918-fdedad020557] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 622.505100] 
env[62070]: ERROR nova.compute.manager [instance: 3ee4e051-f51d-4840-a918-fdedad020557] _ensure_no_port_binding_failure(port) [ 622.505100] env[62070]: ERROR nova.compute.manager [instance: 3ee4e051-f51d-4840-a918-fdedad020557] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 622.505100] env[62070]: ERROR nova.compute.manager [instance: 3ee4e051-f51d-4840-a918-fdedad020557] raise exception.PortBindingFailed(port_id=port['id']) [ 622.505691] env[62070]: ERROR nova.compute.manager [instance: 3ee4e051-f51d-4840-a918-fdedad020557] nova.exception.PortBindingFailed: Binding failed for port f047973c-4e00-4312-949c-099b3b04f342, please check neutron logs for more information. [ 622.505691] env[62070]: ERROR nova.compute.manager [instance: 3ee4e051-f51d-4840-a918-fdedad020557] [ 622.505691] env[62070]: INFO nova.compute.manager [None req-ae386fad-6b3a-4fae-9284-28954bd3da74 tempest-ServersTestJSON-1024696343 tempest-ServersTestJSON-1024696343-project-member] [instance: 3ee4e051-f51d-4840-a918-fdedad020557] Terminating instance [ 622.506528] env[62070]: DEBUG oslo_concurrency.lockutils [None req-ae386fad-6b3a-4fae-9284-28954bd3da74 tempest-ServersTestJSON-1024696343 tempest-ServersTestJSON-1024696343-project-member] Acquiring lock "refresh_cache-3ee4e051-f51d-4840-a918-fdedad020557" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 622.766100] env[62070]: DEBUG nova.network.neutron [None req-3399fbf7-c899-4cd9-8ca2-2d4fdce15ab9 tempest-InstanceActionsNegativeTestJSON-1068104952 tempest-InstanceActionsNegativeTestJSON-1068104952-project-member] [instance: 495a15b2-20bd-44d2-8020-816031e89832] Instance cache missing network info. {{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 622.926636] env[62070]: DEBUG nova.network.neutron [None req-3399fbf7-c899-4cd9-8ca2-2d4fdce15ab9 tempest-InstanceActionsNegativeTestJSON-1068104952 tempest-InstanceActionsNegativeTestJSON-1068104952-project-member] [instance: 495a15b2-20bd-44d2-8020-816031e89832] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 622.926636] env[62070]: DEBUG nova.network.neutron [req-aa5d5512-fb51-4fa2-a73f-f6ba969f1999 req-cdef1ea4-58e6-4482-b086-fd9829709dd3 service nova] [instance: 3ee4e051-f51d-4840-a918-fdedad020557] Instance cache missing network info. 
{{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 623.089333] env[62070]: DEBUG nova.network.neutron [req-aa5d5512-fb51-4fa2-a73f-f6ba969f1999 req-cdef1ea4-58e6-4482-b086-fd9829709dd3 service nova] [instance: 3ee4e051-f51d-4840-a918-fdedad020557] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 623.430143] env[62070]: DEBUG oslo_concurrency.lockutils [None req-3399fbf7-c899-4cd9-8ca2-2d4fdce15ab9 tempest-InstanceActionsNegativeTestJSON-1068104952 tempest-InstanceActionsNegativeTestJSON-1068104952-project-member] Releasing lock "refresh_cache-495a15b2-20bd-44d2-8020-816031e89832" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 623.430404] env[62070]: DEBUG nova.compute.manager [None req-3399fbf7-c899-4cd9-8ca2-2d4fdce15ab9 tempest-InstanceActionsNegativeTestJSON-1068104952 tempest-InstanceActionsNegativeTestJSON-1068104952-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62070) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 623.431156] env[62070]: DEBUG nova.compute.manager [None req-3399fbf7-c899-4cd9-8ca2-2d4fdce15ab9 tempest-InstanceActionsNegativeTestJSON-1068104952 tempest-InstanceActionsNegativeTestJSON-1068104952-project-member] [instance: 495a15b2-20bd-44d2-8020-816031e89832] Deallocating network for instance {{(pid=62070) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 623.431820] env[62070]: DEBUG nova.network.neutron [None req-3399fbf7-c899-4cd9-8ca2-2d4fdce15ab9 tempest-InstanceActionsNegativeTestJSON-1068104952 tempest-InstanceActionsNegativeTestJSON-1068104952-project-member] [instance: 495a15b2-20bd-44d2-8020-816031e89832] deallocate_for_instance() {{(pid=62070) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 623.458119] env[62070]: DEBUG nova.network.neutron [None req-3399fbf7-c899-4cd9-8ca2-2d4fdce15ab9 tempest-InstanceActionsNegativeTestJSON-1068104952 tempest-InstanceActionsNegativeTestJSON-1068104952-project-member] [instance: 495a15b2-20bd-44d2-8020-816031e89832] Instance cache missing network info. 
{{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 623.591720] env[62070]: DEBUG oslo_concurrency.lockutils [req-aa5d5512-fb51-4fa2-a73f-f6ba969f1999 req-cdef1ea4-58e6-4482-b086-fd9829709dd3 service nova] Releasing lock "refresh_cache-3ee4e051-f51d-4840-a918-fdedad020557" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 623.592306] env[62070]: DEBUG oslo_concurrency.lockutils [None req-ae386fad-6b3a-4fae-9284-28954bd3da74 tempest-ServersTestJSON-1024696343 tempest-ServersTestJSON-1024696343-project-member] Acquired lock "refresh_cache-3ee4e051-f51d-4840-a918-fdedad020557" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 623.592619] env[62070]: DEBUG nova.network.neutron [None req-ae386fad-6b3a-4fae-9284-28954bd3da74 tempest-ServersTestJSON-1024696343 tempest-ServersTestJSON-1024696343-project-member] [instance: 3ee4e051-f51d-4840-a918-fdedad020557] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 623.697032] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56f9a333-0441-4c12-9cb7-1e36112a38e8 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.704900] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a13e397-4e3b-48a9-b14c-834991d65e5b {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.747029] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6dc1343d-91bb-4b5d-a04e-b7edbf0d7952 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.753844] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd7d73e4-503c-4690-8581-d809753ac735 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.771592] env[62070]: DEBUG nova.compute.provider_tree [None req-96d2c368-fe9f-4ee6-a530-cc95bf35b01a tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 623.961778] env[62070]: DEBUG nova.network.neutron [None req-3399fbf7-c899-4cd9-8ca2-2d4fdce15ab9 tempest-InstanceActionsNegativeTestJSON-1068104952 tempest-InstanceActionsNegativeTestJSON-1068104952-project-member] [instance: 495a15b2-20bd-44d2-8020-816031e89832] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 624.135595] env[62070]: DEBUG nova.network.neutron [None req-ae386fad-6b3a-4fae-9284-28954bd3da74 tempest-ServersTestJSON-1024696343 tempest-ServersTestJSON-1024696343-project-member] [instance: 3ee4e051-f51d-4840-a918-fdedad020557] Instance cache missing network info. 
{{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 624.275756] env[62070]: DEBUG nova.scheduler.client.report [None req-96d2c368-fe9f-4ee6-a530-cc95bf35b01a tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 624.303101] env[62070]: DEBUG nova.network.neutron [None req-ae386fad-6b3a-4fae-9284-28954bd3da74 tempest-ServersTestJSON-1024696343 tempest-ServersTestJSON-1024696343-project-member] [instance: 3ee4e051-f51d-4840-a918-fdedad020557] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 624.453804] env[62070]: DEBUG nova.compute.manager [req-aa20a07e-ea17-4b97-89e6-f7145a8356c1 req-48ce5a96-60af-425f-ae95-d96d05dedca1 service nova] [instance: 3ee4e051-f51d-4840-a918-fdedad020557] Received event network-vif-deleted-f047973c-4e00-4312-949c-099b3b04f342 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 624.464509] env[62070]: INFO nova.compute.manager [None req-3399fbf7-c899-4cd9-8ca2-2d4fdce15ab9 tempest-InstanceActionsNegativeTestJSON-1068104952 tempest-InstanceActionsNegativeTestJSON-1068104952-project-member] [instance: 495a15b2-20bd-44d2-8020-816031e89832] Took 1.03 seconds to deallocate network for instance. [ 624.784041] env[62070]: DEBUG oslo_concurrency.lockutils [None req-96d2c368-fe9f-4ee6-a530-cc95bf35b01a tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.557s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 624.784041] env[62070]: DEBUG nova.compute.manager [None req-96d2c368-fe9f-4ee6-a530-cc95bf35b01a tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 73ab65b7-32e7-4206-8f31-466085319c71] Start building networks asynchronously for instance. 
{{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 624.789982] env[62070]: DEBUG oslo_concurrency.lockutils [None req-d154b157-ec97-49c4-b004-fc679da21516 tempest-ServersWithSpecificFlavorTestJSON-1618728433 tempest-ServersWithSpecificFlavorTestJSON-1618728433-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 14.460s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 624.804799] env[62070]: DEBUG oslo_concurrency.lockutils [None req-ae386fad-6b3a-4fae-9284-28954bd3da74 tempest-ServersTestJSON-1024696343 tempest-ServersTestJSON-1024696343-project-member] Releasing lock "refresh_cache-3ee4e051-f51d-4840-a918-fdedad020557" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 624.805587] env[62070]: DEBUG nova.compute.manager [None req-ae386fad-6b3a-4fae-9284-28954bd3da74 tempest-ServersTestJSON-1024696343 tempest-ServersTestJSON-1024696343-project-member] [instance: 3ee4e051-f51d-4840-a918-fdedad020557] Start destroying the instance on the hypervisor. {{(pid=62070) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 624.805762] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-ae386fad-6b3a-4fae-9284-28954bd3da74 tempest-ServersTestJSON-1024696343 tempest-ServersTestJSON-1024696343-project-member] [instance: 3ee4e051-f51d-4840-a918-fdedad020557] Destroying instance {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 624.806139] env[62070]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a093ba87-db00-4521-84b0-1f9e977f2a1e {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.819342] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be17d45a-57e8-48e8-95ad-a4f27d1d00f1 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.848216] env[62070]: WARNING nova.virt.vmwareapi.vmops [None req-ae386fad-6b3a-4fae-9284-28954bd3da74 tempest-ServersTestJSON-1024696343 tempest-ServersTestJSON-1024696343-project-member] [instance: 3ee4e051-f51d-4840-a918-fdedad020557] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 3ee4e051-f51d-4840-a918-fdedad020557 could not be found. [ 624.848482] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-ae386fad-6b3a-4fae-9284-28954bd3da74 tempest-ServersTestJSON-1024696343 tempest-ServersTestJSON-1024696343-project-member] [instance: 3ee4e051-f51d-4840-a918-fdedad020557] Instance destroyed {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 624.848659] env[62070]: INFO nova.compute.manager [None req-ae386fad-6b3a-4fae-9284-28954bd3da74 tempest-ServersTestJSON-1024696343 tempest-ServersTestJSON-1024696343-project-member] [instance: 3ee4e051-f51d-4840-a918-fdedad020557] Took 0.04 seconds to destroy the instance on the hypervisor. [ 624.848908] env[62070]: DEBUG oslo.service.loopingcall [None req-ae386fad-6b3a-4fae-9284-28954bd3da74 tempest-ServersTestJSON-1024696343 tempest-ServersTestJSON-1024696343-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 624.851846] env[62070]: DEBUG nova.compute.manager [-] [instance: 3ee4e051-f51d-4840-a918-fdedad020557] Deallocating network for instance {{(pid=62070) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 624.851846] env[62070]: DEBUG nova.network.neutron [-] [instance: 3ee4e051-f51d-4840-a918-fdedad020557] deallocate_for_instance() {{(pid=62070) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 624.876530] env[62070]: DEBUG nova.network.neutron [-] [instance: 3ee4e051-f51d-4840-a918-fdedad020557] Instance cache missing network info. {{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 625.290898] env[62070]: DEBUG nova.compute.utils [None req-96d2c368-fe9f-4ee6-a530-cc95bf35b01a tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Using /dev/sd instead of None {{(pid=62070) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 625.292296] env[62070]: DEBUG nova.compute.manager [None req-96d2c368-fe9f-4ee6-a530-cc95bf35b01a tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 73ab65b7-32e7-4206-8f31-466085319c71] Allocating IP information in the background. {{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 625.292463] env[62070]: DEBUG nova.network.neutron [None req-96d2c368-fe9f-4ee6-a530-cc95bf35b01a tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 73ab65b7-32e7-4206-8f31-466085319c71] allocate_for_instance() {{(pid=62070) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 625.364091] env[62070]: DEBUG nova.policy [None req-96d2c368-fe9f-4ee6-a530-cc95bf35b01a tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f866f97eed1a41b39b4cd552102c6e21', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9191f0e6c2ee401abca64c0780e230bf', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62070) authorize /opt/stack/nova/nova/policy.py:201}} [ 625.379247] env[62070]: DEBUG nova.network.neutron [-] [instance: 3ee4e051-f51d-4840-a918-fdedad020557] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 625.509168] env[62070]: INFO nova.scheduler.client.report [None req-3399fbf7-c899-4cd9-8ca2-2d4fdce15ab9 tempest-InstanceActionsNegativeTestJSON-1068104952 tempest-InstanceActionsNegativeTestJSON-1068104952-project-member] Deleted allocations for instance 495a15b2-20bd-44d2-8020-816031e89832 [ 625.730810] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bef25c73-81bd-4582-95c1-9f05b8e3e103 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.738926] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8c590da-8172-4194-8050-46717139cc51 {{(pid=62070) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.768615] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c2fb619-fdcf-4d69-a5a5-8a627dca1e37 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.776471] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13cd6b3f-b457-4e71-bfdf-f0b6adf3f435 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.790127] env[62070]: DEBUG nova.compute.provider_tree [None req-d154b157-ec97-49c4-b004-fc679da21516 tempest-ServersWithSpecificFlavorTestJSON-1618728433 tempest-ServersWithSpecificFlavorTestJSON-1618728433-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 625.795474] env[62070]: DEBUG nova.compute.manager [None req-96d2c368-fe9f-4ee6-a530-cc95bf35b01a tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 73ab65b7-32e7-4206-8f31-466085319c71] Start building block device mappings for instance. {{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 625.886386] env[62070]: INFO nova.compute.manager [-] [instance: 3ee4e051-f51d-4840-a918-fdedad020557] Took 1.03 seconds to deallocate network for instance. [ 625.889371] env[62070]: DEBUG nova.compute.claims [None req-ae386fad-6b3a-4fae-9284-28954bd3da74 tempest-ServersTestJSON-1024696343 tempest-ServersTestJSON-1024696343-project-member] [instance: 3ee4e051-f51d-4840-a918-fdedad020557] Aborting claim: {{(pid=62070) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 625.889371] env[62070]: DEBUG oslo_concurrency.lockutils [None req-ae386fad-6b3a-4fae-9284-28954bd3da74 tempest-ServersTestJSON-1024696343 tempest-ServersTestJSON-1024696343-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 626.024586] env[62070]: DEBUG oslo_concurrency.lockutils [None req-3399fbf7-c899-4cd9-8ca2-2d4fdce15ab9 tempest-InstanceActionsNegativeTestJSON-1068104952 tempest-InstanceActionsNegativeTestJSON-1068104952-project-member] Lock "495a15b2-20bd-44d2-8020-816031e89832" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 64.196s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 626.069799] env[62070]: DEBUG nova.network.neutron [None req-96d2c368-fe9f-4ee6-a530-cc95bf35b01a tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 73ab65b7-32e7-4206-8f31-466085319c71] Successfully created port: 676dde2d-c1da-454a-a873-1984ec64fadb {{(pid=62070) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 626.297233] env[62070]: DEBUG nova.scheduler.client.report [None req-d154b157-ec97-49c4-b004-fc679da21516 tempest-ServersWithSpecificFlavorTestJSON-1618728433 tempest-ServersWithSpecificFlavorTestJSON-1618728433-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 
0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 626.527482] env[62070]: DEBUG nova.compute.manager [None req-8576b7f8-0e27-4444-92a4-eb32008ddebf tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] [instance: b0134b0f-23b4-4d34-b144-71ccdd9fba72] Starting instance... {{(pid=62070) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 626.806669] env[62070]: DEBUG oslo_concurrency.lockutils [None req-d154b157-ec97-49c4-b004-fc679da21516 tempest-ServersWithSpecificFlavorTestJSON-1618728433 tempest-ServersWithSpecificFlavorTestJSON-1618728433-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.017s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 626.806941] env[62070]: ERROR nova.compute.manager [None req-d154b157-ec97-49c4-b004-fc679da21516 tempest-ServersWithSpecificFlavorTestJSON-1618728433 tempest-ServersWithSpecificFlavorTestJSON-1618728433-project-member] [instance: 7d8d4cdc-2b25-46f3-8bc0-1de22fdd005a] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port de7507a7-5aad-4849-8609-8ff49b3f040e, please check neutron logs for more information. [ 626.806941] env[62070]: ERROR nova.compute.manager [instance: 7d8d4cdc-2b25-46f3-8bc0-1de22fdd005a] Traceback (most recent call last): [ 626.806941] env[62070]: ERROR nova.compute.manager [instance: 7d8d4cdc-2b25-46f3-8bc0-1de22fdd005a] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 626.806941] env[62070]: ERROR nova.compute.manager [instance: 7d8d4cdc-2b25-46f3-8bc0-1de22fdd005a] self.driver.spawn(context, instance, image_meta, [ 626.806941] env[62070]: ERROR nova.compute.manager [instance: 7d8d4cdc-2b25-46f3-8bc0-1de22fdd005a] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 626.806941] env[62070]: ERROR nova.compute.manager [instance: 7d8d4cdc-2b25-46f3-8bc0-1de22fdd005a] self._vmops.spawn(context, instance, image_meta, injected_files, [ 626.806941] env[62070]: ERROR nova.compute.manager [instance: 7d8d4cdc-2b25-46f3-8bc0-1de22fdd005a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 626.806941] env[62070]: ERROR nova.compute.manager [instance: 7d8d4cdc-2b25-46f3-8bc0-1de22fdd005a] vm_ref = self.build_virtual_machine(instance, [ 626.806941] env[62070]: ERROR nova.compute.manager [instance: 7d8d4cdc-2b25-46f3-8bc0-1de22fdd005a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 626.806941] env[62070]: ERROR nova.compute.manager [instance: 7d8d4cdc-2b25-46f3-8bc0-1de22fdd005a] vif_infos = vmwarevif.get_vif_info(self._session, [ 626.806941] env[62070]: ERROR nova.compute.manager [instance: 7d8d4cdc-2b25-46f3-8bc0-1de22fdd005a] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 626.807255] env[62070]: ERROR nova.compute.manager [instance: 7d8d4cdc-2b25-46f3-8bc0-1de22fdd005a] for vif in network_info: [ 626.807255] env[62070]: ERROR nova.compute.manager [instance: 
7d8d4cdc-2b25-46f3-8bc0-1de22fdd005a] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 626.807255] env[62070]: ERROR nova.compute.manager [instance: 7d8d4cdc-2b25-46f3-8bc0-1de22fdd005a] return self._sync_wrapper(fn, *args, **kwargs) [ 626.807255] env[62070]: ERROR nova.compute.manager [instance: 7d8d4cdc-2b25-46f3-8bc0-1de22fdd005a] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 626.807255] env[62070]: ERROR nova.compute.manager [instance: 7d8d4cdc-2b25-46f3-8bc0-1de22fdd005a] self.wait() [ 626.807255] env[62070]: ERROR nova.compute.manager [instance: 7d8d4cdc-2b25-46f3-8bc0-1de22fdd005a] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 626.807255] env[62070]: ERROR nova.compute.manager [instance: 7d8d4cdc-2b25-46f3-8bc0-1de22fdd005a] self[:] = self._gt.wait() [ 626.807255] env[62070]: ERROR nova.compute.manager [instance: 7d8d4cdc-2b25-46f3-8bc0-1de22fdd005a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 626.807255] env[62070]: ERROR nova.compute.manager [instance: 7d8d4cdc-2b25-46f3-8bc0-1de22fdd005a] return self._exit_event.wait() [ 626.807255] env[62070]: ERROR nova.compute.manager [instance: 7d8d4cdc-2b25-46f3-8bc0-1de22fdd005a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 626.807255] env[62070]: ERROR nova.compute.manager [instance: 7d8d4cdc-2b25-46f3-8bc0-1de22fdd005a] result = hub.switch() [ 626.807255] env[62070]: ERROR nova.compute.manager [instance: 7d8d4cdc-2b25-46f3-8bc0-1de22fdd005a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 626.807255] env[62070]: ERROR nova.compute.manager [instance: 7d8d4cdc-2b25-46f3-8bc0-1de22fdd005a] return self.greenlet.switch() [ 626.808036] env[62070]: ERROR nova.compute.manager [instance: 7d8d4cdc-2b25-46f3-8bc0-1de22fdd005a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 626.808036] env[62070]: ERROR nova.compute.manager [instance: 7d8d4cdc-2b25-46f3-8bc0-1de22fdd005a] result = function(*args, **kwargs) [ 626.808036] env[62070]: ERROR nova.compute.manager [instance: 7d8d4cdc-2b25-46f3-8bc0-1de22fdd005a] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 626.808036] env[62070]: ERROR nova.compute.manager [instance: 7d8d4cdc-2b25-46f3-8bc0-1de22fdd005a] return func(*args, **kwargs) [ 626.808036] env[62070]: ERROR nova.compute.manager [instance: 7d8d4cdc-2b25-46f3-8bc0-1de22fdd005a] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 626.808036] env[62070]: ERROR nova.compute.manager [instance: 7d8d4cdc-2b25-46f3-8bc0-1de22fdd005a] raise e [ 626.808036] env[62070]: ERROR nova.compute.manager [instance: 7d8d4cdc-2b25-46f3-8bc0-1de22fdd005a] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 626.808036] env[62070]: ERROR nova.compute.manager [instance: 7d8d4cdc-2b25-46f3-8bc0-1de22fdd005a] nwinfo = self.network_api.allocate_for_instance( [ 626.808036] env[62070]: ERROR nova.compute.manager [instance: 7d8d4cdc-2b25-46f3-8bc0-1de22fdd005a] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 626.808036] env[62070]: ERROR nova.compute.manager [instance: 7d8d4cdc-2b25-46f3-8bc0-1de22fdd005a] created_port_ids = self._update_ports_for_instance( [ 626.808036] env[62070]: ERROR nova.compute.manager [instance: 7d8d4cdc-2b25-46f3-8bc0-1de22fdd005a] File 
"/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 626.808036] env[62070]: ERROR nova.compute.manager [instance: 7d8d4cdc-2b25-46f3-8bc0-1de22fdd005a] with excutils.save_and_reraise_exception(): [ 626.808036] env[62070]: ERROR nova.compute.manager [instance: 7d8d4cdc-2b25-46f3-8bc0-1de22fdd005a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 626.808381] env[62070]: ERROR nova.compute.manager [instance: 7d8d4cdc-2b25-46f3-8bc0-1de22fdd005a] self.force_reraise() [ 626.808381] env[62070]: ERROR nova.compute.manager [instance: 7d8d4cdc-2b25-46f3-8bc0-1de22fdd005a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 626.808381] env[62070]: ERROR nova.compute.manager [instance: 7d8d4cdc-2b25-46f3-8bc0-1de22fdd005a] raise self.value [ 626.808381] env[62070]: ERROR nova.compute.manager [instance: 7d8d4cdc-2b25-46f3-8bc0-1de22fdd005a] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 626.808381] env[62070]: ERROR nova.compute.manager [instance: 7d8d4cdc-2b25-46f3-8bc0-1de22fdd005a] updated_port = self._update_port( [ 626.808381] env[62070]: ERROR nova.compute.manager [instance: 7d8d4cdc-2b25-46f3-8bc0-1de22fdd005a] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 626.808381] env[62070]: ERROR nova.compute.manager [instance: 7d8d4cdc-2b25-46f3-8bc0-1de22fdd005a] _ensure_no_port_binding_failure(port) [ 626.808381] env[62070]: ERROR nova.compute.manager [instance: 7d8d4cdc-2b25-46f3-8bc0-1de22fdd005a] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 626.808381] env[62070]: ERROR nova.compute.manager [instance: 7d8d4cdc-2b25-46f3-8bc0-1de22fdd005a] raise exception.PortBindingFailed(port_id=port['id']) [ 626.808381] env[62070]: ERROR nova.compute.manager [instance: 7d8d4cdc-2b25-46f3-8bc0-1de22fdd005a] nova.exception.PortBindingFailed: Binding failed for port de7507a7-5aad-4849-8609-8ff49b3f040e, please check neutron logs for more information. [ 626.808381] env[62070]: ERROR nova.compute.manager [instance: 7d8d4cdc-2b25-46f3-8bc0-1de22fdd005a] [ 626.810367] env[62070]: DEBUG nova.compute.utils [None req-d154b157-ec97-49c4-b004-fc679da21516 tempest-ServersWithSpecificFlavorTestJSON-1618728433 tempest-ServersWithSpecificFlavorTestJSON-1618728433-project-member] [instance: 7d8d4cdc-2b25-46f3-8bc0-1de22fdd005a] Binding failed for port de7507a7-5aad-4849-8609-8ff49b3f040e, please check neutron logs for more information. 
{{(pid=62070) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 626.810367] env[62070]: DEBUG oslo_concurrency.lockutils [None req-235f2fca-0298-44f0-b867-2cd232992478 tempest-TenantUsagesTestJSON-193686055 tempest-TenantUsagesTestJSON-193686055-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 15.068s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 626.822028] env[62070]: DEBUG nova.compute.manager [None req-d154b157-ec97-49c4-b004-fc679da21516 tempest-ServersWithSpecificFlavorTestJSON-1618728433 tempest-ServersWithSpecificFlavorTestJSON-1618728433-project-member] [instance: 7d8d4cdc-2b25-46f3-8bc0-1de22fdd005a] Build of instance 7d8d4cdc-2b25-46f3-8bc0-1de22fdd005a was re-scheduled: Binding failed for port de7507a7-5aad-4849-8609-8ff49b3f040e, please check neutron logs for more information. {{(pid=62070) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 626.822028] env[62070]: DEBUG nova.compute.manager [None req-d154b157-ec97-49c4-b004-fc679da21516 tempest-ServersWithSpecificFlavorTestJSON-1618728433 tempest-ServersWithSpecificFlavorTestJSON-1618728433-project-member] [instance: 7d8d4cdc-2b25-46f3-8bc0-1de22fdd005a] Unplugging VIFs for instance {{(pid=62070) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 626.822028] env[62070]: DEBUG oslo_concurrency.lockutils [None req-d154b157-ec97-49c4-b004-fc679da21516 tempest-ServersWithSpecificFlavorTestJSON-1618728433 tempest-ServersWithSpecificFlavorTestJSON-1618728433-project-member] Acquiring lock "refresh_cache-7d8d4cdc-2b25-46f3-8bc0-1de22fdd005a" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 626.822192] env[62070]: DEBUG oslo_concurrency.lockutils [None req-d154b157-ec97-49c4-b004-fc679da21516 tempest-ServersWithSpecificFlavorTestJSON-1618728433 tempest-ServersWithSpecificFlavorTestJSON-1618728433-project-member] Acquired lock "refresh_cache-7d8d4cdc-2b25-46f3-8bc0-1de22fdd005a" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 626.822281] env[62070]: DEBUG nova.network.neutron [None req-d154b157-ec97-49c4-b004-fc679da21516 tempest-ServersWithSpecificFlavorTestJSON-1618728433 tempest-ServersWithSpecificFlavorTestJSON-1618728433-project-member] [instance: 7d8d4cdc-2b25-46f3-8bc0-1de22fdd005a] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 626.829440] env[62070]: DEBUG nova.compute.manager [None req-96d2c368-fe9f-4ee6-a530-cc95bf35b01a tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 73ab65b7-32e7-4206-8f31-466085319c71] Start spawning the instance on the hypervisor. 
{{(pid=62070) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 626.874061] env[62070]: DEBUG nova.virt.hardware [None req-96d2c368-fe9f-4ee6-a530-cc95bf35b01a tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T09:21:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T09:21:17Z,direct_url=,disk_format='vmdk',id=43ea607c-7ece-4601-9b11-75c6a16aa7dd,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9d42cb2bbadf40d6b35f237f71234611',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T09:21:18Z,virtual_size=,visibility=), allow threads: False {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 626.874329] env[62070]: DEBUG nova.virt.hardware [None req-96d2c368-fe9f-4ee6-a530-cc95bf35b01a tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Flavor limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 626.874503] env[62070]: DEBUG nova.virt.hardware [None req-96d2c368-fe9f-4ee6-a530-cc95bf35b01a tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Image limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 626.874678] env[62070]: DEBUG nova.virt.hardware [None req-96d2c368-fe9f-4ee6-a530-cc95bf35b01a tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Flavor pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 626.874828] env[62070]: DEBUG nova.virt.hardware [None req-96d2c368-fe9f-4ee6-a530-cc95bf35b01a tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Image pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 626.874972] env[62070]: DEBUG nova.virt.hardware [None req-96d2c368-fe9f-4ee6-a530-cc95bf35b01a tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 626.875227] env[62070]: DEBUG nova.virt.hardware [None req-96d2c368-fe9f-4ee6-a530-cc95bf35b01a tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 626.875365] env[62070]: DEBUG nova.virt.hardware [None req-96d2c368-fe9f-4ee6-a530-cc95bf35b01a tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 626.875538] env[62070]: DEBUG nova.virt.hardware [None 
req-96d2c368-fe9f-4ee6-a530-cc95bf35b01a tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Got 1 possible topologies {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 626.875756] env[62070]: DEBUG nova.virt.hardware [None req-96d2c368-fe9f-4ee6-a530-cc95bf35b01a tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 626.875864] env[62070]: DEBUG nova.virt.hardware [None req-96d2c368-fe9f-4ee6-a530-cc95bf35b01a tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 626.877013] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-475cc20b-4b61-4741-a7eb-b35e943d3247 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.885719] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9f29bc7-f51b-4c73-8172-4e54fd273c5d {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.053994] env[62070]: DEBUG oslo_concurrency.lockutils [None req-8576b7f8-0e27-4444-92a4-eb32008ddebf tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 627.380749] env[62070]: DEBUG nova.network.neutron [None req-d154b157-ec97-49c4-b004-fc679da21516 tempest-ServersWithSpecificFlavorTestJSON-1618728433 tempest-ServersWithSpecificFlavorTestJSON-1618728433-project-member] [instance: 7d8d4cdc-2b25-46f3-8bc0-1de22fdd005a] Instance cache missing network info. 
{{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 627.605930] env[62070]: DEBUG nova.network.neutron [None req-d154b157-ec97-49c4-b004-fc679da21516 tempest-ServersWithSpecificFlavorTestJSON-1618728433 tempest-ServersWithSpecificFlavorTestJSON-1618728433-project-member] [instance: 7d8d4cdc-2b25-46f3-8bc0-1de22fdd005a] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 627.714542] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-829ba509-6b21-467f-8b31-396f02a09bd1 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.722285] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea8dccdf-3c25-44c8-adf1-4e8e8701b399 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.759046] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-315e45da-74ee-434f-9514-a2c5623afed8 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.765593] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84a61e21-ce98-46e5-820f-1e13bc43199b {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.781493] env[62070]: DEBUG nova.compute.provider_tree [None req-235f2fca-0298-44f0-b867-2cd232992478 tempest-TenantUsagesTestJSON-193686055 tempest-TenantUsagesTestJSON-193686055-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 628.089731] env[62070]: DEBUG nova.compute.manager [req-24462fed-cafb-404c-803a-12ead103fc83 req-2ada4fc5-f99a-4851-9394-d3261c7bfc44 service nova] [instance: 73ab65b7-32e7-4206-8f31-466085319c71] Received event network-changed-676dde2d-c1da-454a-a873-1984ec64fadb {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 628.089731] env[62070]: DEBUG nova.compute.manager [req-24462fed-cafb-404c-803a-12ead103fc83 req-2ada4fc5-f99a-4851-9394-d3261c7bfc44 service nova] [instance: 73ab65b7-32e7-4206-8f31-466085319c71] Refreshing instance network info cache due to event network-changed-676dde2d-c1da-454a-a873-1984ec64fadb. 
{{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 628.089897] env[62070]: DEBUG oslo_concurrency.lockutils [req-24462fed-cafb-404c-803a-12ead103fc83 req-2ada4fc5-f99a-4851-9394-d3261c7bfc44 service nova] Acquiring lock "refresh_cache-73ab65b7-32e7-4206-8f31-466085319c71" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 628.091042] env[62070]: DEBUG oslo_concurrency.lockutils [req-24462fed-cafb-404c-803a-12ead103fc83 req-2ada4fc5-f99a-4851-9394-d3261c7bfc44 service nova] Acquired lock "refresh_cache-73ab65b7-32e7-4206-8f31-466085319c71" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 628.091042] env[62070]: DEBUG nova.network.neutron [req-24462fed-cafb-404c-803a-12ead103fc83 req-2ada4fc5-f99a-4851-9394-d3261c7bfc44 service nova] [instance: 73ab65b7-32e7-4206-8f31-466085319c71] Refreshing network info cache for port 676dde2d-c1da-454a-a873-1984ec64fadb {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 628.108396] env[62070]: DEBUG oslo_concurrency.lockutils [None req-d154b157-ec97-49c4-b004-fc679da21516 tempest-ServersWithSpecificFlavorTestJSON-1618728433 tempest-ServersWithSpecificFlavorTestJSON-1618728433-project-member] Releasing lock "refresh_cache-7d8d4cdc-2b25-46f3-8bc0-1de22fdd005a" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 628.108936] env[62070]: DEBUG nova.compute.manager [None req-d154b157-ec97-49c4-b004-fc679da21516 tempest-ServersWithSpecificFlavorTestJSON-1618728433 tempest-ServersWithSpecificFlavorTestJSON-1618728433-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62070) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 628.108936] env[62070]: DEBUG nova.compute.manager [None req-d154b157-ec97-49c4-b004-fc679da21516 tempest-ServersWithSpecificFlavorTestJSON-1618728433 tempest-ServersWithSpecificFlavorTestJSON-1618728433-project-member] [instance: 7d8d4cdc-2b25-46f3-8bc0-1de22fdd005a] Deallocating network for instance {{(pid=62070) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 628.109052] env[62070]: DEBUG nova.network.neutron [None req-d154b157-ec97-49c4-b004-fc679da21516 tempest-ServersWithSpecificFlavorTestJSON-1618728433 tempest-ServersWithSpecificFlavorTestJSON-1618728433-project-member] [instance: 7d8d4cdc-2b25-46f3-8bc0-1de22fdd005a] deallocate_for_instance() {{(pid=62070) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 628.259905] env[62070]: ERROR nova.compute.manager [None req-96d2c368-fe9f-4ee6-a530-cc95bf35b01a tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 676dde2d-c1da-454a-a873-1984ec64fadb, please check neutron logs for more information. 
[ 628.259905] env[62070]: ERROR nova.compute.manager Traceback (most recent call last): [ 628.259905] env[62070]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 628.259905] env[62070]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 628.259905] env[62070]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 628.259905] env[62070]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 628.259905] env[62070]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 628.259905] env[62070]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 628.259905] env[62070]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 628.259905] env[62070]: ERROR nova.compute.manager self.force_reraise() [ 628.259905] env[62070]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 628.259905] env[62070]: ERROR nova.compute.manager raise self.value [ 628.259905] env[62070]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 628.259905] env[62070]: ERROR nova.compute.manager updated_port = self._update_port( [ 628.259905] env[62070]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 628.259905] env[62070]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 628.260374] env[62070]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 628.260374] env[62070]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 628.260374] env[62070]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 676dde2d-c1da-454a-a873-1984ec64fadb, please check neutron logs for more information. 
[ 628.260374] env[62070]: ERROR nova.compute.manager [ 628.260374] env[62070]: Traceback (most recent call last): [ 628.260374] env[62070]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 628.260374] env[62070]: listener.cb(fileno) [ 628.260374] env[62070]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 628.260374] env[62070]: result = function(*args, **kwargs) [ 628.260374] env[62070]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 628.260374] env[62070]: return func(*args, **kwargs) [ 628.260374] env[62070]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 628.260374] env[62070]: raise e [ 628.260374] env[62070]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 628.260374] env[62070]: nwinfo = self.network_api.allocate_for_instance( [ 628.260374] env[62070]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 628.260374] env[62070]: created_port_ids = self._update_ports_for_instance( [ 628.260374] env[62070]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 628.260374] env[62070]: with excutils.save_and_reraise_exception(): [ 628.260374] env[62070]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 628.260374] env[62070]: self.force_reraise() [ 628.260374] env[62070]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 628.260374] env[62070]: raise self.value [ 628.260374] env[62070]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 628.260374] env[62070]: updated_port = self._update_port( [ 628.260374] env[62070]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 628.260374] env[62070]: _ensure_no_port_binding_failure(port) [ 628.260374] env[62070]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 628.260374] env[62070]: raise exception.PortBindingFailed(port_id=port['id']) [ 628.261032] env[62070]: nova.exception.PortBindingFailed: Binding failed for port 676dde2d-c1da-454a-a873-1984ec64fadb, please check neutron logs for more information. [ 628.261032] env[62070]: Removing descriptor: 16 [ 628.261032] env[62070]: ERROR nova.compute.manager [None req-96d2c368-fe9f-4ee6-a530-cc95bf35b01a tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 73ab65b7-32e7-4206-8f31-466085319c71] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 676dde2d-c1da-454a-a873-1984ec64fadb, please check neutron logs for more information. 
[ 628.261032] env[62070]: ERROR nova.compute.manager [instance: 73ab65b7-32e7-4206-8f31-466085319c71] Traceback (most recent call last): [ 628.261032] env[62070]: ERROR nova.compute.manager [instance: 73ab65b7-32e7-4206-8f31-466085319c71] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 628.261032] env[62070]: ERROR nova.compute.manager [instance: 73ab65b7-32e7-4206-8f31-466085319c71] yield resources [ 628.261032] env[62070]: ERROR nova.compute.manager [instance: 73ab65b7-32e7-4206-8f31-466085319c71] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 628.261032] env[62070]: ERROR nova.compute.manager [instance: 73ab65b7-32e7-4206-8f31-466085319c71] self.driver.spawn(context, instance, image_meta, [ 628.261032] env[62070]: ERROR nova.compute.manager [instance: 73ab65b7-32e7-4206-8f31-466085319c71] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 628.261032] env[62070]: ERROR nova.compute.manager [instance: 73ab65b7-32e7-4206-8f31-466085319c71] self._vmops.spawn(context, instance, image_meta, injected_files, [ 628.261032] env[62070]: ERROR nova.compute.manager [instance: 73ab65b7-32e7-4206-8f31-466085319c71] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 628.261032] env[62070]: ERROR nova.compute.manager [instance: 73ab65b7-32e7-4206-8f31-466085319c71] vm_ref = self.build_virtual_machine(instance, [ 628.261284] env[62070]: ERROR nova.compute.manager [instance: 73ab65b7-32e7-4206-8f31-466085319c71] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 628.261284] env[62070]: ERROR nova.compute.manager [instance: 73ab65b7-32e7-4206-8f31-466085319c71] vif_infos = vmwarevif.get_vif_info(self._session, [ 628.261284] env[62070]: ERROR nova.compute.manager [instance: 73ab65b7-32e7-4206-8f31-466085319c71] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 628.261284] env[62070]: ERROR nova.compute.manager [instance: 73ab65b7-32e7-4206-8f31-466085319c71] for vif in network_info: [ 628.261284] env[62070]: ERROR nova.compute.manager [instance: 73ab65b7-32e7-4206-8f31-466085319c71] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 628.261284] env[62070]: ERROR nova.compute.manager [instance: 73ab65b7-32e7-4206-8f31-466085319c71] return self._sync_wrapper(fn, *args, **kwargs) [ 628.261284] env[62070]: ERROR nova.compute.manager [instance: 73ab65b7-32e7-4206-8f31-466085319c71] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 628.261284] env[62070]: ERROR nova.compute.manager [instance: 73ab65b7-32e7-4206-8f31-466085319c71] self.wait() [ 628.261284] env[62070]: ERROR nova.compute.manager [instance: 73ab65b7-32e7-4206-8f31-466085319c71] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 628.261284] env[62070]: ERROR nova.compute.manager [instance: 73ab65b7-32e7-4206-8f31-466085319c71] self[:] = self._gt.wait() [ 628.261284] env[62070]: ERROR nova.compute.manager [instance: 73ab65b7-32e7-4206-8f31-466085319c71] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 628.261284] env[62070]: ERROR nova.compute.manager [instance: 73ab65b7-32e7-4206-8f31-466085319c71] return self._exit_event.wait() [ 628.261284] env[62070]: ERROR nova.compute.manager [instance: 73ab65b7-32e7-4206-8f31-466085319c71] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 628.261551] env[62070]: ERROR 
nova.compute.manager [instance: 73ab65b7-32e7-4206-8f31-466085319c71] result = hub.switch() [ 628.261551] env[62070]: ERROR nova.compute.manager [instance: 73ab65b7-32e7-4206-8f31-466085319c71] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 628.261551] env[62070]: ERROR nova.compute.manager [instance: 73ab65b7-32e7-4206-8f31-466085319c71] return self.greenlet.switch() [ 628.261551] env[62070]: ERROR nova.compute.manager [instance: 73ab65b7-32e7-4206-8f31-466085319c71] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 628.261551] env[62070]: ERROR nova.compute.manager [instance: 73ab65b7-32e7-4206-8f31-466085319c71] result = function(*args, **kwargs) [ 628.261551] env[62070]: ERROR nova.compute.manager [instance: 73ab65b7-32e7-4206-8f31-466085319c71] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 628.261551] env[62070]: ERROR nova.compute.manager [instance: 73ab65b7-32e7-4206-8f31-466085319c71] return func(*args, **kwargs) [ 628.261551] env[62070]: ERROR nova.compute.manager [instance: 73ab65b7-32e7-4206-8f31-466085319c71] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 628.261551] env[62070]: ERROR nova.compute.manager [instance: 73ab65b7-32e7-4206-8f31-466085319c71] raise e [ 628.261551] env[62070]: ERROR nova.compute.manager [instance: 73ab65b7-32e7-4206-8f31-466085319c71] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 628.261551] env[62070]: ERROR nova.compute.manager [instance: 73ab65b7-32e7-4206-8f31-466085319c71] nwinfo = self.network_api.allocate_for_instance( [ 628.261551] env[62070]: ERROR nova.compute.manager [instance: 73ab65b7-32e7-4206-8f31-466085319c71] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 628.261551] env[62070]: ERROR nova.compute.manager [instance: 73ab65b7-32e7-4206-8f31-466085319c71] created_port_ids = self._update_ports_for_instance( [ 628.261820] env[62070]: ERROR nova.compute.manager [instance: 73ab65b7-32e7-4206-8f31-466085319c71] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 628.261820] env[62070]: ERROR nova.compute.manager [instance: 73ab65b7-32e7-4206-8f31-466085319c71] with excutils.save_and_reraise_exception(): [ 628.261820] env[62070]: ERROR nova.compute.manager [instance: 73ab65b7-32e7-4206-8f31-466085319c71] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 628.261820] env[62070]: ERROR nova.compute.manager [instance: 73ab65b7-32e7-4206-8f31-466085319c71] self.force_reraise() [ 628.261820] env[62070]: ERROR nova.compute.manager [instance: 73ab65b7-32e7-4206-8f31-466085319c71] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 628.261820] env[62070]: ERROR nova.compute.manager [instance: 73ab65b7-32e7-4206-8f31-466085319c71] raise self.value [ 628.261820] env[62070]: ERROR nova.compute.manager [instance: 73ab65b7-32e7-4206-8f31-466085319c71] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 628.261820] env[62070]: ERROR nova.compute.manager [instance: 73ab65b7-32e7-4206-8f31-466085319c71] updated_port = self._update_port( [ 628.261820] env[62070]: ERROR nova.compute.manager [instance: 73ab65b7-32e7-4206-8f31-466085319c71] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 628.261820] 
env[62070]: ERROR nova.compute.manager [instance: 73ab65b7-32e7-4206-8f31-466085319c71] _ensure_no_port_binding_failure(port) [ 628.261820] env[62070]: ERROR nova.compute.manager [instance: 73ab65b7-32e7-4206-8f31-466085319c71] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 628.261820] env[62070]: ERROR nova.compute.manager [instance: 73ab65b7-32e7-4206-8f31-466085319c71] raise exception.PortBindingFailed(port_id=port['id']) [ 628.262145] env[62070]: ERROR nova.compute.manager [instance: 73ab65b7-32e7-4206-8f31-466085319c71] nova.exception.PortBindingFailed: Binding failed for port 676dde2d-c1da-454a-a873-1984ec64fadb, please check neutron logs for more information. [ 628.262145] env[62070]: ERROR nova.compute.manager [instance: 73ab65b7-32e7-4206-8f31-466085319c71] [ 628.262145] env[62070]: INFO nova.compute.manager [None req-96d2c368-fe9f-4ee6-a530-cc95bf35b01a tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 73ab65b7-32e7-4206-8f31-466085319c71] Terminating instance [ 628.263970] env[62070]: DEBUG oslo_concurrency.lockutils [None req-96d2c368-fe9f-4ee6-a530-cc95bf35b01a tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Acquiring lock "refresh_cache-73ab65b7-32e7-4206-8f31-466085319c71" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 628.278039] env[62070]: DEBUG nova.network.neutron [None req-d154b157-ec97-49c4-b004-fc679da21516 tempest-ServersWithSpecificFlavorTestJSON-1618728433 tempest-ServersWithSpecificFlavorTestJSON-1618728433-project-member] [instance: 7d8d4cdc-2b25-46f3-8bc0-1de22fdd005a] Instance cache missing network info. {{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 628.284516] env[62070]: DEBUG nova.scheduler.client.report [None req-235f2fca-0298-44f0-b867-2cd232992478 tempest-TenantUsagesTestJSON-193686055 tempest-TenantUsagesTestJSON-193686055-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 628.621529] env[62070]: DEBUG nova.network.neutron [req-24462fed-cafb-404c-803a-12ead103fc83 req-2ada4fc5-f99a-4851-9394-d3261c7bfc44 service nova] [instance: 73ab65b7-32e7-4206-8f31-466085319c71] Instance cache missing network info. 
{{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 628.781213] env[62070]: DEBUG nova.network.neutron [None req-d154b157-ec97-49c4-b004-fc679da21516 tempest-ServersWithSpecificFlavorTestJSON-1618728433 tempest-ServersWithSpecificFlavorTestJSON-1618728433-project-member] [instance: 7d8d4cdc-2b25-46f3-8bc0-1de22fdd005a] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 628.795644] env[62070]: DEBUG oslo_concurrency.lockutils [None req-235f2fca-0298-44f0-b867-2cd232992478 tempest-TenantUsagesTestJSON-193686055 tempest-TenantUsagesTestJSON-193686055-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.982s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 628.795644] env[62070]: ERROR nova.compute.manager [None req-235f2fca-0298-44f0-b867-2cd232992478 tempest-TenantUsagesTestJSON-193686055 tempest-TenantUsagesTestJSON-193686055-project-member] [instance: 2226072d-16f2-4ea1-a56c-d866554c7379] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 9037323c-72d6-4bfa-ad3e-0a3d37347560, please check neutron logs for more information. [ 628.795644] env[62070]: ERROR nova.compute.manager [instance: 2226072d-16f2-4ea1-a56c-d866554c7379] Traceback (most recent call last): [ 628.795644] env[62070]: ERROR nova.compute.manager [instance: 2226072d-16f2-4ea1-a56c-d866554c7379] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 628.795644] env[62070]: ERROR nova.compute.manager [instance: 2226072d-16f2-4ea1-a56c-d866554c7379] self.driver.spawn(context, instance, image_meta, [ 628.795644] env[62070]: ERROR nova.compute.manager [instance: 2226072d-16f2-4ea1-a56c-d866554c7379] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 628.795644] env[62070]: ERROR nova.compute.manager [instance: 2226072d-16f2-4ea1-a56c-d866554c7379] self._vmops.spawn(context, instance, image_meta, injected_files, [ 628.795644] env[62070]: ERROR nova.compute.manager [instance: 2226072d-16f2-4ea1-a56c-d866554c7379] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 628.795644] env[62070]: ERROR nova.compute.manager [instance: 2226072d-16f2-4ea1-a56c-d866554c7379] vm_ref = self.build_virtual_machine(instance, [ 628.795993] env[62070]: ERROR nova.compute.manager [instance: 2226072d-16f2-4ea1-a56c-d866554c7379] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 628.795993] env[62070]: ERROR nova.compute.manager [instance: 2226072d-16f2-4ea1-a56c-d866554c7379] vif_infos = vmwarevif.get_vif_info(self._session, [ 628.795993] env[62070]: ERROR nova.compute.manager [instance: 2226072d-16f2-4ea1-a56c-d866554c7379] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 628.795993] env[62070]: ERROR nova.compute.manager [instance: 2226072d-16f2-4ea1-a56c-d866554c7379] for vif in network_info: [ 628.795993] env[62070]: ERROR nova.compute.manager [instance: 2226072d-16f2-4ea1-a56c-d866554c7379] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 628.795993] env[62070]: ERROR nova.compute.manager [instance: 2226072d-16f2-4ea1-a56c-d866554c7379] return self._sync_wrapper(fn, *args, **kwargs) [ 628.795993] env[62070]: ERROR nova.compute.manager [instance: 
2226072d-16f2-4ea1-a56c-d866554c7379] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 628.795993] env[62070]: ERROR nova.compute.manager [instance: 2226072d-16f2-4ea1-a56c-d866554c7379] self.wait() [ 628.795993] env[62070]: ERROR nova.compute.manager [instance: 2226072d-16f2-4ea1-a56c-d866554c7379] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 628.795993] env[62070]: ERROR nova.compute.manager [instance: 2226072d-16f2-4ea1-a56c-d866554c7379] self[:] = self._gt.wait() [ 628.795993] env[62070]: ERROR nova.compute.manager [instance: 2226072d-16f2-4ea1-a56c-d866554c7379] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 628.795993] env[62070]: ERROR nova.compute.manager [instance: 2226072d-16f2-4ea1-a56c-d866554c7379] return self._exit_event.wait() [ 628.795993] env[62070]: ERROR nova.compute.manager [instance: 2226072d-16f2-4ea1-a56c-d866554c7379] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 628.796292] env[62070]: ERROR nova.compute.manager [instance: 2226072d-16f2-4ea1-a56c-d866554c7379] result = hub.switch() [ 628.796292] env[62070]: ERROR nova.compute.manager [instance: 2226072d-16f2-4ea1-a56c-d866554c7379] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 628.796292] env[62070]: ERROR nova.compute.manager [instance: 2226072d-16f2-4ea1-a56c-d866554c7379] return self.greenlet.switch() [ 628.796292] env[62070]: ERROR nova.compute.manager [instance: 2226072d-16f2-4ea1-a56c-d866554c7379] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 628.796292] env[62070]: ERROR nova.compute.manager [instance: 2226072d-16f2-4ea1-a56c-d866554c7379] result = function(*args, **kwargs) [ 628.796292] env[62070]: ERROR nova.compute.manager [instance: 2226072d-16f2-4ea1-a56c-d866554c7379] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 628.796292] env[62070]: ERROR nova.compute.manager [instance: 2226072d-16f2-4ea1-a56c-d866554c7379] return func(*args, **kwargs) [ 628.796292] env[62070]: ERROR nova.compute.manager [instance: 2226072d-16f2-4ea1-a56c-d866554c7379] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 628.796292] env[62070]: ERROR nova.compute.manager [instance: 2226072d-16f2-4ea1-a56c-d866554c7379] raise e [ 628.796292] env[62070]: ERROR nova.compute.manager [instance: 2226072d-16f2-4ea1-a56c-d866554c7379] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 628.796292] env[62070]: ERROR nova.compute.manager [instance: 2226072d-16f2-4ea1-a56c-d866554c7379] nwinfo = self.network_api.allocate_for_instance( [ 628.796292] env[62070]: ERROR nova.compute.manager [instance: 2226072d-16f2-4ea1-a56c-d866554c7379] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 628.796292] env[62070]: ERROR nova.compute.manager [instance: 2226072d-16f2-4ea1-a56c-d866554c7379] created_port_ids = self._update_ports_for_instance( [ 628.796596] env[62070]: ERROR nova.compute.manager [instance: 2226072d-16f2-4ea1-a56c-d866554c7379] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 628.796596] env[62070]: ERROR nova.compute.manager [instance: 2226072d-16f2-4ea1-a56c-d866554c7379] with excutils.save_and_reraise_exception(): [ 628.796596] env[62070]: ERROR nova.compute.manager [instance: 
2226072d-16f2-4ea1-a56c-d866554c7379] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 628.796596] env[62070]: ERROR nova.compute.manager [instance: 2226072d-16f2-4ea1-a56c-d866554c7379] self.force_reraise() [ 628.796596] env[62070]: ERROR nova.compute.manager [instance: 2226072d-16f2-4ea1-a56c-d866554c7379] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 628.796596] env[62070]: ERROR nova.compute.manager [instance: 2226072d-16f2-4ea1-a56c-d866554c7379] raise self.value [ 628.796596] env[62070]: ERROR nova.compute.manager [instance: 2226072d-16f2-4ea1-a56c-d866554c7379] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 628.796596] env[62070]: ERROR nova.compute.manager [instance: 2226072d-16f2-4ea1-a56c-d866554c7379] updated_port = self._update_port( [ 628.796596] env[62070]: ERROR nova.compute.manager [instance: 2226072d-16f2-4ea1-a56c-d866554c7379] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 628.796596] env[62070]: ERROR nova.compute.manager [instance: 2226072d-16f2-4ea1-a56c-d866554c7379] _ensure_no_port_binding_failure(port) [ 628.796596] env[62070]: ERROR nova.compute.manager [instance: 2226072d-16f2-4ea1-a56c-d866554c7379] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 628.796596] env[62070]: ERROR nova.compute.manager [instance: 2226072d-16f2-4ea1-a56c-d866554c7379] raise exception.PortBindingFailed(port_id=port['id']) [ 628.796909] env[62070]: ERROR nova.compute.manager [instance: 2226072d-16f2-4ea1-a56c-d866554c7379] nova.exception.PortBindingFailed: Binding failed for port 9037323c-72d6-4bfa-ad3e-0a3d37347560, please check neutron logs for more information. [ 628.796909] env[62070]: ERROR nova.compute.manager [instance: 2226072d-16f2-4ea1-a56c-d866554c7379] [ 628.796909] env[62070]: DEBUG nova.compute.utils [None req-235f2fca-0298-44f0-b867-2cd232992478 tempest-TenantUsagesTestJSON-193686055 tempest-TenantUsagesTestJSON-193686055-project-member] [instance: 2226072d-16f2-4ea1-a56c-d866554c7379] Binding failed for port 9037323c-72d6-4bfa-ad3e-0a3d37347560, please check neutron logs for more information. 
{{(pid=62070) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 628.798056] env[62070]: DEBUG oslo_concurrency.lockutils [None req-defc523d-479c-483f-91ae-c073714ce1fe tempest-FloatingIPsAssociationNegativeTestJSON-743310595 tempest-FloatingIPsAssociationNegativeTestJSON-743310595-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.155s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 628.801419] env[62070]: INFO nova.compute.claims [None req-defc523d-479c-483f-91ae-c073714ce1fe tempest-FloatingIPsAssociationNegativeTestJSON-743310595 tempest-FloatingIPsAssociationNegativeTestJSON-743310595-project-member] [instance: 73cb4a44-a4d9-4c8c-8472-f99b5d449cec] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 628.805170] env[62070]: DEBUG nova.compute.manager [None req-235f2fca-0298-44f0-b867-2cd232992478 tempest-TenantUsagesTestJSON-193686055 tempest-TenantUsagesTestJSON-193686055-project-member] [instance: 2226072d-16f2-4ea1-a56c-d866554c7379] Build of instance 2226072d-16f2-4ea1-a56c-d866554c7379 was re-scheduled: Binding failed for port 9037323c-72d6-4bfa-ad3e-0a3d37347560, please check neutron logs for more information. {{(pid=62070) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 628.806392] env[62070]: DEBUG nova.compute.manager [None req-235f2fca-0298-44f0-b867-2cd232992478 tempest-TenantUsagesTestJSON-193686055 tempest-TenantUsagesTestJSON-193686055-project-member] [instance: 2226072d-16f2-4ea1-a56c-d866554c7379] Unplugging VIFs for instance {{(pid=62070) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 628.809301] env[62070]: DEBUG oslo_concurrency.lockutils [None req-235f2fca-0298-44f0-b867-2cd232992478 tempest-TenantUsagesTestJSON-193686055 tempest-TenantUsagesTestJSON-193686055-project-member] Acquiring lock "refresh_cache-2226072d-16f2-4ea1-a56c-d866554c7379" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 628.809474] env[62070]: DEBUG oslo_concurrency.lockutils [None req-235f2fca-0298-44f0-b867-2cd232992478 tempest-TenantUsagesTestJSON-193686055 tempest-TenantUsagesTestJSON-193686055-project-member] Acquired lock "refresh_cache-2226072d-16f2-4ea1-a56c-d866554c7379" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 628.809643] env[62070]: DEBUG nova.network.neutron [None req-235f2fca-0298-44f0-b867-2cd232992478 tempest-TenantUsagesTestJSON-193686055 tempest-TenantUsagesTestJSON-193686055-project-member] [instance: 2226072d-16f2-4ea1-a56c-d866554c7379] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 628.838613] env[62070]: DEBUG nova.network.neutron [req-24462fed-cafb-404c-803a-12ead103fc83 req-2ada4fc5-f99a-4851-9394-d3261c7bfc44 service nova] [instance: 73ab65b7-32e7-4206-8f31-466085319c71] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 629.283820] env[62070]: INFO nova.compute.manager [None req-d154b157-ec97-49c4-b004-fc679da21516 tempest-ServersWithSpecificFlavorTestJSON-1618728433 tempest-ServersWithSpecificFlavorTestJSON-1618728433-project-member] [instance: 7d8d4cdc-2b25-46f3-8bc0-1de22fdd005a] Took 1.17 seconds to deallocate network for instance. 
[ 629.340432] env[62070]: DEBUG nova.network.neutron [None req-235f2fca-0298-44f0-b867-2cd232992478 tempest-TenantUsagesTestJSON-193686055 tempest-TenantUsagesTestJSON-193686055-project-member] [instance: 2226072d-16f2-4ea1-a56c-d866554c7379] Instance cache missing network info. {{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 629.342420] env[62070]: DEBUG oslo_concurrency.lockutils [req-24462fed-cafb-404c-803a-12ead103fc83 req-2ada4fc5-f99a-4851-9394-d3261c7bfc44 service nova] Releasing lock "refresh_cache-73ab65b7-32e7-4206-8f31-466085319c71" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 629.344568] env[62070]: DEBUG oslo_concurrency.lockutils [None req-96d2c368-fe9f-4ee6-a530-cc95bf35b01a tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Acquired lock "refresh_cache-73ab65b7-32e7-4206-8f31-466085319c71" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 629.346391] env[62070]: DEBUG nova.network.neutron [None req-96d2c368-fe9f-4ee6-a530-cc95bf35b01a tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 73ab65b7-32e7-4206-8f31-466085319c71] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 629.423083] env[62070]: DEBUG nova.network.neutron [None req-235f2fca-0298-44f0-b867-2cd232992478 tempest-TenantUsagesTestJSON-193686055 tempest-TenantUsagesTestJSON-193686055-project-member] [instance: 2226072d-16f2-4ea1-a56c-d866554c7379] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 629.898537] env[62070]: DEBUG nova.network.neutron [None req-96d2c368-fe9f-4ee6-a530-cc95bf35b01a tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 73ab65b7-32e7-4206-8f31-466085319c71] Instance cache missing network info. {{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 629.927902] env[62070]: DEBUG oslo_concurrency.lockutils [None req-235f2fca-0298-44f0-b867-2cd232992478 tempest-TenantUsagesTestJSON-193686055 tempest-TenantUsagesTestJSON-193686055-project-member] Releasing lock "refresh_cache-2226072d-16f2-4ea1-a56c-d866554c7379" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 629.927902] env[62070]: DEBUG nova.compute.manager [None req-235f2fca-0298-44f0-b867-2cd232992478 tempest-TenantUsagesTestJSON-193686055 tempest-TenantUsagesTestJSON-193686055-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62070) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 629.927902] env[62070]: DEBUG nova.compute.manager [None req-235f2fca-0298-44f0-b867-2cd232992478 tempest-TenantUsagesTestJSON-193686055 tempest-TenantUsagesTestJSON-193686055-project-member] [instance: 2226072d-16f2-4ea1-a56c-d866554c7379] Deallocating network for instance {{(pid=62070) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 629.927902] env[62070]: DEBUG nova.network.neutron [None req-235f2fca-0298-44f0-b867-2cd232992478 tempest-TenantUsagesTestJSON-193686055 tempest-TenantUsagesTestJSON-193686055-project-member] [instance: 2226072d-16f2-4ea1-a56c-d866554c7379] deallocate_for_instance() {{(pid=62070) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 629.968621] env[62070]: DEBUG nova.network.neutron [None req-235f2fca-0298-44f0-b867-2cd232992478 tempest-TenantUsagesTestJSON-193686055 tempest-TenantUsagesTestJSON-193686055-project-member] [instance: 2226072d-16f2-4ea1-a56c-d866554c7379] Instance cache missing network info. {{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 630.154601] env[62070]: DEBUG nova.network.neutron [None req-96d2c368-fe9f-4ee6-a530-cc95bf35b01a tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 73ab65b7-32e7-4206-8f31-466085319c71] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 630.171423] env[62070]: DEBUG oslo_concurrency.lockutils [None req-0046a5be-b671-4771-8172-fdb6e585fc1d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Acquiring lock "1c1730e5-88af-4c7f-8bcc-d494db2cd723" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 630.171423] env[62070]: DEBUG oslo_concurrency.lockutils [None req-0046a5be-b671-4771-8172-fdb6e585fc1d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Lock "1c1730e5-88af-4c7f-8bcc-d494db2cd723" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 630.214544] env[62070]: DEBUG oslo_concurrency.lockutils [None req-0046a5be-b671-4771-8172-fdb6e585fc1d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Acquiring lock "076aed5b-4b08-4f3b-a940-d9cd95c32e57" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 630.216351] env[62070]: DEBUG oslo_concurrency.lockutils [None req-0046a5be-b671-4771-8172-fdb6e585fc1d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Lock "076aed5b-4b08-4f3b-a940-d9cd95c32e57" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.002s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 630.299619] env[62070]: DEBUG nova.compute.manager [req-2ff7b5ad-3949-43c3-b305-1a90c73aeb8c
req-98576c08-c832-4829-a28d-3dd23582e449 service nova] [instance: 73ab65b7-32e7-4206-8f31-466085319c71] Received event network-vif-deleted-676dde2d-c1da-454a-a873-1984ec64fadb {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 630.334134] env[62070]: INFO nova.scheduler.client.report [None req-d154b157-ec97-49c4-b004-fc679da21516 tempest-ServersWithSpecificFlavorTestJSON-1618728433 tempest-ServersWithSpecificFlavorTestJSON-1618728433-project-member] Deleted allocations for instance 7d8d4cdc-2b25-46f3-8bc0-1de22fdd005a [ 630.394766] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53181016-f9b5-449a-8719-ac897cdf3b24 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.415162] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aaf3cd6c-798d-4590-a1f8-e5fd8e985db1 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.455895] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b25652c-2783-4cc1-a877-a96c7c5286a4 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.466677] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7e8dc3f-507a-4e80-8c1e-3090795e9d54 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.471467] env[62070]: DEBUG nova.network.neutron [None req-235f2fca-0298-44f0-b867-2cd232992478 tempest-TenantUsagesTestJSON-193686055 tempest-TenantUsagesTestJSON-193686055-project-member] [instance: 2226072d-16f2-4ea1-a56c-d866554c7379] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 630.488754] env[62070]: DEBUG nova.compute.provider_tree [None req-defc523d-479c-483f-91ae-c073714ce1fe tempest-FloatingIPsAssociationNegativeTestJSON-743310595 tempest-FloatingIPsAssociationNegativeTestJSON-743310595-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 630.660105] env[62070]: DEBUG oslo_concurrency.lockutils [None req-96d2c368-fe9f-4ee6-a530-cc95bf35b01a tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Releasing lock "refresh_cache-73ab65b7-32e7-4206-8f31-466085319c71" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 630.660576] env[62070]: DEBUG nova.compute.manager [None req-96d2c368-fe9f-4ee6-a530-cc95bf35b01a tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 73ab65b7-32e7-4206-8f31-466085319c71] Start destroying the instance on the hypervisor. 
{{(pid=62070) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 630.660770] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-96d2c368-fe9f-4ee6-a530-cc95bf35b01a tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 73ab65b7-32e7-4206-8f31-466085319c71] Destroying instance {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 630.661103] env[62070]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-8c593c4d-5bac-49e3-9ce6-a44b6aa1415a {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.671336] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-679d3fb1-c03d-46a5-a34d-79aaf57a4aea {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.696044] env[62070]: WARNING nova.virt.vmwareapi.vmops [None req-96d2c368-fe9f-4ee6-a530-cc95bf35b01a tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 73ab65b7-32e7-4206-8f31-466085319c71] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 73ab65b7-32e7-4206-8f31-466085319c71 could not be found. [ 630.697608] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-96d2c368-fe9f-4ee6-a530-cc95bf35b01a tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 73ab65b7-32e7-4206-8f31-466085319c71] Instance destroyed {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 630.697608] env[62070]: INFO nova.compute.manager [None req-96d2c368-fe9f-4ee6-a530-cc95bf35b01a tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 73ab65b7-32e7-4206-8f31-466085319c71] Took 0.04 seconds to destroy the instance on the hypervisor. [ 630.697608] env[62070]: DEBUG oslo.service.loopingcall [None req-96d2c368-fe9f-4ee6-a530-cc95bf35b01a tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 630.697608] env[62070]: DEBUG nova.compute.manager [-] [instance: 73ab65b7-32e7-4206-8f31-466085319c71] Deallocating network for instance {{(pid=62070) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 630.697608] env[62070]: DEBUG nova.network.neutron [-] [instance: 73ab65b7-32e7-4206-8f31-466085319c71] deallocate_for_instance() {{(pid=62070) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 630.723496] env[62070]: DEBUG nova.network.neutron [-] [instance: 73ab65b7-32e7-4206-8f31-466085319c71] Instance cache missing network info.
{{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 630.855716] env[62070]: DEBUG oslo_concurrency.lockutils [None req-d154b157-ec97-49c4-b004-fc679da21516 tempest-ServersWithSpecificFlavorTestJSON-1618728433 tempest-ServersWithSpecificFlavorTestJSON-1618728433-project-member] Lock "7d8d4cdc-2b25-46f3-8bc0-1de22fdd005a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 67.621s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 630.975031] env[62070]: INFO nova.compute.manager [None req-235f2fca-0298-44f0-b867-2cd232992478 tempest-TenantUsagesTestJSON-193686055 tempest-TenantUsagesTestJSON-193686055-project-member] [instance: 2226072d-16f2-4ea1-a56c-d866554c7379] Took 1.05 seconds to deallocate network for instance. [ 630.993383] env[62070]: DEBUG nova.scheduler.client.report [None req-defc523d-479c-483f-91ae-c073714ce1fe tempest-FloatingIPsAssociationNegativeTestJSON-743310595 tempest-FloatingIPsAssociationNegativeTestJSON-743310595-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 631.226624] env[62070]: DEBUG nova.network.neutron [-] [instance: 73ab65b7-32e7-4206-8f31-466085319c71] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 631.358231] env[62070]: DEBUG nova.compute.manager [None req-a4dda7df-1307-4390-b3d0-8b40b9135ae8 tempest-ImagesOneServerTestJSON-651636145 tempest-ImagesOneServerTestJSON-651636145-project-member] [instance: d41f73e0-a188-4cc4-8391-938178aad496] Starting instance... {{(pid=62070) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 631.499380] env[62070]: DEBUG oslo_concurrency.lockutils [None req-defc523d-479c-483f-91ae-c073714ce1fe tempest-FloatingIPsAssociationNegativeTestJSON-743310595 tempest-FloatingIPsAssociationNegativeTestJSON-743310595-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.701s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 631.500439] env[62070]: DEBUG nova.compute.manager [None req-defc523d-479c-483f-91ae-c073714ce1fe tempest-FloatingIPsAssociationNegativeTestJSON-743310595 tempest-FloatingIPsAssociationNegativeTestJSON-743310595-project-member] [instance: 73cb4a44-a4d9-4c8c-8472-f99b5d449cec] Start building networks asynchronously for instance.
{{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 631.502657] env[62070]: DEBUG oslo_concurrency.lockutils [None req-67d8c644-2517-46bc-9d7f-c0ef47d3d2eb tempest-ImagesOneServerNegativeTestJSON-1867967444 tempest-ImagesOneServerNegativeTestJSON-1867967444-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 15.692s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 631.732493] env[62070]: INFO nova.compute.manager [-] [instance: 73ab65b7-32e7-4206-8f31-466085319c71] Took 1.03 seconds to deallocate network for instance. [ 631.736853] env[62070]: DEBUG nova.compute.claims [None req-96d2c368-fe9f-4ee6-a530-cc95bf35b01a tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 73ab65b7-32e7-4206-8f31-466085319c71] Aborting claim: {{(pid=62070) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 631.737200] env[62070]: DEBUG oslo_concurrency.lockutils [None req-96d2c368-fe9f-4ee6-a530-cc95bf35b01a tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 631.892670] env[62070]: DEBUG oslo_concurrency.lockutils [None req-a4dda7df-1307-4390-b3d0-8b40b9135ae8 tempest-ImagesOneServerTestJSON-651636145 tempest-ImagesOneServerTestJSON-651636145-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 632.011482] env[62070]: DEBUG nova.compute.utils [None req-defc523d-479c-483f-91ae-c073714ce1fe tempest-FloatingIPsAssociationNegativeTestJSON-743310595 tempest-FloatingIPsAssociationNegativeTestJSON-743310595-project-member] Using /dev/sd instead of None {{(pid=62070) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 632.014888] env[62070]: DEBUG nova.compute.manager [None req-defc523d-479c-483f-91ae-c073714ce1fe tempest-FloatingIPsAssociationNegativeTestJSON-743310595 tempest-FloatingIPsAssociationNegativeTestJSON-743310595-project-member] [instance: 73cb4a44-a4d9-4c8c-8472-f99b5d449cec] Allocating IP information in the background. 
{{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 632.017148] env[62070]: DEBUG nova.network.neutron [None req-defc523d-479c-483f-91ae-c073714ce1fe tempest-FloatingIPsAssociationNegativeTestJSON-743310595 tempest-FloatingIPsAssociationNegativeTestJSON-743310595-project-member] [instance: 73cb4a44-a4d9-4c8c-8472-f99b5d449cec] allocate_for_instance() {{(pid=62070) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 632.028021] env[62070]: INFO nova.scheduler.client.report [None req-235f2fca-0298-44f0-b867-2cd232992478 tempest-TenantUsagesTestJSON-193686055 tempest-TenantUsagesTestJSON-193686055-project-member] Deleted allocations for instance 2226072d-16f2-4ea1-a56c-d866554c7379 [ 632.071884] env[62070]: DEBUG nova.policy [None req-defc523d-479c-483f-91ae-c073714ce1fe tempest-FloatingIPsAssociationNegativeTestJSON-743310595 tempest-FloatingIPsAssociationNegativeTestJSON-743310595-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '18c6f9b2835541d0912178bc3131e0e5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b16226be275240f9926e184adef3f8b9', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62070) authorize /opt/stack/nova/nova/policy.py:201}} [ 632.426962] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33ca5fa2-281b-4af3-b1f3-6e77a7304828 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.435053] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ad36971-db61-4b3c-aae7-44115dd14404 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.471807] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e56c6dbc-9d34-483b-9763-dbf849c9636c {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.479473] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5116ab68-0af3-4d32-a1b7-323d560cd0f1 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.495607] env[62070]: DEBUG nova.compute.provider_tree [None req-67d8c644-2517-46bc-9d7f-c0ef47d3d2eb tempest-ImagesOneServerNegativeTestJSON-1867967444 tempest-ImagesOneServerNegativeTestJSON-1867967444-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 632.521861] env[62070]: DEBUG nova.compute.manager [None req-defc523d-479c-483f-91ae-c073714ce1fe tempest-FloatingIPsAssociationNegativeTestJSON-743310595 tempest-FloatingIPsAssociationNegativeTestJSON-743310595-project-member] [instance: 73cb4a44-a4d9-4c8c-8472-f99b5d449cec] Start building block device mappings for instance. 
{{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 632.539482] env[62070]: DEBUG oslo_concurrency.lockutils [None req-235f2fca-0298-44f0-b867-2cd232992478 tempest-TenantUsagesTestJSON-193686055 tempest-TenantUsagesTestJSON-193686055-project-member] Lock "2226072d-16f2-4ea1-a56c-d866554c7379" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 69.069s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 632.674215] env[62070]: DEBUG nova.network.neutron [None req-defc523d-479c-483f-91ae-c073714ce1fe tempest-FloatingIPsAssociationNegativeTestJSON-743310595 tempest-FloatingIPsAssociationNegativeTestJSON-743310595-project-member] [instance: 73cb4a44-a4d9-4c8c-8472-f99b5d449cec] Successfully created port: e004e4fb-3439-483e-936e-b19dd561ecb0 {{(pid=62070) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 633.000874] env[62070]: DEBUG nova.scheduler.client.report [None req-67d8c644-2517-46bc-9d7f-c0ef47d3d2eb tempest-ImagesOneServerNegativeTestJSON-1867967444 tempest-ImagesOneServerNegativeTestJSON-1867967444-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 633.043967] env[62070]: DEBUG nova.compute.manager [None req-5085d6a6-6f80-4417-bbf7-554feeae07b1 tempest-ServerActionsTestOtherB-843110490 tempest-ServerActionsTestOtherB-843110490-project-member] [instance: a5b98f92-d287-4d40-8a21-d2de64026970] Starting instance... {{(pid=62070) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 633.506412] env[62070]: DEBUG oslo_concurrency.lockutils [None req-67d8c644-2517-46bc-9d7f-c0ef47d3d2eb tempest-ImagesOneServerNegativeTestJSON-1867967444 tempest-ImagesOneServerNegativeTestJSON-1867967444-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.004s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 633.507580] env[62070]: ERROR nova.compute.manager [None req-67d8c644-2517-46bc-9d7f-c0ef47d3d2eb tempest-ImagesOneServerNegativeTestJSON-1867967444 tempest-ImagesOneServerNegativeTestJSON-1867967444-project-member] [instance: d8478b63-3a62-4afa-950b-edf9774e8ea8] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 8016a15a-fbb1-4dfd-8dba-234cee68c43c, please check neutron logs for more information.
[ 633.507580] env[62070]: ERROR nova.compute.manager [instance: d8478b63-3a62-4afa-950b-edf9774e8ea8] Traceback (most recent call last): [ 633.507580] env[62070]: ERROR nova.compute.manager [instance: d8478b63-3a62-4afa-950b-edf9774e8ea8] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 633.507580] env[62070]: ERROR nova.compute.manager [instance: d8478b63-3a62-4afa-950b-edf9774e8ea8] self.driver.spawn(context, instance, image_meta, [ 633.507580] env[62070]: ERROR nova.compute.manager [instance: d8478b63-3a62-4afa-950b-edf9774e8ea8] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 633.507580] env[62070]: ERROR nova.compute.manager [instance: d8478b63-3a62-4afa-950b-edf9774e8ea8] self._vmops.spawn(context, instance, image_meta, injected_files, [ 633.507580] env[62070]: ERROR nova.compute.manager [instance: d8478b63-3a62-4afa-950b-edf9774e8ea8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 633.507580] env[62070]: ERROR nova.compute.manager [instance: d8478b63-3a62-4afa-950b-edf9774e8ea8] vm_ref = self.build_virtual_machine(instance, [ 633.507580] env[62070]: ERROR nova.compute.manager [instance: d8478b63-3a62-4afa-950b-edf9774e8ea8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 633.507580] env[62070]: ERROR nova.compute.manager [instance: d8478b63-3a62-4afa-950b-edf9774e8ea8] vif_infos = vmwarevif.get_vif_info(self._session, [ 633.507580] env[62070]: ERROR nova.compute.manager [instance: d8478b63-3a62-4afa-950b-edf9774e8ea8] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 633.508139] env[62070]: ERROR nova.compute.manager [instance: d8478b63-3a62-4afa-950b-edf9774e8ea8] for vif in network_info: [ 633.508139] env[62070]: ERROR nova.compute.manager [instance: d8478b63-3a62-4afa-950b-edf9774e8ea8] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 633.508139] env[62070]: ERROR nova.compute.manager [instance: d8478b63-3a62-4afa-950b-edf9774e8ea8] return self._sync_wrapper(fn, *args, **kwargs) [ 633.508139] env[62070]: ERROR nova.compute.manager [instance: d8478b63-3a62-4afa-950b-edf9774e8ea8] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 633.508139] env[62070]: ERROR nova.compute.manager [instance: d8478b63-3a62-4afa-950b-edf9774e8ea8] self.wait() [ 633.508139] env[62070]: ERROR nova.compute.manager [instance: d8478b63-3a62-4afa-950b-edf9774e8ea8] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 633.508139] env[62070]: ERROR nova.compute.manager [instance: d8478b63-3a62-4afa-950b-edf9774e8ea8] self[:] = self._gt.wait() [ 633.508139] env[62070]: ERROR nova.compute.manager [instance: d8478b63-3a62-4afa-950b-edf9774e8ea8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 633.508139] env[62070]: ERROR nova.compute.manager [instance: d8478b63-3a62-4afa-950b-edf9774e8ea8] return self._exit_event.wait() [ 633.508139] env[62070]: ERROR nova.compute.manager [instance: d8478b63-3a62-4afa-950b-edf9774e8ea8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 633.508139] env[62070]: ERROR nova.compute.manager [instance: d8478b63-3a62-4afa-950b-edf9774e8ea8] result = hub.switch() [ 633.508139] env[62070]: ERROR nova.compute.manager [instance: d8478b63-3a62-4afa-950b-edf9774e8ea8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
633.508139] env[62070]: ERROR nova.compute.manager [instance: d8478b63-3a62-4afa-950b-edf9774e8ea8] return self.greenlet.switch() [ 633.508501] env[62070]: ERROR nova.compute.manager [instance: d8478b63-3a62-4afa-950b-edf9774e8ea8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 633.508501] env[62070]: ERROR nova.compute.manager [instance: d8478b63-3a62-4afa-950b-edf9774e8ea8] result = function(*args, **kwargs) [ 633.508501] env[62070]: ERROR nova.compute.manager [instance: d8478b63-3a62-4afa-950b-edf9774e8ea8] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 633.508501] env[62070]: ERROR nova.compute.manager [instance: d8478b63-3a62-4afa-950b-edf9774e8ea8] return func(*args, **kwargs) [ 633.508501] env[62070]: ERROR nova.compute.manager [instance: d8478b63-3a62-4afa-950b-edf9774e8ea8] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 633.508501] env[62070]: ERROR nova.compute.manager [instance: d8478b63-3a62-4afa-950b-edf9774e8ea8] raise e [ 633.508501] env[62070]: ERROR nova.compute.manager [instance: d8478b63-3a62-4afa-950b-edf9774e8ea8] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 633.508501] env[62070]: ERROR nova.compute.manager [instance: d8478b63-3a62-4afa-950b-edf9774e8ea8] nwinfo = self.network_api.allocate_for_instance( [ 633.508501] env[62070]: ERROR nova.compute.manager [instance: d8478b63-3a62-4afa-950b-edf9774e8ea8] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 633.508501] env[62070]: ERROR nova.compute.manager [instance: d8478b63-3a62-4afa-950b-edf9774e8ea8] created_port_ids = self._update_ports_for_instance( [ 633.508501] env[62070]: ERROR nova.compute.manager [instance: d8478b63-3a62-4afa-950b-edf9774e8ea8] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 633.508501] env[62070]: ERROR nova.compute.manager [instance: d8478b63-3a62-4afa-950b-edf9774e8ea8] with excutils.save_and_reraise_exception(): [ 633.508501] env[62070]: ERROR nova.compute.manager [instance: d8478b63-3a62-4afa-950b-edf9774e8ea8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 633.508784] env[62070]: ERROR nova.compute.manager [instance: d8478b63-3a62-4afa-950b-edf9774e8ea8] self.force_reraise() [ 633.508784] env[62070]: ERROR nova.compute.manager [instance: d8478b63-3a62-4afa-950b-edf9774e8ea8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 633.508784] env[62070]: ERROR nova.compute.manager [instance: d8478b63-3a62-4afa-950b-edf9774e8ea8] raise self.value [ 633.508784] env[62070]: ERROR nova.compute.manager [instance: d8478b63-3a62-4afa-950b-edf9774e8ea8] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 633.508784] env[62070]: ERROR nova.compute.manager [instance: d8478b63-3a62-4afa-950b-edf9774e8ea8] updated_port = self._update_port( [ 633.508784] env[62070]: ERROR nova.compute.manager [instance: d8478b63-3a62-4afa-950b-edf9774e8ea8] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 633.508784] env[62070]: ERROR nova.compute.manager [instance: d8478b63-3a62-4afa-950b-edf9774e8ea8] _ensure_no_port_binding_failure(port) [ 633.508784] env[62070]: ERROR nova.compute.manager [instance: d8478b63-3a62-4afa-950b-edf9774e8ea8] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 633.508784] env[62070]: ERROR nova.compute.manager [instance: d8478b63-3a62-4afa-950b-edf9774e8ea8] raise exception.PortBindingFailed(port_id=port['id']) [ 633.508784] env[62070]: ERROR nova.compute.manager [instance: d8478b63-3a62-4afa-950b-edf9774e8ea8] nova.exception.PortBindingFailed: Binding failed for port 8016a15a-fbb1-4dfd-8dba-234cee68c43c, please check neutron logs for more information. [ 633.508784] env[62070]: ERROR nova.compute.manager [instance: d8478b63-3a62-4afa-950b-edf9774e8ea8] [ 633.509809] env[62070]: DEBUG nova.compute.utils [None req-67d8c644-2517-46bc-9d7f-c0ef47d3d2eb tempest-ImagesOneServerNegativeTestJSON-1867967444 tempest-ImagesOneServerNegativeTestJSON-1867967444-project-member] [instance: d8478b63-3a62-4afa-950b-edf9774e8ea8] Binding failed for port 8016a15a-fbb1-4dfd-8dba-234cee68c43c, please check neutron logs for more information. {{(pid=62070) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 633.510638] env[62070]: DEBUG oslo_concurrency.lockutils [None req-fceeb460-143a-4ad7-bde1-61f1f32cf593 tempest-VolumesAssistedSnapshotsTest-120562102 tempest-VolumesAssistedSnapshotsTest-120562102-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 14.339s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 633.513712] env[62070]: DEBUG nova.compute.manager [None req-67d8c644-2517-46bc-9d7f-c0ef47d3d2eb tempest-ImagesOneServerNegativeTestJSON-1867967444 tempest-ImagesOneServerNegativeTestJSON-1867967444-project-member] [instance: d8478b63-3a62-4afa-950b-edf9774e8ea8] Build of instance d8478b63-3a62-4afa-950b-edf9774e8ea8 was re-scheduled: Binding failed for port 8016a15a-fbb1-4dfd-8dba-234cee68c43c, please check neutron logs for more information. 
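Editor's note on the traceback above: the failure originates in the helper named in the last frames, _ensure_no_port_binding_failure. The following standalone Python sketch (not the Nova source) approximates that check, assuming the Neutron convention that a port whose binding:vif_type comes back as 'binding_failed' could not be bound to the compute host, which is why the build is aborted and later re-scheduled.

    # Sketch only: simplified stand-in for the check seen in the traceback above.
    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__(
                f"Binding failed for port {port_id}, "
                "please check neutron logs for more information.")

    def ensure_no_port_binding_failure(port: dict) -> None:
        """Raise if Neutron reported a failed binding for this port."""
        if port.get('binding:vif_type') == 'binding_failed':
            raise PortBindingFailed(port_id=port['id'])

    # Example mirroring the log: port 8016a15a-... came back with a failed binding.
    try:
        ensure_no_port_binding_failure(
            {'id': '8016a15a-fbb1-4dfd-8dba-234cee68c43c',
             'binding:vif_type': 'binding_failed'})
    except PortBindingFailed as exc:
        print(exc)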
{{(pid=62070) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 633.514358] env[62070]: DEBUG nova.compute.manager [None req-67d8c644-2517-46bc-9d7f-c0ef47d3d2eb tempest-ImagesOneServerNegativeTestJSON-1867967444 tempest-ImagesOneServerNegativeTestJSON-1867967444-project-member] [instance: d8478b63-3a62-4afa-950b-edf9774e8ea8] Unplugging VIFs for instance {{(pid=62070) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 633.514662] env[62070]: DEBUG oslo_concurrency.lockutils [None req-67d8c644-2517-46bc-9d7f-c0ef47d3d2eb tempest-ImagesOneServerNegativeTestJSON-1867967444 tempest-ImagesOneServerNegativeTestJSON-1867967444-project-member] Acquiring lock "refresh_cache-d8478b63-3a62-4afa-950b-edf9774e8ea8" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 633.514819] env[62070]: DEBUG oslo_concurrency.lockutils [None req-67d8c644-2517-46bc-9d7f-c0ef47d3d2eb tempest-ImagesOneServerNegativeTestJSON-1867967444 tempest-ImagesOneServerNegativeTestJSON-1867967444-project-member] Acquired lock "refresh_cache-d8478b63-3a62-4afa-950b-edf9774e8ea8" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 633.518052] env[62070]: DEBUG nova.network.neutron [None req-67d8c644-2517-46bc-9d7f-c0ef47d3d2eb tempest-ImagesOneServerNegativeTestJSON-1867967444 tempest-ImagesOneServerNegativeTestJSON-1867967444-project-member] [instance: d8478b63-3a62-4afa-950b-edf9774e8ea8] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 633.537091] env[62070]: DEBUG nova.compute.manager [None req-defc523d-479c-483f-91ae-c073714ce1fe tempest-FloatingIPsAssociationNegativeTestJSON-743310595 tempest-FloatingIPsAssociationNegativeTestJSON-743310595-project-member] [instance: 73cb4a44-a4d9-4c8c-8472-f99b5d449cec] Start spawning the instance on the hypervisor. 
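Editor's note on the lock records above ("Acquiring lock ...", "acquired ... waited N s", "released ... held N s"): these are emitted by oslo.concurrency's lockutils, which Nova uses to serialize work on shared state such as the "compute_resources" tracker and the per-instance "refresh_cache-<uuid>" lock. A minimal sketch of the two usual patterns follows; the lock names are taken from the log, the function bodies are placeholders, and the wait/held timing bookkeeping is done by lockutils itself.

    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def claim_resources():
        # resource-tracker work happens while the lock is held
        pass

    def refresh_instance_cache(instance_uuid):
        with lockutils.lock('refresh_cache-%s' % instance_uuid):
            # rebuild the instance's network info cache under the lock
            pass

    claim_resources()
    refresh_instance_cache('d8478b63-3a62-4afa-950b-edf9774e8ea8')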
{{(pid=62070) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 633.576960] env[62070]: DEBUG nova.virt.hardware [None req-defc523d-479c-483f-91ae-c073714ce1fe tempest-FloatingIPsAssociationNegativeTestJSON-743310595 tempest-FloatingIPsAssociationNegativeTestJSON-743310595-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T09:21:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T09:21:17Z,direct_url=,disk_format='vmdk',id=43ea607c-7ece-4601-9b11-75c6a16aa7dd,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9d42cb2bbadf40d6b35f237f71234611',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T09:21:18Z,virtual_size=,visibility=), allow threads: False {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 633.577228] env[62070]: DEBUG nova.virt.hardware [None req-defc523d-479c-483f-91ae-c073714ce1fe tempest-FloatingIPsAssociationNegativeTestJSON-743310595 tempest-FloatingIPsAssociationNegativeTestJSON-743310595-project-member] Flavor limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 633.577383] env[62070]: DEBUG nova.virt.hardware [None req-defc523d-479c-483f-91ae-c073714ce1fe tempest-FloatingIPsAssociationNegativeTestJSON-743310595 tempest-FloatingIPsAssociationNegativeTestJSON-743310595-project-member] Image limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 633.577642] env[62070]: DEBUG nova.virt.hardware [None req-defc523d-479c-483f-91ae-c073714ce1fe tempest-FloatingIPsAssociationNegativeTestJSON-743310595 tempest-FloatingIPsAssociationNegativeTestJSON-743310595-project-member] Flavor pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 633.578700] env[62070]: DEBUG nova.virt.hardware [None req-defc523d-479c-483f-91ae-c073714ce1fe tempest-FloatingIPsAssociationNegativeTestJSON-743310595 tempest-FloatingIPsAssociationNegativeTestJSON-743310595-project-member] Image pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 633.578907] env[62070]: DEBUG nova.virt.hardware [None req-defc523d-479c-483f-91ae-c073714ce1fe tempest-FloatingIPsAssociationNegativeTestJSON-743310595 tempest-FloatingIPsAssociationNegativeTestJSON-743310595-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 633.579133] env[62070]: DEBUG nova.virt.hardware [None req-defc523d-479c-483f-91ae-c073714ce1fe tempest-FloatingIPsAssociationNegativeTestJSON-743310595 tempest-FloatingIPsAssociationNegativeTestJSON-743310595-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 633.579253] env[62070]: DEBUG nova.virt.hardware [None req-defc523d-479c-483f-91ae-c073714ce1fe tempest-FloatingIPsAssociationNegativeTestJSON-743310595 
tempest-FloatingIPsAssociationNegativeTestJSON-743310595-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 633.579479] env[62070]: DEBUG nova.virt.hardware [None req-defc523d-479c-483f-91ae-c073714ce1fe tempest-FloatingIPsAssociationNegativeTestJSON-743310595 tempest-FloatingIPsAssociationNegativeTestJSON-743310595-project-member] Got 1 possible topologies {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 633.580033] env[62070]: DEBUG nova.virt.hardware [None req-defc523d-479c-483f-91ae-c073714ce1fe tempest-FloatingIPsAssociationNegativeTestJSON-743310595 tempest-FloatingIPsAssociationNegativeTestJSON-743310595-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 633.580236] env[62070]: DEBUG nova.virt.hardware [None req-defc523d-479c-483f-91ae-c073714ce1fe tempest-FloatingIPsAssociationNegativeTestJSON-743310595 tempest-FloatingIPsAssociationNegativeTestJSON-743310595-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 633.582441] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-061535b7-41ba-40dd-8395-8f6287d80650 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.585951] env[62070]: DEBUG oslo_concurrency.lockutils [None req-5085d6a6-6f80-4417-bbf7-554feeae07b1 tempest-ServerActionsTestOtherB-843110490 tempest-ServerActionsTestOtherB-843110490-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 633.596026] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ad64e43-9d81-4a0b-8b4a-d0c8e22dc48a {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.053468] env[62070]: DEBUG nova.network.neutron [None req-67d8c644-2517-46bc-9d7f-c0ef47d3d2eb tempest-ImagesOneServerNegativeTestJSON-1867967444 tempest-ImagesOneServerNegativeTestJSON-1867967444-project-member] [instance: d8478b63-3a62-4afa-950b-edf9774e8ea8] Instance cache missing network info. {{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 634.132236] env[62070]: ERROR nova.compute.manager [None req-defc523d-479c-483f-91ae-c073714ce1fe tempest-FloatingIPsAssociationNegativeTestJSON-743310595 tempest-FloatingIPsAssociationNegativeTestJSON-743310595-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port e004e4fb-3439-483e-936e-b19dd561ecb0, please check neutron logs for more information. 
[ 634.132236] env[62070]: ERROR nova.compute.manager Traceback (most recent call last): [ 634.132236] env[62070]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 634.132236] env[62070]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 634.132236] env[62070]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 634.132236] env[62070]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 634.132236] env[62070]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 634.132236] env[62070]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 634.132236] env[62070]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 634.132236] env[62070]: ERROR nova.compute.manager self.force_reraise() [ 634.132236] env[62070]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 634.132236] env[62070]: ERROR nova.compute.manager raise self.value [ 634.132236] env[62070]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 634.132236] env[62070]: ERROR nova.compute.manager updated_port = self._update_port( [ 634.132236] env[62070]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 634.132236] env[62070]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 634.132673] env[62070]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 634.132673] env[62070]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 634.132673] env[62070]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port e004e4fb-3439-483e-936e-b19dd561ecb0, please check neutron logs for more information. 
[ 634.132673] env[62070]: ERROR nova.compute.manager [ 634.132673] env[62070]: Traceback (most recent call last): [ 634.132673] env[62070]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 634.132673] env[62070]: listener.cb(fileno) [ 634.132673] env[62070]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 634.132673] env[62070]: result = function(*args, **kwargs) [ 634.132673] env[62070]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 634.132673] env[62070]: return func(*args, **kwargs) [ 634.132673] env[62070]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 634.132673] env[62070]: raise e [ 634.132673] env[62070]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 634.132673] env[62070]: nwinfo = self.network_api.allocate_for_instance( [ 634.132673] env[62070]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 634.132673] env[62070]: created_port_ids = self._update_ports_for_instance( [ 634.132673] env[62070]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 634.132673] env[62070]: with excutils.save_and_reraise_exception(): [ 634.132673] env[62070]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 634.132673] env[62070]: self.force_reraise() [ 634.132673] env[62070]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 634.132673] env[62070]: raise self.value [ 634.132673] env[62070]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 634.132673] env[62070]: updated_port = self._update_port( [ 634.132673] env[62070]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 634.132673] env[62070]: _ensure_no_port_binding_failure(port) [ 634.132673] env[62070]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 634.132673] env[62070]: raise exception.PortBindingFailed(port_id=port['id']) [ 634.133433] env[62070]: nova.exception.PortBindingFailed: Binding failed for port e004e4fb-3439-483e-936e-b19dd561ecb0, please check neutron logs for more information. [ 634.133433] env[62070]: Removing descriptor: 14 [ 634.134207] env[62070]: ERROR nova.compute.manager [None req-defc523d-479c-483f-91ae-c073714ce1fe tempest-FloatingIPsAssociationNegativeTestJSON-743310595 tempest-FloatingIPsAssociationNegativeTestJSON-743310595-project-member] [instance: 73cb4a44-a4d9-4c8c-8472-f99b5d449cec] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port e004e4fb-3439-483e-936e-b19dd561ecb0, please check neutron logs for more information. 
[ 634.134207] env[62070]: ERROR nova.compute.manager [instance: 73cb4a44-a4d9-4c8c-8472-f99b5d449cec] Traceback (most recent call last): [ 634.134207] env[62070]: ERROR nova.compute.manager [instance: 73cb4a44-a4d9-4c8c-8472-f99b5d449cec] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 634.134207] env[62070]: ERROR nova.compute.manager [instance: 73cb4a44-a4d9-4c8c-8472-f99b5d449cec] yield resources [ 634.134207] env[62070]: ERROR nova.compute.manager [instance: 73cb4a44-a4d9-4c8c-8472-f99b5d449cec] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 634.134207] env[62070]: ERROR nova.compute.manager [instance: 73cb4a44-a4d9-4c8c-8472-f99b5d449cec] self.driver.spawn(context, instance, image_meta, [ 634.134207] env[62070]: ERROR nova.compute.manager [instance: 73cb4a44-a4d9-4c8c-8472-f99b5d449cec] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 634.134207] env[62070]: ERROR nova.compute.manager [instance: 73cb4a44-a4d9-4c8c-8472-f99b5d449cec] self._vmops.spawn(context, instance, image_meta, injected_files, [ 634.134207] env[62070]: ERROR nova.compute.manager [instance: 73cb4a44-a4d9-4c8c-8472-f99b5d449cec] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 634.134207] env[62070]: ERROR nova.compute.manager [instance: 73cb4a44-a4d9-4c8c-8472-f99b5d449cec] vm_ref = self.build_virtual_machine(instance, [ 634.134207] env[62070]: ERROR nova.compute.manager [instance: 73cb4a44-a4d9-4c8c-8472-f99b5d449cec] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 634.134472] env[62070]: ERROR nova.compute.manager [instance: 73cb4a44-a4d9-4c8c-8472-f99b5d449cec] vif_infos = vmwarevif.get_vif_info(self._session, [ 634.134472] env[62070]: ERROR nova.compute.manager [instance: 73cb4a44-a4d9-4c8c-8472-f99b5d449cec] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 634.134472] env[62070]: ERROR nova.compute.manager [instance: 73cb4a44-a4d9-4c8c-8472-f99b5d449cec] for vif in network_info: [ 634.134472] env[62070]: ERROR nova.compute.manager [instance: 73cb4a44-a4d9-4c8c-8472-f99b5d449cec] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 634.134472] env[62070]: ERROR nova.compute.manager [instance: 73cb4a44-a4d9-4c8c-8472-f99b5d449cec] return self._sync_wrapper(fn, *args, **kwargs) [ 634.134472] env[62070]: ERROR nova.compute.manager [instance: 73cb4a44-a4d9-4c8c-8472-f99b5d449cec] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 634.134472] env[62070]: ERROR nova.compute.manager [instance: 73cb4a44-a4d9-4c8c-8472-f99b5d449cec] self.wait() [ 634.134472] env[62070]: ERROR nova.compute.manager [instance: 73cb4a44-a4d9-4c8c-8472-f99b5d449cec] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 634.134472] env[62070]: ERROR nova.compute.manager [instance: 73cb4a44-a4d9-4c8c-8472-f99b5d449cec] self[:] = self._gt.wait() [ 634.134472] env[62070]: ERROR nova.compute.manager [instance: 73cb4a44-a4d9-4c8c-8472-f99b5d449cec] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 634.134472] env[62070]: ERROR nova.compute.manager [instance: 73cb4a44-a4d9-4c8c-8472-f99b5d449cec] return self._exit_event.wait() [ 634.134472] env[62070]: ERROR nova.compute.manager [instance: 73cb4a44-a4d9-4c8c-8472-f99b5d449cec] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 634.134472] env[62070]: ERROR 
nova.compute.manager [instance: 73cb4a44-a4d9-4c8c-8472-f99b5d449cec] result = hub.switch() [ 634.134814] env[62070]: ERROR nova.compute.manager [instance: 73cb4a44-a4d9-4c8c-8472-f99b5d449cec] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 634.134814] env[62070]: ERROR nova.compute.manager [instance: 73cb4a44-a4d9-4c8c-8472-f99b5d449cec] return self.greenlet.switch() [ 634.134814] env[62070]: ERROR nova.compute.manager [instance: 73cb4a44-a4d9-4c8c-8472-f99b5d449cec] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 634.134814] env[62070]: ERROR nova.compute.manager [instance: 73cb4a44-a4d9-4c8c-8472-f99b5d449cec] result = function(*args, **kwargs) [ 634.134814] env[62070]: ERROR nova.compute.manager [instance: 73cb4a44-a4d9-4c8c-8472-f99b5d449cec] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 634.134814] env[62070]: ERROR nova.compute.manager [instance: 73cb4a44-a4d9-4c8c-8472-f99b5d449cec] return func(*args, **kwargs) [ 634.134814] env[62070]: ERROR nova.compute.manager [instance: 73cb4a44-a4d9-4c8c-8472-f99b5d449cec] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 634.134814] env[62070]: ERROR nova.compute.manager [instance: 73cb4a44-a4d9-4c8c-8472-f99b5d449cec] raise e [ 634.134814] env[62070]: ERROR nova.compute.manager [instance: 73cb4a44-a4d9-4c8c-8472-f99b5d449cec] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 634.134814] env[62070]: ERROR nova.compute.manager [instance: 73cb4a44-a4d9-4c8c-8472-f99b5d449cec] nwinfo = self.network_api.allocate_for_instance( [ 634.134814] env[62070]: ERROR nova.compute.manager [instance: 73cb4a44-a4d9-4c8c-8472-f99b5d449cec] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 634.134814] env[62070]: ERROR nova.compute.manager [instance: 73cb4a44-a4d9-4c8c-8472-f99b5d449cec] created_port_ids = self._update_ports_for_instance( [ 634.134814] env[62070]: ERROR nova.compute.manager [instance: 73cb4a44-a4d9-4c8c-8472-f99b5d449cec] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 634.135099] env[62070]: ERROR nova.compute.manager [instance: 73cb4a44-a4d9-4c8c-8472-f99b5d449cec] with excutils.save_and_reraise_exception(): [ 634.135099] env[62070]: ERROR nova.compute.manager [instance: 73cb4a44-a4d9-4c8c-8472-f99b5d449cec] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 634.135099] env[62070]: ERROR nova.compute.manager [instance: 73cb4a44-a4d9-4c8c-8472-f99b5d449cec] self.force_reraise() [ 634.135099] env[62070]: ERROR nova.compute.manager [instance: 73cb4a44-a4d9-4c8c-8472-f99b5d449cec] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 634.135099] env[62070]: ERROR nova.compute.manager [instance: 73cb4a44-a4d9-4c8c-8472-f99b5d449cec] raise self.value [ 634.135099] env[62070]: ERROR nova.compute.manager [instance: 73cb4a44-a4d9-4c8c-8472-f99b5d449cec] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 634.135099] env[62070]: ERROR nova.compute.manager [instance: 73cb4a44-a4d9-4c8c-8472-f99b5d449cec] updated_port = self._update_port( [ 634.135099] env[62070]: ERROR nova.compute.manager [instance: 73cb4a44-a4d9-4c8c-8472-f99b5d449cec] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 634.135099] 
env[62070]: ERROR nova.compute.manager [instance: 73cb4a44-a4d9-4c8c-8472-f99b5d449cec] _ensure_no_port_binding_failure(port) [ 634.135099] env[62070]: ERROR nova.compute.manager [instance: 73cb4a44-a4d9-4c8c-8472-f99b5d449cec] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 634.135099] env[62070]: ERROR nova.compute.manager [instance: 73cb4a44-a4d9-4c8c-8472-f99b5d449cec] raise exception.PortBindingFailed(port_id=port['id']) [ 634.135099] env[62070]: ERROR nova.compute.manager [instance: 73cb4a44-a4d9-4c8c-8472-f99b5d449cec] nova.exception.PortBindingFailed: Binding failed for port e004e4fb-3439-483e-936e-b19dd561ecb0, please check neutron logs for more information. [ 634.135099] env[62070]: ERROR nova.compute.manager [instance: 73cb4a44-a4d9-4c8c-8472-f99b5d449cec] [ 634.135382] env[62070]: INFO nova.compute.manager [None req-defc523d-479c-483f-91ae-c073714ce1fe tempest-FloatingIPsAssociationNegativeTestJSON-743310595 tempest-FloatingIPsAssociationNegativeTestJSON-743310595-project-member] [instance: 73cb4a44-a4d9-4c8c-8472-f99b5d449cec] Terminating instance [ 634.143667] env[62070]: DEBUG oslo_concurrency.lockutils [None req-defc523d-479c-483f-91ae-c073714ce1fe tempest-FloatingIPsAssociationNegativeTestJSON-743310595 tempest-FloatingIPsAssociationNegativeTestJSON-743310595-project-member] Acquiring lock "refresh_cache-73cb4a44-a4d9-4c8c-8472-f99b5d449cec" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 634.143667] env[62070]: DEBUG oslo_concurrency.lockutils [None req-defc523d-479c-483f-91ae-c073714ce1fe tempest-FloatingIPsAssociationNegativeTestJSON-743310595 tempest-FloatingIPsAssociationNegativeTestJSON-743310595-project-member] Acquired lock "refresh_cache-73cb4a44-a4d9-4c8c-8472-f99b5d449cec" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 634.145135] env[62070]: DEBUG nova.network.neutron [None req-defc523d-479c-483f-91ae-c073714ce1fe tempest-FloatingIPsAssociationNegativeTestJSON-743310595 tempest-FloatingIPsAssociationNegativeTestJSON-743310595-project-member] [instance: 73cb4a44-a4d9-4c8c-8472-f99b5d449cec] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 634.264131] env[62070]: DEBUG nova.network.neutron [None req-67d8c644-2517-46bc-9d7f-c0ef47d3d2eb tempest-ImagesOneServerNegativeTestJSON-1867967444 tempest-ImagesOneServerNegativeTestJSON-1867967444-project-member] [instance: d8478b63-3a62-4afa-950b-edf9774e8ea8] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 634.327433] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Acquiring lock "fe378560-40b8-42c9-840d-b7d60de87c4d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 634.328026] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Lock "fe378560-40b8-42c9-840d-b7d60de87c4d" acquired by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 634.360583] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Acquiring lock "bcafa04d-904b-4eab-aba1-35180c2d4b22" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 634.360832] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Lock "bcafa04d-904b-4eab-aba1-35180c2d4b22" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 634.388902] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Acquiring lock "5a146d8f-6921-4b3e-8696-d2804fb855ba" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 634.388902] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Lock "5a146d8f-6921-4b3e-8696-d2804fb855ba" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 634.508115] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12ee4f5f-f79f-40bd-bef5-69c0f85e6f09 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.513634] env[62070]: DEBUG nova.compute.manager [req-946d211f-f558-4a21-ab68-fad93a0a062d req-f7cacf7a-aec3-4dc9-a988-ddd7baf789d7 service nova] [instance: 73cb4a44-a4d9-4c8c-8472-f99b5d449cec] Received event network-changed-e004e4fb-3439-483e-936e-b19dd561ecb0 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 634.513915] env[62070]: DEBUG nova.compute.manager [req-946d211f-f558-4a21-ab68-fad93a0a062d req-f7cacf7a-aec3-4dc9-a988-ddd7baf789d7 service nova] [instance: 73cb4a44-a4d9-4c8c-8472-f99b5d449cec] Refreshing instance network info cache due to event network-changed-e004e4fb-3439-483e-936e-b19dd561ecb0. 
{{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 634.514036] env[62070]: DEBUG oslo_concurrency.lockutils [req-946d211f-f558-4a21-ab68-fad93a0a062d req-f7cacf7a-aec3-4dc9-a988-ddd7baf789d7 service nova] Acquiring lock "refresh_cache-73cb4a44-a4d9-4c8c-8472-f99b5d449cec" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 634.520269] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a6628ff-aaca-4fdf-a1ec-e2f27e0f1009 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.556980] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a45f0f3-fba3-4140-bf0d-2e8143df5797 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.567346] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0681e774-7516-445c-97c9-34e247e26b2c {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.588707] env[62070]: DEBUG nova.compute.provider_tree [None req-fceeb460-143a-4ad7-bde1-61f1f32cf593 tempest-VolumesAssistedSnapshotsTest-120562102 tempest-VolumesAssistedSnapshotsTest-120562102-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 634.668753] env[62070]: DEBUG nova.network.neutron [None req-defc523d-479c-483f-91ae-c073714ce1fe tempest-FloatingIPsAssociationNegativeTestJSON-743310595 tempest-FloatingIPsAssociationNegativeTestJSON-743310595-project-member] [instance: 73cb4a44-a4d9-4c8c-8472-f99b5d449cec] Instance cache missing network info. {{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 634.772029] env[62070]: DEBUG oslo_concurrency.lockutils [None req-67d8c644-2517-46bc-9d7f-c0ef47d3d2eb tempest-ImagesOneServerNegativeTestJSON-1867967444 tempest-ImagesOneServerNegativeTestJSON-1867967444-project-member] Releasing lock "refresh_cache-d8478b63-3a62-4afa-950b-edf9774e8ea8" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 634.772029] env[62070]: DEBUG nova.compute.manager [None req-67d8c644-2517-46bc-9d7f-c0ef47d3d2eb tempest-ImagesOneServerNegativeTestJSON-1867967444 tempest-ImagesOneServerNegativeTestJSON-1867967444-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
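Editor's note on the "Virt driver does not provide unplug_vifs method" record above: the cleanup path probes an optional driver hook and falls back when it is not implemented. A hedged sketch of that pattern follows; the driver class, arguments and exact control flow are placeholders and may differ from Nova's implementation.

    class BaseDriver:
        def unplug_vifs(self, instance, network_info):
            raise NotImplementedError()

    class VMwareLikeDriver(BaseDriver):
        # does not override unplug_vifs, like the vmwareapi driver in this log
        pass

    def cleanup_networks(driver, instance, network_info):
        try:
            driver.unplug_vifs(instance, network_info)
        except NotImplementedError:
            print('Virt driver does not provide unplug_vifs method, so it is '
                  'not possible to determine if VIFs should be unplugged.')

    cleanup_networks(VMwareLikeDriver(),
                     instance='73cb4a44-a4d9-4c8c-8472-f99b5d449cec',
                     network_info=[])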
{{(pid=62070) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 634.772029] env[62070]: DEBUG nova.compute.manager [None req-67d8c644-2517-46bc-9d7f-c0ef47d3d2eb tempest-ImagesOneServerNegativeTestJSON-1867967444 tempest-ImagesOneServerNegativeTestJSON-1867967444-project-member] [instance: d8478b63-3a62-4afa-950b-edf9774e8ea8] Deallocating network for instance {{(pid=62070) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 634.772029] env[62070]: DEBUG nova.network.neutron [None req-67d8c644-2517-46bc-9d7f-c0ef47d3d2eb tempest-ImagesOneServerNegativeTestJSON-1867967444 tempest-ImagesOneServerNegativeTestJSON-1867967444-project-member] [instance: d8478b63-3a62-4afa-950b-edf9774e8ea8] deallocate_for_instance() {{(pid=62070) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 634.792111] env[62070]: DEBUG nova.network.neutron [None req-67d8c644-2517-46bc-9d7f-c0ef47d3d2eb tempest-ImagesOneServerNegativeTestJSON-1867967444 tempest-ImagesOneServerNegativeTestJSON-1867967444-project-member] [instance: d8478b63-3a62-4afa-950b-edf9774e8ea8] Instance cache missing network info. {{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 634.835737] env[62070]: DEBUG nova.network.neutron [None req-defc523d-479c-483f-91ae-c073714ce1fe tempest-FloatingIPsAssociationNegativeTestJSON-743310595 tempest-FloatingIPsAssociationNegativeTestJSON-743310595-project-member] [instance: 73cb4a44-a4d9-4c8c-8472-f99b5d449cec] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 635.091313] env[62070]: DEBUG nova.scheduler.client.report [None req-fceeb460-143a-4ad7-bde1-61f1f32cf593 tempest-VolumesAssistedSnapshotsTest-120562102 tempest-VolumesAssistedSnapshotsTest-120562102-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 635.296273] env[62070]: DEBUG nova.network.neutron [None req-67d8c644-2517-46bc-9d7f-c0ef47d3d2eb tempest-ImagesOneServerNegativeTestJSON-1867967444 tempest-ImagesOneServerNegativeTestJSON-1867967444-project-member] [instance: d8478b63-3a62-4afa-950b-edf9774e8ea8] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 635.345249] env[62070]: DEBUG oslo_concurrency.lockutils [None req-defc523d-479c-483f-91ae-c073714ce1fe tempest-FloatingIPsAssociationNegativeTestJSON-743310595 tempest-FloatingIPsAssociationNegativeTestJSON-743310595-project-member] Releasing lock "refresh_cache-73cb4a44-a4d9-4c8c-8472-f99b5d449cec" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 635.345778] env[62070]: DEBUG nova.compute.manager [None req-defc523d-479c-483f-91ae-c073714ce1fe tempest-FloatingIPsAssociationNegativeTestJSON-743310595 tempest-FloatingIPsAssociationNegativeTestJSON-743310595-project-member] [instance: 73cb4a44-a4d9-4c8c-8472-f99b5d449cec] Start destroying the 
instance on the hypervisor. {{(pid=62070) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 635.345966] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-defc523d-479c-483f-91ae-c073714ce1fe tempest-FloatingIPsAssociationNegativeTestJSON-743310595 tempest-FloatingIPsAssociationNegativeTestJSON-743310595-project-member] [instance: 73cb4a44-a4d9-4c8c-8472-f99b5d449cec] Destroying instance {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 635.346288] env[62070]: DEBUG oslo_concurrency.lockutils [req-946d211f-f558-4a21-ab68-fad93a0a062d req-f7cacf7a-aec3-4dc9-a988-ddd7baf789d7 service nova] Acquired lock "refresh_cache-73cb4a44-a4d9-4c8c-8472-f99b5d449cec" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 635.346460] env[62070]: DEBUG nova.network.neutron [req-946d211f-f558-4a21-ab68-fad93a0a062d req-f7cacf7a-aec3-4dc9-a988-ddd7baf789d7 service nova] [instance: 73cb4a44-a4d9-4c8c-8472-f99b5d449cec] Refreshing network info cache for port e004e4fb-3439-483e-936e-b19dd561ecb0 {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 635.347498] env[62070]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9bf45a46-eda9-4d7b-924a-04ef9e279cb2 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.359432] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-833f5f3d-6a30-4bd7-9685-1b7436b09e75 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.384349] env[62070]: WARNING nova.virt.vmwareapi.vmops [None req-defc523d-479c-483f-91ae-c073714ce1fe tempest-FloatingIPsAssociationNegativeTestJSON-743310595 tempest-FloatingIPsAssociationNegativeTestJSON-743310595-project-member] [instance: 73cb4a44-a4d9-4c8c-8472-f99b5d449cec] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 73cb4a44-a4d9-4c8c-8472-f99b5d449cec could not be found. [ 635.384788] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-defc523d-479c-483f-91ae-c073714ce1fe tempest-FloatingIPsAssociationNegativeTestJSON-743310595 tempest-FloatingIPsAssociationNegativeTestJSON-743310595-project-member] [instance: 73cb4a44-a4d9-4c8c-8472-f99b5d449cec] Instance destroyed {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 635.385038] env[62070]: INFO nova.compute.manager [None req-defc523d-479c-483f-91ae-c073714ce1fe tempest-FloatingIPsAssociationNegativeTestJSON-743310595 tempest-FloatingIPsAssociationNegativeTestJSON-743310595-project-member] [instance: 73cb4a44-a4d9-4c8c-8472-f99b5d449cec] Took 0.04 seconds to destroy the instance on the hypervisor. [ 635.385337] env[62070]: DEBUG oslo.service.loopingcall [None req-defc523d-479c-483f-91ae-c073714ce1fe tempest-FloatingIPsAssociationNegativeTestJSON-743310595 tempest-FloatingIPsAssociationNegativeTestJSON-743310595-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
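Editor's note on the "Waiting for function ... _deallocate_network_with_retries to return" record above: the log points at oslo.service's loopingcall helper, which Nova uses to retry network deallocation. A hedged sketch of the common FixedIntervalLoopingCall pattern follows; the callable, interval and retry threshold are illustrative stand-ins, not Nova's actual values.

    from oslo_service import loopingcall

    attempts = {'n': 0}

    def _deallocate_with_retries():
        attempts['n'] += 1
        try:
            # stand-in for the real deallocate call, which may fail transiently
            if attempts['n'] < 3:
                raise RuntimeError('neutron not ready yet')
        except RuntimeError:
            return  # let the loop call us again on the next interval
        # success: stop the loop and hand a value back to wait()
        raise loopingcall.LoopingCallDone(retvalue=attempts['n'])

    timer = loopingcall.FixedIntervalLoopingCall(_deallocate_with_retries)
    print('deallocated after %d attempt(s)' % timer.start(interval=0.1).wait())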
{{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 635.386290] env[62070]: DEBUG nova.compute.manager [-] [instance: 73cb4a44-a4d9-4c8c-8472-f99b5d449cec] Deallocating network for instance {{(pid=62070) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 635.386424] env[62070]: DEBUG nova.network.neutron [-] [instance: 73cb4a44-a4d9-4c8c-8472-f99b5d449cec] deallocate_for_instance() {{(pid=62070) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 635.414034] env[62070]: DEBUG nova.network.neutron [-] [instance: 73cb4a44-a4d9-4c8c-8472-f99b5d449cec] Instance cache missing network info. {{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 635.598443] env[62070]: DEBUG oslo_concurrency.lockutils [None req-fceeb460-143a-4ad7-bde1-61f1f32cf593 tempest-VolumesAssistedSnapshotsTest-120562102 tempest-VolumesAssistedSnapshotsTest-120562102-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.088s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 635.599038] env[62070]: ERROR nova.compute.manager [None req-fceeb460-143a-4ad7-bde1-61f1f32cf593 tempest-VolumesAssistedSnapshotsTest-120562102 tempest-VolumesAssistedSnapshotsTest-120562102-project-member] [instance: d7a90be3-d3d6-4626-944b-b907cf7fb64d] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port df0d0694-d2de-4148-8d6d-2700c2ec9a24, please check neutron logs for more information. [ 635.599038] env[62070]: ERROR nova.compute.manager [instance: d7a90be3-d3d6-4626-944b-b907cf7fb64d] Traceback (most recent call last): [ 635.599038] env[62070]: ERROR nova.compute.manager [instance: d7a90be3-d3d6-4626-944b-b907cf7fb64d] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 635.599038] env[62070]: ERROR nova.compute.manager [instance: d7a90be3-d3d6-4626-944b-b907cf7fb64d] self.driver.spawn(context, instance, image_meta, [ 635.599038] env[62070]: ERROR nova.compute.manager [instance: d7a90be3-d3d6-4626-944b-b907cf7fb64d] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 635.599038] env[62070]: ERROR nova.compute.manager [instance: d7a90be3-d3d6-4626-944b-b907cf7fb64d] self._vmops.spawn(context, instance, image_meta, injected_files, [ 635.599038] env[62070]: ERROR nova.compute.manager [instance: d7a90be3-d3d6-4626-944b-b907cf7fb64d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 635.599038] env[62070]: ERROR nova.compute.manager [instance: d7a90be3-d3d6-4626-944b-b907cf7fb64d] vm_ref = self.build_virtual_machine(instance, [ 635.599038] env[62070]: ERROR nova.compute.manager [instance: d7a90be3-d3d6-4626-944b-b907cf7fb64d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 635.599038] env[62070]: ERROR nova.compute.manager [instance: d7a90be3-d3d6-4626-944b-b907cf7fb64d] vif_infos = vmwarevif.get_vif_info(self._session, [ 635.599038] env[62070]: ERROR nova.compute.manager [instance: d7a90be3-d3d6-4626-944b-b907cf7fb64d] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 635.599488] env[62070]: ERROR nova.compute.manager [instance: d7a90be3-d3d6-4626-944b-b907cf7fb64d] for vif in network_info: [ 635.599488] env[62070]: ERROR nova.compute.manager [instance: d7a90be3-d3d6-4626-944b-b907cf7fb64d] File 
"/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 635.599488] env[62070]: ERROR nova.compute.manager [instance: d7a90be3-d3d6-4626-944b-b907cf7fb64d] return self._sync_wrapper(fn, *args, **kwargs) [ 635.599488] env[62070]: ERROR nova.compute.manager [instance: d7a90be3-d3d6-4626-944b-b907cf7fb64d] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 635.599488] env[62070]: ERROR nova.compute.manager [instance: d7a90be3-d3d6-4626-944b-b907cf7fb64d] self.wait() [ 635.599488] env[62070]: ERROR nova.compute.manager [instance: d7a90be3-d3d6-4626-944b-b907cf7fb64d] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 635.599488] env[62070]: ERROR nova.compute.manager [instance: d7a90be3-d3d6-4626-944b-b907cf7fb64d] self[:] = self._gt.wait() [ 635.599488] env[62070]: ERROR nova.compute.manager [instance: d7a90be3-d3d6-4626-944b-b907cf7fb64d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 635.599488] env[62070]: ERROR nova.compute.manager [instance: d7a90be3-d3d6-4626-944b-b907cf7fb64d] return self._exit_event.wait() [ 635.599488] env[62070]: ERROR nova.compute.manager [instance: d7a90be3-d3d6-4626-944b-b907cf7fb64d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 635.599488] env[62070]: ERROR nova.compute.manager [instance: d7a90be3-d3d6-4626-944b-b907cf7fb64d] result = hub.switch() [ 635.599488] env[62070]: ERROR nova.compute.manager [instance: d7a90be3-d3d6-4626-944b-b907cf7fb64d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 635.599488] env[62070]: ERROR nova.compute.manager [instance: d7a90be3-d3d6-4626-944b-b907cf7fb64d] return self.greenlet.switch() [ 635.599776] env[62070]: ERROR nova.compute.manager [instance: d7a90be3-d3d6-4626-944b-b907cf7fb64d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 635.599776] env[62070]: ERROR nova.compute.manager [instance: d7a90be3-d3d6-4626-944b-b907cf7fb64d] result = function(*args, **kwargs) [ 635.599776] env[62070]: ERROR nova.compute.manager [instance: d7a90be3-d3d6-4626-944b-b907cf7fb64d] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 635.599776] env[62070]: ERROR nova.compute.manager [instance: d7a90be3-d3d6-4626-944b-b907cf7fb64d] return func(*args, **kwargs) [ 635.599776] env[62070]: ERROR nova.compute.manager [instance: d7a90be3-d3d6-4626-944b-b907cf7fb64d] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 635.599776] env[62070]: ERROR nova.compute.manager [instance: d7a90be3-d3d6-4626-944b-b907cf7fb64d] raise e [ 635.599776] env[62070]: ERROR nova.compute.manager [instance: d7a90be3-d3d6-4626-944b-b907cf7fb64d] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 635.599776] env[62070]: ERROR nova.compute.manager [instance: d7a90be3-d3d6-4626-944b-b907cf7fb64d] nwinfo = self.network_api.allocate_for_instance( [ 635.599776] env[62070]: ERROR nova.compute.manager [instance: d7a90be3-d3d6-4626-944b-b907cf7fb64d] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 635.599776] env[62070]: ERROR nova.compute.manager [instance: d7a90be3-d3d6-4626-944b-b907cf7fb64d] created_port_ids = self._update_ports_for_instance( [ 635.599776] env[62070]: ERROR nova.compute.manager [instance: d7a90be3-d3d6-4626-944b-b907cf7fb64d] File "/opt/stack/nova/nova/network/neutron.py", line 
1365, in _update_ports_for_instance [ 635.599776] env[62070]: ERROR nova.compute.manager [instance: d7a90be3-d3d6-4626-944b-b907cf7fb64d] with excutils.save_and_reraise_exception(): [ 635.599776] env[62070]: ERROR nova.compute.manager [instance: d7a90be3-d3d6-4626-944b-b907cf7fb64d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 635.600156] env[62070]: ERROR nova.compute.manager [instance: d7a90be3-d3d6-4626-944b-b907cf7fb64d] self.force_reraise() [ 635.600156] env[62070]: ERROR nova.compute.manager [instance: d7a90be3-d3d6-4626-944b-b907cf7fb64d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 635.600156] env[62070]: ERROR nova.compute.manager [instance: d7a90be3-d3d6-4626-944b-b907cf7fb64d] raise self.value [ 635.600156] env[62070]: ERROR nova.compute.manager [instance: d7a90be3-d3d6-4626-944b-b907cf7fb64d] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 635.600156] env[62070]: ERROR nova.compute.manager [instance: d7a90be3-d3d6-4626-944b-b907cf7fb64d] updated_port = self._update_port( [ 635.600156] env[62070]: ERROR nova.compute.manager [instance: d7a90be3-d3d6-4626-944b-b907cf7fb64d] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 635.600156] env[62070]: ERROR nova.compute.manager [instance: d7a90be3-d3d6-4626-944b-b907cf7fb64d] _ensure_no_port_binding_failure(port) [ 635.600156] env[62070]: ERROR nova.compute.manager [instance: d7a90be3-d3d6-4626-944b-b907cf7fb64d] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 635.600156] env[62070]: ERROR nova.compute.manager [instance: d7a90be3-d3d6-4626-944b-b907cf7fb64d] raise exception.PortBindingFailed(port_id=port['id']) [ 635.600156] env[62070]: ERROR nova.compute.manager [instance: d7a90be3-d3d6-4626-944b-b907cf7fb64d] nova.exception.PortBindingFailed: Binding failed for port df0d0694-d2de-4148-8d6d-2700c2ec9a24, please check neutron logs for more information. [ 635.600156] env[62070]: ERROR nova.compute.manager [instance: d7a90be3-d3d6-4626-944b-b907cf7fb64d] [ 635.600450] env[62070]: DEBUG nova.compute.utils [None req-fceeb460-143a-4ad7-bde1-61f1f32cf593 tempest-VolumesAssistedSnapshotsTest-120562102 tempest-VolumesAssistedSnapshotsTest-120562102-project-member] [instance: d7a90be3-d3d6-4626-944b-b907cf7fb64d] Binding failed for port df0d0694-d2de-4148-8d6d-2700c2ec9a24, please check neutron logs for more information. {{(pid=62070) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 635.600965] env[62070]: DEBUG oslo_concurrency.lockutils [None req-df68df2e-4fc9-4497-809c-48ba87337e66 tempest-AttachInterfacesV270Test-562938472 tempest-AttachInterfacesV270Test-562938472-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 15.250s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 635.607020] env[62070]: DEBUG nova.compute.manager [None req-fceeb460-143a-4ad7-bde1-61f1f32cf593 tempest-VolumesAssistedSnapshotsTest-120562102 tempest-VolumesAssistedSnapshotsTest-120562102-project-member] [instance: d7a90be3-d3d6-4626-944b-b907cf7fb64d] Build of instance d7a90be3-d3d6-4626-944b-b907cf7fb64d was re-scheduled: Binding failed for port df0d0694-d2de-4148-8d6d-2700c2ec9a24, please check neutron logs for more information. 
{{(pid=62070) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 635.607020] env[62070]: DEBUG nova.compute.manager [None req-fceeb460-143a-4ad7-bde1-61f1f32cf593 tempest-VolumesAssistedSnapshotsTest-120562102 tempest-VolumesAssistedSnapshotsTest-120562102-project-member] [instance: d7a90be3-d3d6-4626-944b-b907cf7fb64d] Unplugging VIFs for instance {{(pid=62070) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 635.607020] env[62070]: DEBUG oslo_concurrency.lockutils [None req-fceeb460-143a-4ad7-bde1-61f1f32cf593 tempest-VolumesAssistedSnapshotsTest-120562102 tempest-VolumesAssistedSnapshotsTest-120562102-project-member] Acquiring lock "refresh_cache-d7a90be3-d3d6-4626-944b-b907cf7fb64d" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 635.607020] env[62070]: DEBUG oslo_concurrency.lockutils [None req-fceeb460-143a-4ad7-bde1-61f1f32cf593 tempest-VolumesAssistedSnapshotsTest-120562102 tempest-VolumesAssistedSnapshotsTest-120562102-project-member] Acquired lock "refresh_cache-d7a90be3-d3d6-4626-944b-b907cf7fb64d" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 635.607274] env[62070]: DEBUG nova.network.neutron [None req-fceeb460-143a-4ad7-bde1-61f1f32cf593 tempest-VolumesAssistedSnapshotsTest-120562102 tempest-VolumesAssistedSnapshotsTest-120562102-project-member] [instance: d7a90be3-d3d6-4626-944b-b907cf7fb64d] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 635.802023] env[62070]: INFO nova.compute.manager [None req-67d8c644-2517-46bc-9d7f-c0ef47d3d2eb tempest-ImagesOneServerNegativeTestJSON-1867967444 tempest-ImagesOneServerNegativeTestJSON-1867967444-project-member] [instance: d8478b63-3a62-4afa-950b-edf9774e8ea8] Took 1.03 seconds to deallocate network for instance. [ 635.871283] env[62070]: DEBUG nova.network.neutron [req-946d211f-f558-4a21-ab68-fad93a0a062d req-f7cacf7a-aec3-4dc9-a988-ddd7baf789d7 service nova] [instance: 73cb4a44-a4d9-4c8c-8472-f99b5d449cec] Instance cache missing network info. {{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 635.918341] env[62070]: DEBUG nova.network.neutron [-] [instance: 73cb4a44-a4d9-4c8c-8472-f99b5d449cec] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 635.972785] env[62070]: DEBUG nova.network.neutron [req-946d211f-f558-4a21-ab68-fad93a0a062d req-f7cacf7a-aec3-4dc9-a988-ddd7baf789d7 service nova] [instance: 73cb4a44-a4d9-4c8c-8472-f99b5d449cec] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 636.140438] env[62070]: DEBUG nova.network.neutron [None req-fceeb460-143a-4ad7-bde1-61f1f32cf593 tempest-VolumesAssistedSnapshotsTest-120562102 tempest-VolumesAssistedSnapshotsTest-120562102-project-member] [instance: d7a90be3-d3d6-4626-944b-b907cf7fb64d] Instance cache missing network info. 
{{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 636.338266] env[62070]: DEBUG nova.network.neutron [None req-fceeb460-143a-4ad7-bde1-61f1f32cf593 tempest-VolumesAssistedSnapshotsTest-120562102 tempest-VolumesAssistedSnapshotsTest-120562102-project-member] [instance: d7a90be3-d3d6-4626-944b-b907cf7fb64d] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 636.421153] env[62070]: INFO nova.compute.manager [-] [instance: 73cb4a44-a4d9-4c8c-8472-f99b5d449cec] Took 1.03 seconds to deallocate network for instance. [ 636.422856] env[62070]: DEBUG nova.compute.claims [None req-defc523d-479c-483f-91ae-c073714ce1fe tempest-FloatingIPsAssociationNegativeTestJSON-743310595 tempest-FloatingIPsAssociationNegativeTestJSON-743310595-project-member] [instance: 73cb4a44-a4d9-4c8c-8472-f99b5d449cec] Aborting claim: {{(pid=62070) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 636.423164] env[62070]: DEBUG oslo_concurrency.lockutils [None req-defc523d-479c-483f-91ae-c073714ce1fe tempest-FloatingIPsAssociationNegativeTestJSON-743310595 tempest-FloatingIPsAssociationNegativeTestJSON-743310595-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 636.475619] env[62070]: DEBUG oslo_concurrency.lockutils [req-946d211f-f558-4a21-ab68-fad93a0a062d req-f7cacf7a-aec3-4dc9-a988-ddd7baf789d7 service nova] Releasing lock "refresh_cache-73cb4a44-a4d9-4c8c-8472-f99b5d449cec" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 636.545878] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dae4895b-7d17-4167-a219-a515eb1a87cf {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.553674] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bb47bfb-3c33-4ebc-940d-050c925ecf8e {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.588053] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8305c08e-9e2d-4569-9fb0-b3d70a456066 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.594916] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a05d2c18-c83a-481f-a2e7-659c59eb6088 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.601027] env[62070]: DEBUG nova.compute.manager [req-f08b740e-ce1c-4de5-8ad7-cf3ab038f455 req-ad4597eb-7640-4d5b-be73-df158bca57d1 service nova] [instance: 73cb4a44-a4d9-4c8c-8472-f99b5d449cec] Received event network-vif-deleted-e004e4fb-3439-483e-936e-b19dd561ecb0 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 636.611059] env[62070]: DEBUG nova.compute.provider_tree [None req-df68df2e-4fc9-4497-809c-48ba87337e66 tempest-AttachInterfacesV270Test-562938472 tempest-AttachInterfacesV270Test-562938472-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) 
update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 636.845034] env[62070]: DEBUG oslo_concurrency.lockutils [None req-fceeb460-143a-4ad7-bde1-61f1f32cf593 tempest-VolumesAssistedSnapshotsTest-120562102 tempest-VolumesAssistedSnapshotsTest-120562102-project-member] Releasing lock "refresh_cache-d7a90be3-d3d6-4626-944b-b907cf7fb64d" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 636.846016] env[62070]: DEBUG nova.compute.manager [None req-fceeb460-143a-4ad7-bde1-61f1f32cf593 tempest-VolumesAssistedSnapshotsTest-120562102 tempest-VolumesAssistedSnapshotsTest-120562102-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62070) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 636.846016] env[62070]: DEBUG nova.compute.manager [None req-fceeb460-143a-4ad7-bde1-61f1f32cf593 tempest-VolumesAssistedSnapshotsTest-120562102 tempest-VolumesAssistedSnapshotsTest-120562102-project-member] [instance: d7a90be3-d3d6-4626-944b-b907cf7fb64d] Deallocating network for instance {{(pid=62070) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 636.846016] env[62070]: DEBUG nova.network.neutron [None req-fceeb460-143a-4ad7-bde1-61f1f32cf593 tempest-VolumesAssistedSnapshotsTest-120562102 tempest-VolumesAssistedSnapshotsTest-120562102-project-member] [instance: d7a90be3-d3d6-4626-944b-b907cf7fb64d] deallocate_for_instance() {{(pid=62070) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 636.855354] env[62070]: INFO nova.scheduler.client.report [None req-67d8c644-2517-46bc-9d7f-c0ef47d3d2eb tempest-ImagesOneServerNegativeTestJSON-1867967444 tempest-ImagesOneServerNegativeTestJSON-1867967444-project-member] Deleted allocations for instance d8478b63-3a62-4afa-950b-edf9774e8ea8 [ 636.866129] env[62070]: DEBUG nova.network.neutron [None req-fceeb460-143a-4ad7-bde1-61f1f32cf593 tempest-VolumesAssistedSnapshotsTest-120562102 tempest-VolumesAssistedSnapshotsTest-120562102-project-member] [instance: d7a90be3-d3d6-4626-944b-b907cf7fb64d] Instance cache missing network info. 
{{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 637.115085] env[62070]: DEBUG nova.scheduler.client.report [None req-df68df2e-4fc9-4497-809c-48ba87337e66 tempest-AttachInterfacesV270Test-562938472 tempest-AttachInterfacesV270Test-562938472-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 637.369457] env[62070]: DEBUG oslo_concurrency.lockutils [None req-67d8c644-2517-46bc-9d7f-c0ef47d3d2eb tempest-ImagesOneServerNegativeTestJSON-1867967444 tempest-ImagesOneServerNegativeTestJSON-1867967444-project-member] Lock "d8478b63-3a62-4afa-950b-edf9774e8ea8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 71.146s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 637.369810] env[62070]: DEBUG nova.network.neutron [None req-fceeb460-143a-4ad7-bde1-61f1f32cf593 tempest-VolumesAssistedSnapshotsTest-120562102 tempest-VolumesAssistedSnapshotsTest-120562102-project-member] [instance: d7a90be3-d3d6-4626-944b-b907cf7fb64d] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 637.619893] env[62070]: DEBUG oslo_concurrency.lockutils [None req-df68df2e-4fc9-4497-809c-48ba87337e66 tempest-AttachInterfacesV270Test-562938472 tempest-AttachInterfacesV270Test-562938472-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.019s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 637.620532] env[62070]: ERROR nova.compute.manager [None req-df68df2e-4fc9-4497-809c-48ba87337e66 tempest-AttachInterfacesV270Test-562938472 tempest-AttachInterfacesV270Test-562938472-project-member] [instance: ad0dd218-5e45-4d22-9d94-5c25ba8b22ec] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 1cce7a44-4a4c-4293-a7af-31a45ca20632, please check neutron logs for more information. 
[ 637.620532] env[62070]: ERROR nova.compute.manager [instance: ad0dd218-5e45-4d22-9d94-5c25ba8b22ec] Traceback (most recent call last): [ 637.620532] env[62070]: ERROR nova.compute.manager [instance: ad0dd218-5e45-4d22-9d94-5c25ba8b22ec] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 637.620532] env[62070]: ERROR nova.compute.manager [instance: ad0dd218-5e45-4d22-9d94-5c25ba8b22ec] self.driver.spawn(context, instance, image_meta, [ 637.620532] env[62070]: ERROR nova.compute.manager [instance: ad0dd218-5e45-4d22-9d94-5c25ba8b22ec] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 637.620532] env[62070]: ERROR nova.compute.manager [instance: ad0dd218-5e45-4d22-9d94-5c25ba8b22ec] self._vmops.spawn(context, instance, image_meta, injected_files, [ 637.620532] env[62070]: ERROR nova.compute.manager [instance: ad0dd218-5e45-4d22-9d94-5c25ba8b22ec] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 637.620532] env[62070]: ERROR nova.compute.manager [instance: ad0dd218-5e45-4d22-9d94-5c25ba8b22ec] vm_ref = self.build_virtual_machine(instance, [ 637.620532] env[62070]: ERROR nova.compute.manager [instance: ad0dd218-5e45-4d22-9d94-5c25ba8b22ec] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 637.620532] env[62070]: ERROR nova.compute.manager [instance: ad0dd218-5e45-4d22-9d94-5c25ba8b22ec] vif_infos = vmwarevif.get_vif_info(self._session, [ 637.620532] env[62070]: ERROR nova.compute.manager [instance: ad0dd218-5e45-4d22-9d94-5c25ba8b22ec] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 637.620860] env[62070]: ERROR nova.compute.manager [instance: ad0dd218-5e45-4d22-9d94-5c25ba8b22ec] for vif in network_info: [ 637.620860] env[62070]: ERROR nova.compute.manager [instance: ad0dd218-5e45-4d22-9d94-5c25ba8b22ec] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 637.620860] env[62070]: ERROR nova.compute.manager [instance: ad0dd218-5e45-4d22-9d94-5c25ba8b22ec] return self._sync_wrapper(fn, *args, **kwargs) [ 637.620860] env[62070]: ERROR nova.compute.manager [instance: ad0dd218-5e45-4d22-9d94-5c25ba8b22ec] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 637.620860] env[62070]: ERROR nova.compute.manager [instance: ad0dd218-5e45-4d22-9d94-5c25ba8b22ec] self.wait() [ 637.620860] env[62070]: ERROR nova.compute.manager [instance: ad0dd218-5e45-4d22-9d94-5c25ba8b22ec] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 637.620860] env[62070]: ERROR nova.compute.manager [instance: ad0dd218-5e45-4d22-9d94-5c25ba8b22ec] self[:] = self._gt.wait() [ 637.620860] env[62070]: ERROR nova.compute.manager [instance: ad0dd218-5e45-4d22-9d94-5c25ba8b22ec] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 637.620860] env[62070]: ERROR nova.compute.manager [instance: ad0dd218-5e45-4d22-9d94-5c25ba8b22ec] return self._exit_event.wait() [ 637.620860] env[62070]: ERROR nova.compute.manager [instance: ad0dd218-5e45-4d22-9d94-5c25ba8b22ec] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 637.620860] env[62070]: ERROR nova.compute.manager [instance: ad0dd218-5e45-4d22-9d94-5c25ba8b22ec] result = hub.switch() [ 637.620860] env[62070]: ERROR nova.compute.manager [instance: ad0dd218-5e45-4d22-9d94-5c25ba8b22ec] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
637.620860] env[62070]: ERROR nova.compute.manager [instance: ad0dd218-5e45-4d22-9d94-5c25ba8b22ec] return self.greenlet.switch() [ 637.621206] env[62070]: ERROR nova.compute.manager [instance: ad0dd218-5e45-4d22-9d94-5c25ba8b22ec] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 637.621206] env[62070]: ERROR nova.compute.manager [instance: ad0dd218-5e45-4d22-9d94-5c25ba8b22ec] result = function(*args, **kwargs) [ 637.621206] env[62070]: ERROR nova.compute.manager [instance: ad0dd218-5e45-4d22-9d94-5c25ba8b22ec] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 637.621206] env[62070]: ERROR nova.compute.manager [instance: ad0dd218-5e45-4d22-9d94-5c25ba8b22ec] return func(*args, **kwargs) [ 637.621206] env[62070]: ERROR nova.compute.manager [instance: ad0dd218-5e45-4d22-9d94-5c25ba8b22ec] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 637.621206] env[62070]: ERROR nova.compute.manager [instance: ad0dd218-5e45-4d22-9d94-5c25ba8b22ec] raise e [ 637.621206] env[62070]: ERROR nova.compute.manager [instance: ad0dd218-5e45-4d22-9d94-5c25ba8b22ec] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 637.621206] env[62070]: ERROR nova.compute.manager [instance: ad0dd218-5e45-4d22-9d94-5c25ba8b22ec] nwinfo = self.network_api.allocate_for_instance( [ 637.621206] env[62070]: ERROR nova.compute.manager [instance: ad0dd218-5e45-4d22-9d94-5c25ba8b22ec] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 637.621206] env[62070]: ERROR nova.compute.manager [instance: ad0dd218-5e45-4d22-9d94-5c25ba8b22ec] created_port_ids = self._update_ports_for_instance( [ 637.621206] env[62070]: ERROR nova.compute.manager [instance: ad0dd218-5e45-4d22-9d94-5c25ba8b22ec] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 637.621206] env[62070]: ERROR nova.compute.manager [instance: ad0dd218-5e45-4d22-9d94-5c25ba8b22ec] with excutils.save_and_reraise_exception(): [ 637.621206] env[62070]: ERROR nova.compute.manager [instance: ad0dd218-5e45-4d22-9d94-5c25ba8b22ec] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 637.621563] env[62070]: ERROR nova.compute.manager [instance: ad0dd218-5e45-4d22-9d94-5c25ba8b22ec] self.force_reraise() [ 637.621563] env[62070]: ERROR nova.compute.manager [instance: ad0dd218-5e45-4d22-9d94-5c25ba8b22ec] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 637.621563] env[62070]: ERROR nova.compute.manager [instance: ad0dd218-5e45-4d22-9d94-5c25ba8b22ec] raise self.value [ 637.621563] env[62070]: ERROR nova.compute.manager [instance: ad0dd218-5e45-4d22-9d94-5c25ba8b22ec] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 637.621563] env[62070]: ERROR nova.compute.manager [instance: ad0dd218-5e45-4d22-9d94-5c25ba8b22ec] updated_port = self._update_port( [ 637.621563] env[62070]: ERROR nova.compute.manager [instance: ad0dd218-5e45-4d22-9d94-5c25ba8b22ec] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 637.621563] env[62070]: ERROR nova.compute.manager [instance: ad0dd218-5e45-4d22-9d94-5c25ba8b22ec] _ensure_no_port_binding_failure(port) [ 637.621563] env[62070]: ERROR nova.compute.manager [instance: ad0dd218-5e45-4d22-9d94-5c25ba8b22ec] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 637.621563] env[62070]: ERROR nova.compute.manager [instance: ad0dd218-5e45-4d22-9d94-5c25ba8b22ec] raise exception.PortBindingFailed(port_id=port['id']) [ 637.621563] env[62070]: ERROR nova.compute.manager [instance: ad0dd218-5e45-4d22-9d94-5c25ba8b22ec] nova.exception.PortBindingFailed: Binding failed for port 1cce7a44-4a4c-4293-a7af-31a45ca20632, please check neutron logs for more information. [ 637.621563] env[62070]: ERROR nova.compute.manager [instance: ad0dd218-5e45-4d22-9d94-5c25ba8b22ec] [ 637.621877] env[62070]: DEBUG nova.compute.utils [None req-df68df2e-4fc9-4497-809c-48ba87337e66 tempest-AttachInterfacesV270Test-562938472 tempest-AttachInterfacesV270Test-562938472-project-member] [instance: ad0dd218-5e45-4d22-9d94-5c25ba8b22ec] Binding failed for port 1cce7a44-4a4c-4293-a7af-31a45ca20632, please check neutron logs for more information. {{(pid=62070) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 637.622593] env[62070]: DEBUG oslo_concurrency.lockutils [None req-c926e67b-5387-49dc-b89c-40a189a7f3d7 tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.205s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 637.624016] env[62070]: INFO nova.compute.claims [None req-c926e67b-5387-49dc-b89c-40a189a7f3d7 tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] [instance: 95edf3d1-a987-4768-93be-1e045d7bfa99] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 637.627532] env[62070]: DEBUG nova.compute.manager [None req-df68df2e-4fc9-4497-809c-48ba87337e66 tempest-AttachInterfacesV270Test-562938472 tempest-AttachInterfacesV270Test-562938472-project-member] [instance: ad0dd218-5e45-4d22-9d94-5c25ba8b22ec] Build of instance ad0dd218-5e45-4d22-9d94-5c25ba8b22ec was re-scheduled: Binding failed for port 1cce7a44-4a4c-4293-a7af-31a45ca20632, please check neutron logs for more information. 
{{(pid=62070) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 637.627973] env[62070]: DEBUG nova.compute.manager [None req-df68df2e-4fc9-4497-809c-48ba87337e66 tempest-AttachInterfacesV270Test-562938472 tempest-AttachInterfacesV270Test-562938472-project-member] [instance: ad0dd218-5e45-4d22-9d94-5c25ba8b22ec] Unplugging VIFs for instance {{(pid=62070) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 637.628211] env[62070]: DEBUG oslo_concurrency.lockutils [None req-df68df2e-4fc9-4497-809c-48ba87337e66 tempest-AttachInterfacesV270Test-562938472 tempest-AttachInterfacesV270Test-562938472-project-member] Acquiring lock "refresh_cache-ad0dd218-5e45-4d22-9d94-5c25ba8b22ec" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 637.628356] env[62070]: DEBUG oslo_concurrency.lockutils [None req-df68df2e-4fc9-4497-809c-48ba87337e66 tempest-AttachInterfacesV270Test-562938472 tempest-AttachInterfacesV270Test-562938472-project-member] Acquired lock "refresh_cache-ad0dd218-5e45-4d22-9d94-5c25ba8b22ec" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 637.628511] env[62070]: DEBUG nova.network.neutron [None req-df68df2e-4fc9-4497-809c-48ba87337e66 tempest-AttachInterfacesV270Test-562938472 tempest-AttachInterfacesV270Test-562938472-project-member] [instance: ad0dd218-5e45-4d22-9d94-5c25ba8b22ec] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 637.874062] env[62070]: DEBUG nova.compute.manager [None req-97492129-d62b-43cc-b22d-f53a63d9b75b tempest-InstanceActionsTestJSON-809963800 tempest-InstanceActionsTestJSON-809963800-project-member] [instance: adccca24-ed77-410b-8b69-19137cadafbd] Starting instance... {{(pid=62070) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 637.876987] env[62070]: INFO nova.compute.manager [None req-fceeb460-143a-4ad7-bde1-61f1f32cf593 tempest-VolumesAssistedSnapshotsTest-120562102 tempest-VolumesAssistedSnapshotsTest-120562102-project-member] [instance: d7a90be3-d3d6-4626-944b-b907cf7fb64d] Took 1.03 seconds to deallocate network for instance. [ 638.165371] env[62070]: DEBUG nova.network.neutron [None req-df68df2e-4fc9-4497-809c-48ba87337e66 tempest-AttachInterfacesV270Test-562938472 tempest-AttachInterfacesV270Test-562938472-project-member] [instance: ad0dd218-5e45-4d22-9d94-5c25ba8b22ec] Instance cache missing network info. 
{{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 638.305792] env[62070]: DEBUG nova.network.neutron [None req-df68df2e-4fc9-4497-809c-48ba87337e66 tempest-AttachInterfacesV270Test-562938472 tempest-AttachInterfacesV270Test-562938472-project-member] [instance: ad0dd218-5e45-4d22-9d94-5c25ba8b22ec] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 638.408888] env[62070]: DEBUG oslo_concurrency.lockutils [None req-97492129-d62b-43cc-b22d-f53a63d9b75b tempest-InstanceActionsTestJSON-809963800 tempest-InstanceActionsTestJSON-809963800-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 638.811448] env[62070]: DEBUG oslo_concurrency.lockutils [None req-df68df2e-4fc9-4497-809c-48ba87337e66 tempest-AttachInterfacesV270Test-562938472 tempest-AttachInterfacesV270Test-562938472-project-member] Releasing lock "refresh_cache-ad0dd218-5e45-4d22-9d94-5c25ba8b22ec" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 638.811652] env[62070]: DEBUG nova.compute.manager [None req-df68df2e-4fc9-4497-809c-48ba87337e66 tempest-AttachInterfacesV270Test-562938472 tempest-AttachInterfacesV270Test-562938472-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62070) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 638.811905] env[62070]: DEBUG nova.compute.manager [None req-df68df2e-4fc9-4497-809c-48ba87337e66 tempest-AttachInterfacesV270Test-562938472 tempest-AttachInterfacesV270Test-562938472-project-member] [instance: ad0dd218-5e45-4d22-9d94-5c25ba8b22ec] Deallocating network for instance {{(pid=62070) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 638.812658] env[62070]: DEBUG nova.network.neutron [None req-df68df2e-4fc9-4497-809c-48ba87337e66 tempest-AttachInterfacesV270Test-562938472 tempest-AttachInterfacesV270Test-562938472-project-member] [instance: ad0dd218-5e45-4d22-9d94-5c25ba8b22ec] deallocate_for_instance() {{(pid=62070) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 638.849505] env[62070]: DEBUG nova.network.neutron [None req-df68df2e-4fc9-4497-809c-48ba87337e66 tempest-AttachInterfacesV270Test-562938472 tempest-AttachInterfacesV270Test-562938472-project-member] [instance: ad0dd218-5e45-4d22-9d94-5c25ba8b22ec] Instance cache missing network info. 
{{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 638.920728] env[62070]: INFO nova.scheduler.client.report [None req-fceeb460-143a-4ad7-bde1-61f1f32cf593 tempest-VolumesAssistedSnapshotsTest-120562102 tempest-VolumesAssistedSnapshotsTest-120562102-project-member] Deleted allocations for instance d7a90be3-d3d6-4626-944b-b907cf7fb64d [ 639.119303] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6d5eeb7-bd53-4a29-a406-d72203215857 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.134679] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8ecd5df-05d7-4f8d-97ce-83174c2a02f0 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.171922] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2ba37b8-fdaf-48ff-8e40-fb83635a83d8 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.183305] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db3437ff-1d76-4615-9000-4e5040725c1e {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.196737] env[62070]: DEBUG nova.compute.provider_tree [None req-c926e67b-5387-49dc-b89c-40a189a7f3d7 tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 639.354621] env[62070]: DEBUG nova.network.neutron [None req-df68df2e-4fc9-4497-809c-48ba87337e66 tempest-AttachInterfacesV270Test-562938472 tempest-AttachInterfacesV270Test-562938472-project-member] [instance: ad0dd218-5e45-4d22-9d94-5c25ba8b22ec] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 639.431066] env[62070]: DEBUG oslo_concurrency.lockutils [None req-fceeb460-143a-4ad7-bde1-61f1f32cf593 tempest-VolumesAssistedSnapshotsTest-120562102 tempest-VolumesAssistedSnapshotsTest-120562102-project-member] Lock "d7a90be3-d3d6-4626-944b-b907cf7fb64d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 71.686s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 639.701846] env[62070]: DEBUG nova.scheduler.client.report [None req-c926e67b-5387-49dc-b89c-40a189a7f3d7 tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 639.861251] env[62070]: INFO nova.compute.manager [None 
req-df68df2e-4fc9-4497-809c-48ba87337e66 tempest-AttachInterfacesV270Test-562938472 tempest-AttachInterfacesV270Test-562938472-project-member] [instance: ad0dd218-5e45-4d22-9d94-5c25ba8b22ec] Took 1.05 seconds to deallocate network for instance. [ 639.934282] env[62070]: DEBUG nova.compute.manager [None req-ca5c3ba7-16ea-4ea4-a91b-9bf5923a33ab tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: efa18997-b502-4e2e-933a-a185ab9074d5] Starting instance... {{(pid=62070) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 640.208900] env[62070]: DEBUG oslo_concurrency.lockutils [None req-c926e67b-5387-49dc-b89c-40a189a7f3d7 tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.586s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 640.209462] env[62070]: DEBUG nova.compute.manager [None req-c926e67b-5387-49dc-b89c-40a189a7f3d7 tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] [instance: 95edf3d1-a987-4768-93be-1e045d7bfa99] Start building networks asynchronously for instance. {{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 640.214349] env[62070]: DEBUG oslo_concurrency.lockutils [None req-ae386fad-6b3a-4fae-9284-28954bd3da74 tempest-ServersTestJSON-1024696343 tempest-ServersTestJSON-1024696343-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 14.325s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 640.466388] env[62070]: DEBUG oslo_concurrency.lockutils [None req-ca5c3ba7-16ea-4ea4-a91b-9bf5923a33ab tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 640.718786] env[62070]: DEBUG nova.compute.utils [None req-c926e67b-5387-49dc-b89c-40a189a7f3d7 tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] Using /dev/sd instead of None {{(pid=62070) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 640.727246] env[62070]: DEBUG nova.compute.manager [None req-c926e67b-5387-49dc-b89c-40a189a7f3d7 tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] [instance: 95edf3d1-a987-4768-93be-1e045d7bfa99] Allocating IP information in the background. 
{{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 640.727578] env[62070]: DEBUG nova.network.neutron [None req-c926e67b-5387-49dc-b89c-40a189a7f3d7 tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] [instance: 95edf3d1-a987-4768-93be-1e045d7bfa99] allocate_for_instance() {{(pid=62070) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 640.791544] env[62070]: DEBUG nova.policy [None req-c926e67b-5387-49dc-b89c-40a189a7f3d7 tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2d5c9f37a9bc41a49629dd309aa9023e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a4448e8a041f494d8faf51ef6d88c635', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62070) authorize /opt/stack/nova/nova/policy.py:201}} [ 640.893597] env[62070]: INFO nova.scheduler.client.report [None req-df68df2e-4fc9-4497-809c-48ba87337e66 tempest-AttachInterfacesV270Test-562938472 tempest-AttachInterfacesV270Test-562938472-project-member] Deleted allocations for instance ad0dd218-5e45-4d22-9d94-5c25ba8b22ec [ 641.181231] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42e8a256-2419-4e82-92e6-8868029f4a0c {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.191148] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c91d18b8-1f3f-445f-9463-eee543a92957 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.223436] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7aa9a3a6-6276-4cd5-b0f9-e14cb14917be {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.226956] env[62070]: DEBUG nova.compute.manager [None req-c926e67b-5387-49dc-b89c-40a189a7f3d7 tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] [instance: 95edf3d1-a987-4768-93be-1e045d7bfa99] Start building block device mappings for instance. 
{{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 641.232547] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-024b6238-23d6-43b9-acbf-d28c68af9966 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.247042] env[62070]: DEBUG nova.compute.provider_tree [None req-ae386fad-6b3a-4fae-9284-28954bd3da74 tempest-ServersTestJSON-1024696343 tempest-ServersTestJSON-1024696343-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 641.331846] env[62070]: DEBUG nova.network.neutron [None req-c926e67b-5387-49dc-b89c-40a189a7f3d7 tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] [instance: 95edf3d1-a987-4768-93be-1e045d7bfa99] Successfully created port: 9a8c0775-32a4-4f9c-9496-eb67187f85a0 {{(pid=62070) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 641.404213] env[62070]: DEBUG oslo_concurrency.lockutils [None req-df68df2e-4fc9-4497-809c-48ba87337e66 tempest-AttachInterfacesV270Test-562938472 tempest-AttachInterfacesV270Test-562938472-project-member] Lock "ad0dd218-5e45-4d22-9d94-5c25ba8b22ec" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 65.493s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 641.754627] env[62070]: DEBUG nova.scheduler.client.report [None req-ae386fad-6b3a-4fae-9284-28954bd3da74 tempest-ServersTestJSON-1024696343 tempest-ServersTestJSON-1024696343-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 641.907336] env[62070]: DEBUG nova.compute.manager [None req-6c8ffe8c-5f36-434d-8036-6e71a11d52b4 tempest-InstanceActionsV221TestJSON-1098319233 tempest-InstanceActionsV221TestJSON-1098319233-project-member] [instance: 9ec1b7a6-5ade-49a3-ba47-912bb328adb6] Starting instance... 
{{(pid=62070) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 642.150234] env[62070]: DEBUG oslo_concurrency.lockutils [None req-b307d371-7291-4b27-bac8-b0cebaeb9acc tempest-ServerDiagnosticsNegativeTest-1306946091 tempest-ServerDiagnosticsNegativeTest-1306946091-project-member] Acquiring lock "42a5c5d8-5c3a-4568-b212-d87f2951a334" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 642.150743] env[62070]: DEBUG oslo_concurrency.lockutils [None req-b307d371-7291-4b27-bac8-b0cebaeb9acc tempest-ServerDiagnosticsNegativeTest-1306946091 tempest-ServerDiagnosticsNegativeTest-1306946091-project-member] Lock "42a5c5d8-5c3a-4568-b212-d87f2951a334" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 642.178147] env[62070]: DEBUG nova.network.neutron [None req-c926e67b-5387-49dc-b89c-40a189a7f3d7 tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] [instance: 95edf3d1-a987-4768-93be-1e045d7bfa99] Successfully created port: d3c3db24-ff59-47f7-91fd-6a0a01a4b152 {{(pid=62070) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 642.241247] env[62070]: DEBUG nova.compute.manager [None req-c926e67b-5387-49dc-b89c-40a189a7f3d7 tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] [instance: 95edf3d1-a987-4768-93be-1e045d7bfa99] Start spawning the instance on the hypervisor. {{(pid=62070) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 642.258442] env[62070]: DEBUG oslo_concurrency.lockutils [None req-ae386fad-6b3a-4fae-9284-28954bd3da74 tempest-ServersTestJSON-1024696343 tempest-ServersTestJSON-1024696343-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.044s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 642.259032] env[62070]: ERROR nova.compute.manager [None req-ae386fad-6b3a-4fae-9284-28954bd3da74 tempest-ServersTestJSON-1024696343 tempest-ServersTestJSON-1024696343-project-member] [instance: 3ee4e051-f51d-4840-a918-fdedad020557] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port f047973c-4e00-4312-949c-099b3b04f342, please check neutron logs for more information. 
[ 642.259032] env[62070]: ERROR nova.compute.manager [instance: 3ee4e051-f51d-4840-a918-fdedad020557] Traceback (most recent call last): [ 642.259032] env[62070]: ERROR nova.compute.manager [instance: 3ee4e051-f51d-4840-a918-fdedad020557] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 642.259032] env[62070]: ERROR nova.compute.manager [instance: 3ee4e051-f51d-4840-a918-fdedad020557] self.driver.spawn(context, instance, image_meta, [ 642.259032] env[62070]: ERROR nova.compute.manager [instance: 3ee4e051-f51d-4840-a918-fdedad020557] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 642.259032] env[62070]: ERROR nova.compute.manager [instance: 3ee4e051-f51d-4840-a918-fdedad020557] self._vmops.spawn(context, instance, image_meta, injected_files, [ 642.259032] env[62070]: ERROR nova.compute.manager [instance: 3ee4e051-f51d-4840-a918-fdedad020557] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 642.259032] env[62070]: ERROR nova.compute.manager [instance: 3ee4e051-f51d-4840-a918-fdedad020557] vm_ref = self.build_virtual_machine(instance, [ 642.259032] env[62070]: ERROR nova.compute.manager [instance: 3ee4e051-f51d-4840-a918-fdedad020557] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 642.259032] env[62070]: ERROR nova.compute.manager [instance: 3ee4e051-f51d-4840-a918-fdedad020557] vif_infos = vmwarevif.get_vif_info(self._session, [ 642.259032] env[62070]: ERROR nova.compute.manager [instance: 3ee4e051-f51d-4840-a918-fdedad020557] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 642.259648] env[62070]: ERROR nova.compute.manager [instance: 3ee4e051-f51d-4840-a918-fdedad020557] for vif in network_info: [ 642.259648] env[62070]: ERROR nova.compute.manager [instance: 3ee4e051-f51d-4840-a918-fdedad020557] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 642.259648] env[62070]: ERROR nova.compute.manager [instance: 3ee4e051-f51d-4840-a918-fdedad020557] return self._sync_wrapper(fn, *args, **kwargs) [ 642.259648] env[62070]: ERROR nova.compute.manager [instance: 3ee4e051-f51d-4840-a918-fdedad020557] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 642.259648] env[62070]: ERROR nova.compute.manager [instance: 3ee4e051-f51d-4840-a918-fdedad020557] self.wait() [ 642.259648] env[62070]: ERROR nova.compute.manager [instance: 3ee4e051-f51d-4840-a918-fdedad020557] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 642.259648] env[62070]: ERROR nova.compute.manager [instance: 3ee4e051-f51d-4840-a918-fdedad020557] self[:] = self._gt.wait() [ 642.259648] env[62070]: ERROR nova.compute.manager [instance: 3ee4e051-f51d-4840-a918-fdedad020557] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 642.259648] env[62070]: ERROR nova.compute.manager [instance: 3ee4e051-f51d-4840-a918-fdedad020557] return self._exit_event.wait() [ 642.259648] env[62070]: ERROR nova.compute.manager [instance: 3ee4e051-f51d-4840-a918-fdedad020557] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 642.259648] env[62070]: ERROR nova.compute.manager [instance: 3ee4e051-f51d-4840-a918-fdedad020557] result = hub.switch() [ 642.259648] env[62070]: ERROR nova.compute.manager [instance: 3ee4e051-f51d-4840-a918-fdedad020557] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
642.259648] env[62070]: ERROR nova.compute.manager [instance: 3ee4e051-f51d-4840-a918-fdedad020557] return self.greenlet.switch() [ 642.260061] env[62070]: ERROR nova.compute.manager [instance: 3ee4e051-f51d-4840-a918-fdedad020557] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 642.260061] env[62070]: ERROR nova.compute.manager [instance: 3ee4e051-f51d-4840-a918-fdedad020557] result = function(*args, **kwargs) [ 642.260061] env[62070]: ERROR nova.compute.manager [instance: 3ee4e051-f51d-4840-a918-fdedad020557] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 642.260061] env[62070]: ERROR nova.compute.manager [instance: 3ee4e051-f51d-4840-a918-fdedad020557] return func(*args, **kwargs) [ 642.260061] env[62070]: ERROR nova.compute.manager [instance: 3ee4e051-f51d-4840-a918-fdedad020557] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 642.260061] env[62070]: ERROR nova.compute.manager [instance: 3ee4e051-f51d-4840-a918-fdedad020557] raise e [ 642.260061] env[62070]: ERROR nova.compute.manager [instance: 3ee4e051-f51d-4840-a918-fdedad020557] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 642.260061] env[62070]: ERROR nova.compute.manager [instance: 3ee4e051-f51d-4840-a918-fdedad020557] nwinfo = self.network_api.allocate_for_instance( [ 642.260061] env[62070]: ERROR nova.compute.manager [instance: 3ee4e051-f51d-4840-a918-fdedad020557] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 642.260061] env[62070]: ERROR nova.compute.manager [instance: 3ee4e051-f51d-4840-a918-fdedad020557] created_port_ids = self._update_ports_for_instance( [ 642.260061] env[62070]: ERROR nova.compute.manager [instance: 3ee4e051-f51d-4840-a918-fdedad020557] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 642.260061] env[62070]: ERROR nova.compute.manager [instance: 3ee4e051-f51d-4840-a918-fdedad020557] with excutils.save_and_reraise_exception(): [ 642.260061] env[62070]: ERROR nova.compute.manager [instance: 3ee4e051-f51d-4840-a918-fdedad020557] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 642.260349] env[62070]: ERROR nova.compute.manager [instance: 3ee4e051-f51d-4840-a918-fdedad020557] self.force_reraise() [ 642.260349] env[62070]: ERROR nova.compute.manager [instance: 3ee4e051-f51d-4840-a918-fdedad020557] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 642.260349] env[62070]: ERROR nova.compute.manager [instance: 3ee4e051-f51d-4840-a918-fdedad020557] raise self.value [ 642.260349] env[62070]: ERROR nova.compute.manager [instance: 3ee4e051-f51d-4840-a918-fdedad020557] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 642.260349] env[62070]: ERROR nova.compute.manager [instance: 3ee4e051-f51d-4840-a918-fdedad020557] updated_port = self._update_port( [ 642.260349] env[62070]: ERROR nova.compute.manager [instance: 3ee4e051-f51d-4840-a918-fdedad020557] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 642.260349] env[62070]: ERROR nova.compute.manager [instance: 3ee4e051-f51d-4840-a918-fdedad020557] _ensure_no_port_binding_failure(port) [ 642.260349] env[62070]: ERROR nova.compute.manager [instance: 3ee4e051-f51d-4840-a918-fdedad020557] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 642.260349] env[62070]: ERROR nova.compute.manager [instance: 3ee4e051-f51d-4840-a918-fdedad020557] raise exception.PortBindingFailed(port_id=port['id']) [ 642.260349] env[62070]: ERROR nova.compute.manager [instance: 3ee4e051-f51d-4840-a918-fdedad020557] nova.exception.PortBindingFailed: Binding failed for port f047973c-4e00-4312-949c-099b3b04f342, please check neutron logs for more information. [ 642.260349] env[62070]: ERROR nova.compute.manager [instance: 3ee4e051-f51d-4840-a918-fdedad020557] [ 642.260683] env[62070]: DEBUG nova.compute.utils [None req-ae386fad-6b3a-4fae-9284-28954bd3da74 tempest-ServersTestJSON-1024696343 tempest-ServersTestJSON-1024696343-project-member] [instance: 3ee4e051-f51d-4840-a918-fdedad020557] Binding failed for port f047973c-4e00-4312-949c-099b3b04f342, please check neutron logs for more information. {{(pid=62070) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 642.261019] env[62070]: DEBUG oslo_concurrency.lockutils [None req-8576b7f8-0e27-4444-92a4-eb32008ddebf tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.207s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 642.263464] env[62070]: INFO nova.compute.claims [None req-8576b7f8-0e27-4444-92a4-eb32008ddebf tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] [instance: b0134b0f-23b4-4d34-b144-71ccdd9fba72] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 642.268027] env[62070]: DEBUG nova.compute.manager [None req-ae386fad-6b3a-4fae-9284-28954bd3da74 tempest-ServersTestJSON-1024696343 tempest-ServersTestJSON-1024696343-project-member] [instance: 3ee4e051-f51d-4840-a918-fdedad020557] Build of instance 3ee4e051-f51d-4840-a918-fdedad020557 was re-scheduled: Binding failed for port f047973c-4e00-4312-949c-099b3b04f342, please check neutron logs for more information. 
{{(pid=62070) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 642.268673] env[62070]: DEBUG nova.compute.manager [None req-ae386fad-6b3a-4fae-9284-28954bd3da74 tempest-ServersTestJSON-1024696343 tempest-ServersTestJSON-1024696343-project-member] [instance: 3ee4e051-f51d-4840-a918-fdedad020557] Unplugging VIFs for instance {{(pid=62070) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 642.268944] env[62070]: DEBUG oslo_concurrency.lockutils [None req-ae386fad-6b3a-4fae-9284-28954bd3da74 tempest-ServersTestJSON-1024696343 tempest-ServersTestJSON-1024696343-project-member] Acquiring lock "refresh_cache-3ee4e051-f51d-4840-a918-fdedad020557" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 642.269225] env[62070]: DEBUG oslo_concurrency.lockutils [None req-ae386fad-6b3a-4fae-9284-28954bd3da74 tempest-ServersTestJSON-1024696343 tempest-ServersTestJSON-1024696343-project-member] Acquired lock "refresh_cache-3ee4e051-f51d-4840-a918-fdedad020557" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 642.269324] env[62070]: DEBUG nova.network.neutron [None req-ae386fad-6b3a-4fae-9284-28954bd3da74 tempest-ServersTestJSON-1024696343 tempest-ServersTestJSON-1024696343-project-member] [instance: 3ee4e051-f51d-4840-a918-fdedad020557] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 642.276431] env[62070]: DEBUG nova.virt.hardware [None req-c926e67b-5387-49dc-b89c-40a189a7f3d7 tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T09:21:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T09:21:17Z,direct_url=,disk_format='vmdk',id=43ea607c-7ece-4601-9b11-75c6a16aa7dd,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9d42cb2bbadf40d6b35f237f71234611',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T09:21:18Z,virtual_size=,visibility=), allow threads: False {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 642.276661] env[62070]: DEBUG nova.virt.hardware [None req-c926e67b-5387-49dc-b89c-40a189a7f3d7 tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] Flavor limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 642.276816] env[62070]: DEBUG nova.virt.hardware [None req-c926e67b-5387-49dc-b89c-40a189a7f3d7 tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] Image limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 642.276994] env[62070]: DEBUG nova.virt.hardware [None req-c926e67b-5387-49dc-b89c-40a189a7f3d7 tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] Flavor pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 642.277160] env[62070]: DEBUG nova.virt.hardware [None 
req-c926e67b-5387-49dc-b89c-40a189a7f3d7 tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] Image pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 642.277431] env[62070]: DEBUG nova.virt.hardware [None req-c926e67b-5387-49dc-b89c-40a189a7f3d7 tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 642.277500] env[62070]: DEBUG nova.virt.hardware [None req-c926e67b-5387-49dc-b89c-40a189a7f3d7 tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 642.277654] env[62070]: DEBUG nova.virt.hardware [None req-c926e67b-5387-49dc-b89c-40a189a7f3d7 tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 642.277823] env[62070]: DEBUG nova.virt.hardware [None req-c926e67b-5387-49dc-b89c-40a189a7f3d7 tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] Got 1 possible topologies {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 642.277984] env[62070]: DEBUG nova.virt.hardware [None req-c926e67b-5387-49dc-b89c-40a189a7f3d7 tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 642.278481] env[62070]: DEBUG nova.virt.hardware [None req-c926e67b-5387-49dc-b89c-40a189a7f3d7 tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 642.279715] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-362f222d-842d-446e-97c9-2e86bf74c5f4 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.290616] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcd0344b-2109-4b8c-a8f0-165433e08925 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.442552] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6c8ffe8c-5f36-434d-8036-6e71a11d52b4 tempest-InstanceActionsV221TestJSON-1098319233 tempest-InstanceActionsV221TestJSON-1098319233-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 642.761523] env[62070]: DEBUG nova.network.neutron [None req-c926e67b-5387-49dc-b89c-40a189a7f3d7 tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] [instance: 95edf3d1-a987-4768-93be-1e045d7bfa99] Successfully created port: 
573b1d45-d148-48d1-bd73-3c2740606cfd {{(pid=62070) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 642.819540] env[62070]: DEBUG nova.network.neutron [None req-ae386fad-6b3a-4fae-9284-28954bd3da74 tempest-ServersTestJSON-1024696343 tempest-ServersTestJSON-1024696343-project-member] [instance: 3ee4e051-f51d-4840-a918-fdedad020557] Instance cache missing network info. {{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 643.103140] env[62070]: DEBUG nova.network.neutron [None req-ae386fad-6b3a-4fae-9284-28954bd3da74 tempest-ServersTestJSON-1024696343 tempest-ServersTestJSON-1024696343-project-member] [instance: 3ee4e051-f51d-4840-a918-fdedad020557] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 643.606119] env[62070]: DEBUG oslo_concurrency.lockutils [None req-ae386fad-6b3a-4fae-9284-28954bd3da74 tempest-ServersTestJSON-1024696343 tempest-ServersTestJSON-1024696343-project-member] Releasing lock "refresh_cache-3ee4e051-f51d-4840-a918-fdedad020557" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 643.606119] env[62070]: DEBUG nova.compute.manager [None req-ae386fad-6b3a-4fae-9284-28954bd3da74 tempest-ServersTestJSON-1024696343 tempest-ServersTestJSON-1024696343-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62070) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 643.606119] env[62070]: DEBUG nova.compute.manager [None req-ae386fad-6b3a-4fae-9284-28954bd3da74 tempest-ServersTestJSON-1024696343 tempest-ServersTestJSON-1024696343-project-member] [instance: 3ee4e051-f51d-4840-a918-fdedad020557] Deallocating network for instance {{(pid=62070) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 643.606119] env[62070]: DEBUG nova.network.neutron [None req-ae386fad-6b3a-4fae-9284-28954bd3da74 tempest-ServersTestJSON-1024696343 tempest-ServersTestJSON-1024696343-project-member] [instance: 3ee4e051-f51d-4840-a918-fdedad020557] deallocate_for_instance() {{(pid=62070) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 643.630199] env[62070]: DEBUG nova.network.neutron [None req-ae386fad-6b3a-4fae-9284-28954bd3da74 tempest-ServersTestJSON-1024696343 tempest-ServersTestJSON-1024696343-project-member] [instance: 3ee4e051-f51d-4840-a918-fdedad020557] Instance cache missing network info. 
{{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 643.715058] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ea25be9-d58f-4b66-af95-c368c1316799 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.726438] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5470a271-96b4-4eac-a6a4-2776b48307ff {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.758488] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-981f3a46-9848-4280-9ae0-4ad87ada2e2a {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.772576] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ab0421c-d3e7-40fb-a356-3babf45b3142 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.787679] env[62070]: DEBUG nova.compute.provider_tree [None req-8576b7f8-0e27-4444-92a4-eb32008ddebf tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 644.132960] env[62070]: DEBUG nova.network.neutron [None req-ae386fad-6b3a-4fae-9284-28954bd3da74 tempest-ServersTestJSON-1024696343 tempest-ServersTestJSON-1024696343-project-member] [instance: 3ee4e051-f51d-4840-a918-fdedad020557] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 644.290721] env[62070]: DEBUG nova.scheduler.client.report [None req-8576b7f8-0e27-4444-92a4-eb32008ddebf tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 644.640176] env[62070]: INFO nova.compute.manager [None req-ae386fad-6b3a-4fae-9284-28954bd3da74 tempest-ServersTestJSON-1024696343 tempest-ServersTestJSON-1024696343-project-member] [instance: 3ee4e051-f51d-4840-a918-fdedad020557] Took 1.03 seconds to deallocate network for instance. 
[ 644.798072] env[62070]: DEBUG oslo_concurrency.lockutils [None req-8576b7f8-0e27-4444-92a4-eb32008ddebf tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.536s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 644.798072] env[62070]: DEBUG nova.compute.manager [None req-8576b7f8-0e27-4444-92a4-eb32008ddebf tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] [instance: b0134b0f-23b4-4d34-b144-71ccdd9fba72] Start building networks asynchronously for instance. {{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 644.800584] env[62070]: DEBUG oslo_concurrency.lockutils [None req-96d2c368-fe9f-4ee6-a530-cc95bf35b01a tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 13.063s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 645.093441] env[62070]: DEBUG nova.compute.manager [req-8eed7591-92f5-4056-9475-9027bcb479f2 req-8ee3aef8-9db5-4cbf-b82e-9fc886102b2f service nova] [instance: 95edf3d1-a987-4768-93be-1e045d7bfa99] Received event network-changed-9a8c0775-32a4-4f9c-9496-eb67187f85a0 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 645.093623] env[62070]: DEBUG nova.compute.manager [req-8eed7591-92f5-4056-9475-9027bcb479f2 req-8ee3aef8-9db5-4cbf-b82e-9fc886102b2f service nova] [instance: 95edf3d1-a987-4768-93be-1e045d7bfa99] Refreshing instance network info cache due to event network-changed-9a8c0775-32a4-4f9c-9496-eb67187f85a0. 
{{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 645.093834] env[62070]: DEBUG oslo_concurrency.lockutils [req-8eed7591-92f5-4056-9475-9027bcb479f2 req-8ee3aef8-9db5-4cbf-b82e-9fc886102b2f service nova] Acquiring lock "refresh_cache-95edf3d1-a987-4768-93be-1e045d7bfa99" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 645.093977] env[62070]: DEBUG oslo_concurrency.lockutils [req-8eed7591-92f5-4056-9475-9027bcb479f2 req-8ee3aef8-9db5-4cbf-b82e-9fc886102b2f service nova] Acquired lock "refresh_cache-95edf3d1-a987-4768-93be-1e045d7bfa99" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 645.094146] env[62070]: DEBUG nova.network.neutron [req-8eed7591-92f5-4056-9475-9027bcb479f2 req-8ee3aef8-9db5-4cbf-b82e-9fc886102b2f service nova] [instance: 95edf3d1-a987-4768-93be-1e045d7bfa99] Refreshing network info cache for port 9a8c0775-32a4-4f9c-9496-eb67187f85a0 {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 645.307127] env[62070]: DEBUG nova.compute.utils [None req-8576b7f8-0e27-4444-92a4-eb32008ddebf tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] Using /dev/sd instead of None {{(pid=62070) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 645.314912] env[62070]: DEBUG nova.compute.manager [None req-8576b7f8-0e27-4444-92a4-eb32008ddebf tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] [instance: b0134b0f-23b4-4d34-b144-71ccdd9fba72] Allocating IP information in the background. {{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 645.321251] env[62070]: DEBUG nova.network.neutron [None req-8576b7f8-0e27-4444-92a4-eb32008ddebf tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] [instance: b0134b0f-23b4-4d34-b144-71ccdd9fba72] allocate_for_instance() {{(pid=62070) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 645.454748] env[62070]: ERROR nova.compute.manager [None req-c926e67b-5387-49dc-b89c-40a189a7f3d7 tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 9a8c0775-32a4-4f9c-9496-eb67187f85a0, please check neutron logs for more information. 
[ 645.454748] env[62070]: ERROR nova.compute.manager Traceback (most recent call last): [ 645.454748] env[62070]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 645.454748] env[62070]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 645.454748] env[62070]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 645.454748] env[62070]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 645.454748] env[62070]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 645.454748] env[62070]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 645.454748] env[62070]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 645.454748] env[62070]: ERROR nova.compute.manager self.force_reraise() [ 645.454748] env[62070]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 645.454748] env[62070]: ERROR nova.compute.manager raise self.value [ 645.454748] env[62070]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 645.454748] env[62070]: ERROR nova.compute.manager updated_port = self._update_port( [ 645.454748] env[62070]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 645.454748] env[62070]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 645.455193] env[62070]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 645.455193] env[62070]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 645.455193] env[62070]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 9a8c0775-32a4-4f9c-9496-eb67187f85a0, please check neutron logs for more information. 
[ 645.455193] env[62070]: ERROR nova.compute.manager [ 645.455193] env[62070]: Traceback (most recent call last): [ 645.455193] env[62070]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 645.455193] env[62070]: listener.cb(fileno) [ 645.455193] env[62070]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 645.455193] env[62070]: result = function(*args, **kwargs) [ 645.455193] env[62070]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 645.455193] env[62070]: return func(*args, **kwargs) [ 645.455193] env[62070]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 645.455193] env[62070]: raise e [ 645.455193] env[62070]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 645.455193] env[62070]: nwinfo = self.network_api.allocate_for_instance( [ 645.455193] env[62070]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 645.455193] env[62070]: created_port_ids = self._update_ports_for_instance( [ 645.455193] env[62070]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 645.455193] env[62070]: with excutils.save_and_reraise_exception(): [ 645.455193] env[62070]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 645.455193] env[62070]: self.force_reraise() [ 645.455193] env[62070]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 645.455193] env[62070]: raise self.value [ 645.455193] env[62070]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 645.455193] env[62070]: updated_port = self._update_port( [ 645.455193] env[62070]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 645.455193] env[62070]: _ensure_no_port_binding_failure(port) [ 645.455193] env[62070]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 645.455193] env[62070]: raise exception.PortBindingFailed(port_id=port['id']) [ 645.455887] env[62070]: nova.exception.PortBindingFailed: Binding failed for port 9a8c0775-32a4-4f9c-9496-eb67187f85a0, please check neutron logs for more information. [ 645.455887] env[62070]: Removing descriptor: 16 [ 645.455887] env[62070]: ERROR nova.compute.manager [None req-c926e67b-5387-49dc-b89c-40a189a7f3d7 tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] [instance: 95edf3d1-a987-4768-93be-1e045d7bfa99] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 9a8c0775-32a4-4f9c-9496-eb67187f85a0, please check neutron logs for more information. 
[ 645.455887] env[62070]: ERROR nova.compute.manager [instance: 95edf3d1-a987-4768-93be-1e045d7bfa99] Traceback (most recent call last): [ 645.455887] env[62070]: ERROR nova.compute.manager [instance: 95edf3d1-a987-4768-93be-1e045d7bfa99] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 645.455887] env[62070]: ERROR nova.compute.manager [instance: 95edf3d1-a987-4768-93be-1e045d7bfa99] yield resources [ 645.455887] env[62070]: ERROR nova.compute.manager [instance: 95edf3d1-a987-4768-93be-1e045d7bfa99] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 645.455887] env[62070]: ERROR nova.compute.manager [instance: 95edf3d1-a987-4768-93be-1e045d7bfa99] self.driver.spawn(context, instance, image_meta, [ 645.455887] env[62070]: ERROR nova.compute.manager [instance: 95edf3d1-a987-4768-93be-1e045d7bfa99] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 645.455887] env[62070]: ERROR nova.compute.manager [instance: 95edf3d1-a987-4768-93be-1e045d7bfa99] self._vmops.spawn(context, instance, image_meta, injected_files, [ 645.455887] env[62070]: ERROR nova.compute.manager [instance: 95edf3d1-a987-4768-93be-1e045d7bfa99] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 645.455887] env[62070]: ERROR nova.compute.manager [instance: 95edf3d1-a987-4768-93be-1e045d7bfa99] vm_ref = self.build_virtual_machine(instance, [ 645.456173] env[62070]: ERROR nova.compute.manager [instance: 95edf3d1-a987-4768-93be-1e045d7bfa99] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 645.456173] env[62070]: ERROR nova.compute.manager [instance: 95edf3d1-a987-4768-93be-1e045d7bfa99] vif_infos = vmwarevif.get_vif_info(self._session, [ 645.456173] env[62070]: ERROR nova.compute.manager [instance: 95edf3d1-a987-4768-93be-1e045d7bfa99] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 645.456173] env[62070]: ERROR nova.compute.manager [instance: 95edf3d1-a987-4768-93be-1e045d7bfa99] for vif in network_info: [ 645.456173] env[62070]: ERROR nova.compute.manager [instance: 95edf3d1-a987-4768-93be-1e045d7bfa99] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 645.456173] env[62070]: ERROR nova.compute.manager [instance: 95edf3d1-a987-4768-93be-1e045d7bfa99] return self._sync_wrapper(fn, *args, **kwargs) [ 645.456173] env[62070]: ERROR nova.compute.manager [instance: 95edf3d1-a987-4768-93be-1e045d7bfa99] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 645.456173] env[62070]: ERROR nova.compute.manager [instance: 95edf3d1-a987-4768-93be-1e045d7bfa99] self.wait() [ 645.456173] env[62070]: ERROR nova.compute.manager [instance: 95edf3d1-a987-4768-93be-1e045d7bfa99] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 645.456173] env[62070]: ERROR nova.compute.manager [instance: 95edf3d1-a987-4768-93be-1e045d7bfa99] self[:] = self._gt.wait() [ 645.456173] env[62070]: ERROR nova.compute.manager [instance: 95edf3d1-a987-4768-93be-1e045d7bfa99] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 645.456173] env[62070]: ERROR nova.compute.manager [instance: 95edf3d1-a987-4768-93be-1e045d7bfa99] return self._exit_event.wait() [ 645.456173] env[62070]: ERROR nova.compute.manager [instance: 95edf3d1-a987-4768-93be-1e045d7bfa99] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 645.456472] env[62070]: ERROR 
nova.compute.manager [instance: 95edf3d1-a987-4768-93be-1e045d7bfa99] result = hub.switch() [ 645.456472] env[62070]: ERROR nova.compute.manager [instance: 95edf3d1-a987-4768-93be-1e045d7bfa99] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 645.456472] env[62070]: ERROR nova.compute.manager [instance: 95edf3d1-a987-4768-93be-1e045d7bfa99] return self.greenlet.switch() [ 645.456472] env[62070]: ERROR nova.compute.manager [instance: 95edf3d1-a987-4768-93be-1e045d7bfa99] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 645.456472] env[62070]: ERROR nova.compute.manager [instance: 95edf3d1-a987-4768-93be-1e045d7bfa99] result = function(*args, **kwargs) [ 645.456472] env[62070]: ERROR nova.compute.manager [instance: 95edf3d1-a987-4768-93be-1e045d7bfa99] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 645.456472] env[62070]: ERROR nova.compute.manager [instance: 95edf3d1-a987-4768-93be-1e045d7bfa99] return func(*args, **kwargs) [ 645.456472] env[62070]: ERROR nova.compute.manager [instance: 95edf3d1-a987-4768-93be-1e045d7bfa99] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 645.456472] env[62070]: ERROR nova.compute.manager [instance: 95edf3d1-a987-4768-93be-1e045d7bfa99] raise e [ 645.456472] env[62070]: ERROR nova.compute.manager [instance: 95edf3d1-a987-4768-93be-1e045d7bfa99] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 645.456472] env[62070]: ERROR nova.compute.manager [instance: 95edf3d1-a987-4768-93be-1e045d7bfa99] nwinfo = self.network_api.allocate_for_instance( [ 645.456472] env[62070]: ERROR nova.compute.manager [instance: 95edf3d1-a987-4768-93be-1e045d7bfa99] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 645.456472] env[62070]: ERROR nova.compute.manager [instance: 95edf3d1-a987-4768-93be-1e045d7bfa99] created_port_ids = self._update_ports_for_instance( [ 645.456781] env[62070]: ERROR nova.compute.manager [instance: 95edf3d1-a987-4768-93be-1e045d7bfa99] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 645.456781] env[62070]: ERROR nova.compute.manager [instance: 95edf3d1-a987-4768-93be-1e045d7bfa99] with excutils.save_and_reraise_exception(): [ 645.456781] env[62070]: ERROR nova.compute.manager [instance: 95edf3d1-a987-4768-93be-1e045d7bfa99] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 645.456781] env[62070]: ERROR nova.compute.manager [instance: 95edf3d1-a987-4768-93be-1e045d7bfa99] self.force_reraise() [ 645.456781] env[62070]: ERROR nova.compute.manager [instance: 95edf3d1-a987-4768-93be-1e045d7bfa99] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 645.456781] env[62070]: ERROR nova.compute.manager [instance: 95edf3d1-a987-4768-93be-1e045d7bfa99] raise self.value [ 645.456781] env[62070]: ERROR nova.compute.manager [instance: 95edf3d1-a987-4768-93be-1e045d7bfa99] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 645.456781] env[62070]: ERROR nova.compute.manager [instance: 95edf3d1-a987-4768-93be-1e045d7bfa99] updated_port = self._update_port( [ 645.456781] env[62070]: ERROR nova.compute.manager [instance: 95edf3d1-a987-4768-93be-1e045d7bfa99] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 645.456781] 
env[62070]: ERROR nova.compute.manager [instance: 95edf3d1-a987-4768-93be-1e045d7bfa99] _ensure_no_port_binding_failure(port) [ 645.456781] env[62070]: ERROR nova.compute.manager [instance: 95edf3d1-a987-4768-93be-1e045d7bfa99] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 645.456781] env[62070]: ERROR nova.compute.manager [instance: 95edf3d1-a987-4768-93be-1e045d7bfa99] raise exception.PortBindingFailed(port_id=port['id']) [ 645.457079] env[62070]: ERROR nova.compute.manager [instance: 95edf3d1-a987-4768-93be-1e045d7bfa99] nova.exception.PortBindingFailed: Binding failed for port 9a8c0775-32a4-4f9c-9496-eb67187f85a0, please check neutron logs for more information. [ 645.457079] env[62070]: ERROR nova.compute.manager [instance: 95edf3d1-a987-4768-93be-1e045d7bfa99] [ 645.457079] env[62070]: INFO nova.compute.manager [None req-c926e67b-5387-49dc-b89c-40a189a7f3d7 tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] [instance: 95edf3d1-a987-4768-93be-1e045d7bfa99] Terminating instance [ 645.459230] env[62070]: DEBUG oslo_concurrency.lockutils [None req-c926e67b-5387-49dc-b89c-40a189a7f3d7 tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] Acquiring lock "refresh_cache-95edf3d1-a987-4768-93be-1e045d7bfa99" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 645.485606] env[62070]: DEBUG nova.policy [None req-8576b7f8-0e27-4444-92a4-eb32008ddebf tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8733fd804aff428eb12dab1fade3597f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '99567c9d305e486f9559797458c9a0e5', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62070) authorize /opt/stack/nova/nova/policy.py:201}} [ 645.667496] env[62070]: DEBUG nova.network.neutron [req-8eed7591-92f5-4056-9475-9027bcb479f2 req-8ee3aef8-9db5-4cbf-b82e-9fc886102b2f service nova] [instance: 95edf3d1-a987-4768-93be-1e045d7bfa99] Instance cache missing network info. 
{{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 645.670009] env[62070]: INFO nova.scheduler.client.report [None req-ae386fad-6b3a-4fae-9284-28954bd3da74 tempest-ServersTestJSON-1024696343 tempest-ServersTestJSON-1024696343-project-member] Deleted allocations for instance 3ee4e051-f51d-4840-a918-fdedad020557 [ 645.751839] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eed61630-bc41-4dcd-9055-8d2a79e0f903 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.760975] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16e48e09-ed7f-4348-86b5-7696ac59cbeb {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.796897] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0c4ebf8-7fde-42fc-b138-4c0b3dc5699d {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.805744] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a2ce6a6-4890-4513-a748-ba7edbf266b0 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.820358] env[62070]: DEBUG nova.compute.manager [None req-8576b7f8-0e27-4444-92a4-eb32008ddebf tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] [instance: b0134b0f-23b4-4d34-b144-71ccdd9fba72] Start building block device mappings for instance. {{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 645.822449] env[62070]: DEBUG nova.compute.provider_tree [None req-96d2c368-fe9f-4ee6-a530-cc95bf35b01a tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 645.905109] env[62070]: DEBUG nova.network.neutron [req-8eed7591-92f5-4056-9475-9027bcb479f2 req-8ee3aef8-9db5-4cbf-b82e-9fc886102b2f service nova] [instance: 95edf3d1-a987-4768-93be-1e045d7bfa99] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 646.181018] env[62070]: DEBUG oslo_concurrency.lockutils [None req-ae386fad-6b3a-4fae-9284-28954bd3da74 tempest-ServersTestJSON-1024696343 tempest-ServersTestJSON-1024696343-project-member] Lock "3ee4e051-f51d-4840-a918-fdedad020557" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 66.257s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 646.333611] env[62070]: DEBUG nova.scheduler.client.report [None req-96d2c368-fe9f-4ee6-a530-cc95bf35b01a tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 
'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 647.048736] env[62070]: DEBUG nova.network.neutron [None req-8576b7f8-0e27-4444-92a4-eb32008ddebf tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] [instance: b0134b0f-23b4-4d34-b144-71ccdd9fba72] Successfully created port: 18b8d97b-3f4c-445e-8136-989a05f41994 {{(pid=62070) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 647.050832] env[62070]: DEBUG oslo_concurrency.lockutils [req-8eed7591-92f5-4056-9475-9027bcb479f2 req-8ee3aef8-9db5-4cbf-b82e-9fc886102b2f service nova] Releasing lock "refresh_cache-95edf3d1-a987-4768-93be-1e045d7bfa99" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 647.051278] env[62070]: DEBUG nova.compute.manager [None req-75e4eab2-d509-4e0b-8199-5c5644d968e3 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 5da19104-b163-44cd-bb1f-68c4eb316ac1] Starting instance... {{(pid=62070) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 647.054207] env[62070]: DEBUG oslo_concurrency.lockutils [None req-96d2c368-fe9f-4ee6-a530-cc95bf35b01a tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.253s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 647.054684] env[62070]: ERROR nova.compute.manager [None req-96d2c368-fe9f-4ee6-a530-cc95bf35b01a tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 73ab65b7-32e7-4206-8f31-466085319c71] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 676dde2d-c1da-454a-a873-1984ec64fadb, please check neutron logs for more information. 
[ 647.054684] env[62070]: ERROR nova.compute.manager [instance: 73ab65b7-32e7-4206-8f31-466085319c71] Traceback (most recent call last): [ 647.054684] env[62070]: ERROR nova.compute.manager [instance: 73ab65b7-32e7-4206-8f31-466085319c71] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 647.054684] env[62070]: ERROR nova.compute.manager [instance: 73ab65b7-32e7-4206-8f31-466085319c71] self.driver.spawn(context, instance, image_meta, [ 647.054684] env[62070]: ERROR nova.compute.manager [instance: 73ab65b7-32e7-4206-8f31-466085319c71] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 647.054684] env[62070]: ERROR nova.compute.manager [instance: 73ab65b7-32e7-4206-8f31-466085319c71] self._vmops.spawn(context, instance, image_meta, injected_files, [ 647.054684] env[62070]: ERROR nova.compute.manager [instance: 73ab65b7-32e7-4206-8f31-466085319c71] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 647.054684] env[62070]: ERROR nova.compute.manager [instance: 73ab65b7-32e7-4206-8f31-466085319c71] vm_ref = self.build_virtual_machine(instance, [ 647.054684] env[62070]: ERROR nova.compute.manager [instance: 73ab65b7-32e7-4206-8f31-466085319c71] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 647.054684] env[62070]: ERROR nova.compute.manager [instance: 73ab65b7-32e7-4206-8f31-466085319c71] vif_infos = vmwarevif.get_vif_info(self._session, [ 647.054684] env[62070]: ERROR nova.compute.manager [instance: 73ab65b7-32e7-4206-8f31-466085319c71] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 647.055074] env[62070]: ERROR nova.compute.manager [instance: 73ab65b7-32e7-4206-8f31-466085319c71] for vif in network_info: [ 647.055074] env[62070]: ERROR nova.compute.manager [instance: 73ab65b7-32e7-4206-8f31-466085319c71] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 647.055074] env[62070]: ERROR nova.compute.manager [instance: 73ab65b7-32e7-4206-8f31-466085319c71] return self._sync_wrapper(fn, *args, **kwargs) [ 647.055074] env[62070]: ERROR nova.compute.manager [instance: 73ab65b7-32e7-4206-8f31-466085319c71] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 647.055074] env[62070]: ERROR nova.compute.manager [instance: 73ab65b7-32e7-4206-8f31-466085319c71] self.wait() [ 647.055074] env[62070]: ERROR nova.compute.manager [instance: 73ab65b7-32e7-4206-8f31-466085319c71] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 647.055074] env[62070]: ERROR nova.compute.manager [instance: 73ab65b7-32e7-4206-8f31-466085319c71] self[:] = self._gt.wait() [ 647.055074] env[62070]: ERROR nova.compute.manager [instance: 73ab65b7-32e7-4206-8f31-466085319c71] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 647.055074] env[62070]: ERROR nova.compute.manager [instance: 73ab65b7-32e7-4206-8f31-466085319c71] return self._exit_event.wait() [ 647.055074] env[62070]: ERROR nova.compute.manager [instance: 73ab65b7-32e7-4206-8f31-466085319c71] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 647.055074] env[62070]: ERROR nova.compute.manager [instance: 73ab65b7-32e7-4206-8f31-466085319c71] result = hub.switch() [ 647.055074] env[62070]: ERROR nova.compute.manager [instance: 73ab65b7-32e7-4206-8f31-466085319c71] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
647.055074] env[62070]: ERROR nova.compute.manager [instance: 73ab65b7-32e7-4206-8f31-466085319c71] return self.greenlet.switch() [ 647.055365] env[62070]: ERROR nova.compute.manager [instance: 73ab65b7-32e7-4206-8f31-466085319c71] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 647.055365] env[62070]: ERROR nova.compute.manager [instance: 73ab65b7-32e7-4206-8f31-466085319c71] result = function(*args, **kwargs) [ 647.055365] env[62070]: ERROR nova.compute.manager [instance: 73ab65b7-32e7-4206-8f31-466085319c71] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 647.055365] env[62070]: ERROR nova.compute.manager [instance: 73ab65b7-32e7-4206-8f31-466085319c71] return func(*args, **kwargs) [ 647.055365] env[62070]: ERROR nova.compute.manager [instance: 73ab65b7-32e7-4206-8f31-466085319c71] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 647.055365] env[62070]: ERROR nova.compute.manager [instance: 73ab65b7-32e7-4206-8f31-466085319c71] raise e [ 647.055365] env[62070]: ERROR nova.compute.manager [instance: 73ab65b7-32e7-4206-8f31-466085319c71] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 647.055365] env[62070]: ERROR nova.compute.manager [instance: 73ab65b7-32e7-4206-8f31-466085319c71] nwinfo = self.network_api.allocate_for_instance( [ 647.055365] env[62070]: ERROR nova.compute.manager [instance: 73ab65b7-32e7-4206-8f31-466085319c71] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 647.055365] env[62070]: ERROR nova.compute.manager [instance: 73ab65b7-32e7-4206-8f31-466085319c71] created_port_ids = self._update_ports_for_instance( [ 647.055365] env[62070]: ERROR nova.compute.manager [instance: 73ab65b7-32e7-4206-8f31-466085319c71] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 647.055365] env[62070]: ERROR nova.compute.manager [instance: 73ab65b7-32e7-4206-8f31-466085319c71] with excutils.save_and_reraise_exception(): [ 647.055365] env[62070]: ERROR nova.compute.manager [instance: 73ab65b7-32e7-4206-8f31-466085319c71] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 647.055656] env[62070]: ERROR nova.compute.manager [instance: 73ab65b7-32e7-4206-8f31-466085319c71] self.force_reraise() [ 647.055656] env[62070]: ERROR nova.compute.manager [instance: 73ab65b7-32e7-4206-8f31-466085319c71] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 647.055656] env[62070]: ERROR nova.compute.manager [instance: 73ab65b7-32e7-4206-8f31-466085319c71] raise self.value [ 647.055656] env[62070]: ERROR nova.compute.manager [instance: 73ab65b7-32e7-4206-8f31-466085319c71] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 647.055656] env[62070]: ERROR nova.compute.manager [instance: 73ab65b7-32e7-4206-8f31-466085319c71] updated_port = self._update_port( [ 647.055656] env[62070]: ERROR nova.compute.manager [instance: 73ab65b7-32e7-4206-8f31-466085319c71] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 647.055656] env[62070]: ERROR nova.compute.manager [instance: 73ab65b7-32e7-4206-8f31-466085319c71] _ensure_no_port_binding_failure(port) [ 647.055656] env[62070]: ERROR nova.compute.manager [instance: 73ab65b7-32e7-4206-8f31-466085319c71] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 647.055656] env[62070]: ERROR nova.compute.manager [instance: 73ab65b7-32e7-4206-8f31-466085319c71] raise exception.PortBindingFailed(port_id=port['id']) [ 647.055656] env[62070]: ERROR nova.compute.manager [instance: 73ab65b7-32e7-4206-8f31-466085319c71] nova.exception.PortBindingFailed: Binding failed for port 676dde2d-c1da-454a-a873-1984ec64fadb, please check neutron logs for more information. [ 647.055656] env[62070]: ERROR nova.compute.manager [instance: 73ab65b7-32e7-4206-8f31-466085319c71] [ 647.055947] env[62070]: DEBUG nova.compute.utils [None req-96d2c368-fe9f-4ee6-a530-cc95bf35b01a tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 73ab65b7-32e7-4206-8f31-466085319c71] Binding failed for port 676dde2d-c1da-454a-a873-1984ec64fadb, please check neutron logs for more information. {{(pid=62070) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 647.057156] env[62070]: DEBUG nova.compute.manager [None req-8576b7f8-0e27-4444-92a4-eb32008ddebf tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] [instance: b0134b0f-23b4-4d34-b144-71ccdd9fba72] Start spawning the instance on the hypervisor. {{(pid=62070) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 647.062174] env[62070]: DEBUG nova.compute.manager [None req-96d2c368-fe9f-4ee6-a530-cc95bf35b01a tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 73ab65b7-32e7-4206-8f31-466085319c71] Build of instance 73ab65b7-32e7-4206-8f31-466085319c71 was re-scheduled: Binding failed for port 676dde2d-c1da-454a-a873-1984ec64fadb, please check neutron logs for more information. 
{{(pid=62070) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 647.062963] env[62070]: DEBUG nova.compute.manager [None req-96d2c368-fe9f-4ee6-a530-cc95bf35b01a tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 73ab65b7-32e7-4206-8f31-466085319c71] Unplugging VIFs for instance {{(pid=62070) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 647.062963] env[62070]: DEBUG oslo_concurrency.lockutils [None req-96d2c368-fe9f-4ee6-a530-cc95bf35b01a tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Acquiring lock "refresh_cache-73ab65b7-32e7-4206-8f31-466085319c71" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 647.062963] env[62070]: DEBUG oslo_concurrency.lockutils [None req-96d2c368-fe9f-4ee6-a530-cc95bf35b01a tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Acquired lock "refresh_cache-73ab65b7-32e7-4206-8f31-466085319c71" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 647.064942] env[62070]: DEBUG nova.network.neutron [None req-96d2c368-fe9f-4ee6-a530-cc95bf35b01a tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 73ab65b7-32e7-4206-8f31-466085319c71] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 647.064942] env[62070]: DEBUG oslo_concurrency.lockutils [None req-c926e67b-5387-49dc-b89c-40a189a7f3d7 tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] Acquired lock "refresh_cache-95edf3d1-a987-4768-93be-1e045d7bfa99" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 647.064942] env[62070]: DEBUG nova.network.neutron [None req-c926e67b-5387-49dc-b89c-40a189a7f3d7 tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] [instance: 95edf3d1-a987-4768-93be-1e045d7bfa99] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 647.065325] env[62070]: DEBUG oslo_concurrency.lockutils [None req-a4dda7df-1307-4390-b3d0-8b40b9135ae8 tempest-ImagesOneServerTestJSON-651636145 tempest-ImagesOneServerTestJSON-651636145-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.173s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 647.066643] env[62070]: INFO nova.compute.claims [None req-a4dda7df-1307-4390-b3d0-8b40b9135ae8 tempest-ImagesOneServerTestJSON-651636145 tempest-ImagesOneServerTestJSON-651636145-project-member] [instance: d41f73e0-a188-4cc4-8391-938178aad496] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 647.070168] env[62070]: DEBUG oslo_concurrency.lockutils [None req-3add793a-e278-4147-906b-0dddec8a7d40 tempest-ServerAddressesTestJSON-560794175 tempest-ServerAddressesTestJSON-560794175-project-member] Acquiring lock "a3fcb849-b015-43aa-8f95-0d4a87e2cecc" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 647.070168] env[62070]: DEBUG 
oslo_concurrency.lockutils [None req-3add793a-e278-4147-906b-0dddec8a7d40 tempest-ServerAddressesTestJSON-560794175 tempest-ServerAddressesTestJSON-560794175-project-member] Lock "a3fcb849-b015-43aa-8f95-0d4a87e2cecc" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 647.094110] env[62070]: DEBUG nova.virt.hardware [None req-8576b7f8-0e27-4444-92a4-eb32008ddebf tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T09:21:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T09:21:17Z,direct_url=,disk_format='vmdk',id=43ea607c-7ece-4601-9b11-75c6a16aa7dd,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9d42cb2bbadf40d6b35f237f71234611',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T09:21:18Z,virtual_size=,visibility=), allow threads: False {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 647.094556] env[62070]: DEBUG nova.virt.hardware [None req-8576b7f8-0e27-4444-92a4-eb32008ddebf tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] Flavor limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 647.094794] env[62070]: DEBUG nova.virt.hardware [None req-8576b7f8-0e27-4444-92a4-eb32008ddebf tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] Image limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 647.095041] env[62070]: DEBUG nova.virt.hardware [None req-8576b7f8-0e27-4444-92a4-eb32008ddebf tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] Flavor pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 647.095279] env[62070]: DEBUG nova.virt.hardware [None req-8576b7f8-0e27-4444-92a4-eb32008ddebf tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] Image pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 647.095490] env[62070]: DEBUG nova.virt.hardware [None req-8576b7f8-0e27-4444-92a4-eb32008ddebf tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 647.096857] env[62070]: DEBUG nova.virt.hardware [None req-8576b7f8-0e27-4444-92a4-eb32008ddebf tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} 
[ 647.096857] env[62070]: DEBUG nova.virt.hardware [None req-8576b7f8-0e27-4444-92a4-eb32008ddebf tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 647.096857] env[62070]: DEBUG nova.virt.hardware [None req-8576b7f8-0e27-4444-92a4-eb32008ddebf tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] Got 1 possible topologies {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 647.096857] env[62070]: DEBUG nova.virt.hardware [None req-8576b7f8-0e27-4444-92a4-eb32008ddebf tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 647.096857] env[62070]: DEBUG nova.virt.hardware [None req-8576b7f8-0e27-4444-92a4-eb32008ddebf tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 647.097945] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db3d2bad-a396-4829-a1fb-6d61b31014cb {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.108804] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac4c246c-5e67-4d8a-8663-d571ba34f2d5 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.445443] env[62070]: DEBUG nova.compute.manager [req-9af16df2-e1a7-4ea9-ae48-7ab122805567 req-8cde7ef9-2eae-45c6-aa34-1dc842bfbe93 service nova] [instance: 95edf3d1-a987-4768-93be-1e045d7bfa99] Received event network-vif-deleted-9a8c0775-32a4-4f9c-9496-eb67187f85a0 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 647.597265] env[62070]: DEBUG oslo_concurrency.lockutils [None req-75e4eab2-d509-4e0b-8199-5c5644d968e3 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 647.612444] env[62070]: DEBUG nova.network.neutron [None req-96d2c368-fe9f-4ee6-a530-cc95bf35b01a tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 73ab65b7-32e7-4206-8f31-466085319c71] Instance cache missing network info. {{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 647.651332] env[62070]: DEBUG nova.network.neutron [None req-c926e67b-5387-49dc-b89c-40a189a7f3d7 tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] [instance: 95edf3d1-a987-4768-93be-1e045d7bfa99] Instance cache missing network info. 
{{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 647.833102] env[62070]: DEBUG nova.network.neutron [None req-96d2c368-fe9f-4ee6-a530-cc95bf35b01a tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 73ab65b7-32e7-4206-8f31-466085319c71] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 647.867778] env[62070]: DEBUG nova.network.neutron [None req-c926e67b-5387-49dc-b89c-40a189a7f3d7 tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] [instance: 95edf3d1-a987-4768-93be-1e045d7bfa99] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 648.067334] env[62070]: DEBUG oslo_concurrency.lockutils [None req-7c1b8e29-6c3f-402f-942e-08b08c8eac7d tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Acquiring lock "a3c42653-9a4b-42d3-bc38-8d46d95c8f64" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 648.067622] env[62070]: DEBUG oslo_concurrency.lockutils [None req-7c1b8e29-6c3f-402f-942e-08b08c8eac7d tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Lock "a3c42653-9a4b-42d3-bc38-8d46d95c8f64" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 648.336461] env[62070]: DEBUG oslo_concurrency.lockutils [None req-96d2c368-fe9f-4ee6-a530-cc95bf35b01a tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Releasing lock "refresh_cache-73ab65b7-32e7-4206-8f31-466085319c71" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 648.336685] env[62070]: DEBUG nova.compute.manager [None req-96d2c368-fe9f-4ee6-a530-cc95bf35b01a tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62070) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 648.336866] env[62070]: DEBUG nova.compute.manager [None req-96d2c368-fe9f-4ee6-a530-cc95bf35b01a tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 73ab65b7-32e7-4206-8f31-466085319c71] Deallocating network for instance {{(pid=62070) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 648.337045] env[62070]: DEBUG nova.network.neutron [None req-96d2c368-fe9f-4ee6-a530-cc95bf35b01a tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 73ab65b7-32e7-4206-8f31-466085319c71] deallocate_for_instance() {{(pid=62070) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 648.357765] env[62070]: DEBUG nova.network.neutron [None req-96d2c368-fe9f-4ee6-a530-cc95bf35b01a tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 73ab65b7-32e7-4206-8f31-466085319c71] Instance cache missing network info. {{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 648.369979] env[62070]: DEBUG oslo_concurrency.lockutils [None req-c926e67b-5387-49dc-b89c-40a189a7f3d7 tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] Releasing lock "refresh_cache-95edf3d1-a987-4768-93be-1e045d7bfa99" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 648.370407] env[62070]: DEBUG nova.compute.manager [None req-c926e67b-5387-49dc-b89c-40a189a7f3d7 tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] [instance: 95edf3d1-a987-4768-93be-1e045d7bfa99] Start destroying the instance on the hypervisor. {{(pid=62070) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 648.370590] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-c926e67b-5387-49dc-b89c-40a189a7f3d7 tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] [instance: 95edf3d1-a987-4768-93be-1e045d7bfa99] Destroying instance {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 648.370870] env[62070]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-cdf3d7c8-12e9-41ba-8b49-41ff677a0804 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.384133] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b88144cd-9287-4a5d-9872-a26e2efa944e {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.412375] env[62070]: WARNING nova.virt.vmwareapi.vmops [None req-c926e67b-5387-49dc-b89c-40a189a7f3d7 tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] [instance: 95edf3d1-a987-4768-93be-1e045d7bfa99] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 95edf3d1-a987-4768-93be-1e045d7bfa99 could not be found. 
[ 648.412623] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-c926e67b-5387-49dc-b89c-40a189a7f3d7 tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] [instance: 95edf3d1-a987-4768-93be-1e045d7bfa99] Instance destroyed {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 648.412815] env[62070]: INFO nova.compute.manager [None req-c926e67b-5387-49dc-b89c-40a189a7f3d7 tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] [instance: 95edf3d1-a987-4768-93be-1e045d7bfa99] Took 0.04 seconds to destroy the instance on the hypervisor. [ 648.413080] env[62070]: DEBUG oslo.service.loopingcall [None req-c926e67b-5387-49dc-b89c-40a189a7f3d7 tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 648.415668] env[62070]: DEBUG nova.compute.manager [-] [instance: 95edf3d1-a987-4768-93be-1e045d7bfa99] Deallocating network for instance {{(pid=62070) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 648.415668] env[62070]: DEBUG nova.network.neutron [-] [instance: 95edf3d1-a987-4768-93be-1e045d7bfa99] deallocate_for_instance() {{(pid=62070) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 648.480822] env[62070]: DEBUG nova.network.neutron [-] [instance: 95edf3d1-a987-4768-93be-1e045d7bfa99] Instance cache missing network info. {{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 648.509364] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d22aa91-a1aa-48c2-b5d8-c1448dcf561c {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.518452] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-825c5111-69c5-4dd9-8980-f6b9e509960d {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.551365] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-322cc7ab-9239-49c9-8ec3-6075eef62ca1 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.562188] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1550f625-8143-40e8-9256-d9fb81795060 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.577987] env[62070]: DEBUG nova.compute.provider_tree [None req-a4dda7df-1307-4390-b3d0-8b40b9135ae8 tempest-ImagesOneServerTestJSON-651636145 tempest-ImagesOneServerTestJSON-651636145-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 648.861863] env[62070]: DEBUG nova.network.neutron [None req-96d2c368-fe9f-4ee6-a530-cc95bf35b01a tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 73ab65b7-32e7-4206-8f31-466085319c71] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info 
/opt/stack/nova/nova/network/neutron.py:116}} [ 649.083773] env[62070]: DEBUG nova.scheduler.client.report [None req-a4dda7df-1307-4390-b3d0-8b40b9135ae8 tempest-ImagesOneServerTestJSON-651636145 tempest-ImagesOneServerTestJSON-651636145-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 649.213935] env[62070]: ERROR nova.compute.manager [None req-8576b7f8-0e27-4444-92a4-eb32008ddebf tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 18b8d97b-3f4c-445e-8136-989a05f41994, please check neutron logs for more information. [ 649.213935] env[62070]: ERROR nova.compute.manager Traceback (most recent call last): [ 649.213935] env[62070]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 649.213935] env[62070]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 649.213935] env[62070]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 649.213935] env[62070]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 649.213935] env[62070]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 649.213935] env[62070]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 649.213935] env[62070]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 649.213935] env[62070]: ERROR nova.compute.manager self.force_reraise() [ 649.213935] env[62070]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 649.213935] env[62070]: ERROR nova.compute.manager raise self.value [ 649.213935] env[62070]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 649.213935] env[62070]: ERROR nova.compute.manager updated_port = self._update_port( [ 649.213935] env[62070]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 649.213935] env[62070]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 649.214405] env[62070]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 649.214405] env[62070]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 649.214405] env[62070]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 18b8d97b-3f4c-445e-8136-989a05f41994, please check neutron logs for more information. 
[ 649.214405] env[62070]: ERROR nova.compute.manager [ 649.214405] env[62070]: Traceback (most recent call last): [ 649.214405] env[62070]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 649.214405] env[62070]: listener.cb(fileno) [ 649.214405] env[62070]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 649.214405] env[62070]: result = function(*args, **kwargs) [ 649.214405] env[62070]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 649.214405] env[62070]: return func(*args, **kwargs) [ 649.214405] env[62070]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 649.214405] env[62070]: raise e [ 649.214405] env[62070]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 649.214405] env[62070]: nwinfo = self.network_api.allocate_for_instance( [ 649.214405] env[62070]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 649.214405] env[62070]: created_port_ids = self._update_ports_for_instance( [ 649.214405] env[62070]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 649.214405] env[62070]: with excutils.save_and_reraise_exception(): [ 649.214405] env[62070]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 649.214405] env[62070]: self.force_reraise() [ 649.214405] env[62070]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 649.214405] env[62070]: raise self.value [ 649.214405] env[62070]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 649.214405] env[62070]: updated_port = self._update_port( [ 649.214405] env[62070]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 649.214405] env[62070]: _ensure_no_port_binding_failure(port) [ 649.214405] env[62070]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 649.214405] env[62070]: raise exception.PortBindingFailed(port_id=port['id']) [ 649.215654] env[62070]: nova.exception.PortBindingFailed: Binding failed for port 18b8d97b-3f4c-445e-8136-989a05f41994, please check neutron logs for more information. [ 649.215654] env[62070]: Removing descriptor: 14 [ 649.215654] env[62070]: ERROR nova.compute.manager [None req-8576b7f8-0e27-4444-92a4-eb32008ddebf tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] [instance: b0134b0f-23b4-4d34-b144-71ccdd9fba72] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 18b8d97b-3f4c-445e-8136-989a05f41994, please check neutron logs for more information. 
[ 649.215654] env[62070]: ERROR nova.compute.manager [instance: b0134b0f-23b4-4d34-b144-71ccdd9fba72] Traceback (most recent call last): [ 649.215654] env[62070]: ERROR nova.compute.manager [instance: b0134b0f-23b4-4d34-b144-71ccdd9fba72] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 649.215654] env[62070]: ERROR nova.compute.manager [instance: b0134b0f-23b4-4d34-b144-71ccdd9fba72] yield resources [ 649.215654] env[62070]: ERROR nova.compute.manager [instance: b0134b0f-23b4-4d34-b144-71ccdd9fba72] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 649.215654] env[62070]: ERROR nova.compute.manager [instance: b0134b0f-23b4-4d34-b144-71ccdd9fba72] self.driver.spawn(context, instance, image_meta, [ 649.215654] env[62070]: ERROR nova.compute.manager [instance: b0134b0f-23b4-4d34-b144-71ccdd9fba72] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 649.215654] env[62070]: ERROR nova.compute.manager [instance: b0134b0f-23b4-4d34-b144-71ccdd9fba72] self._vmops.spawn(context, instance, image_meta, injected_files, [ 649.215654] env[62070]: ERROR nova.compute.manager [instance: b0134b0f-23b4-4d34-b144-71ccdd9fba72] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 649.215654] env[62070]: ERROR nova.compute.manager [instance: b0134b0f-23b4-4d34-b144-71ccdd9fba72] vm_ref = self.build_virtual_machine(instance, [ 649.216915] env[62070]: ERROR nova.compute.manager [instance: b0134b0f-23b4-4d34-b144-71ccdd9fba72] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 649.216915] env[62070]: ERROR nova.compute.manager [instance: b0134b0f-23b4-4d34-b144-71ccdd9fba72] vif_infos = vmwarevif.get_vif_info(self._session, [ 649.216915] env[62070]: ERROR nova.compute.manager [instance: b0134b0f-23b4-4d34-b144-71ccdd9fba72] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 649.216915] env[62070]: ERROR nova.compute.manager [instance: b0134b0f-23b4-4d34-b144-71ccdd9fba72] for vif in network_info: [ 649.216915] env[62070]: ERROR nova.compute.manager [instance: b0134b0f-23b4-4d34-b144-71ccdd9fba72] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 649.216915] env[62070]: ERROR nova.compute.manager [instance: b0134b0f-23b4-4d34-b144-71ccdd9fba72] return self._sync_wrapper(fn, *args, **kwargs) [ 649.216915] env[62070]: ERROR nova.compute.manager [instance: b0134b0f-23b4-4d34-b144-71ccdd9fba72] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 649.216915] env[62070]: ERROR nova.compute.manager [instance: b0134b0f-23b4-4d34-b144-71ccdd9fba72] self.wait() [ 649.216915] env[62070]: ERROR nova.compute.manager [instance: b0134b0f-23b4-4d34-b144-71ccdd9fba72] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 649.216915] env[62070]: ERROR nova.compute.manager [instance: b0134b0f-23b4-4d34-b144-71ccdd9fba72] self[:] = self._gt.wait() [ 649.216915] env[62070]: ERROR nova.compute.manager [instance: b0134b0f-23b4-4d34-b144-71ccdd9fba72] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 649.216915] env[62070]: ERROR nova.compute.manager [instance: b0134b0f-23b4-4d34-b144-71ccdd9fba72] return self._exit_event.wait() [ 649.216915] env[62070]: ERROR nova.compute.manager [instance: b0134b0f-23b4-4d34-b144-71ccdd9fba72] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 649.217236] env[62070]: ERROR 
nova.compute.manager [instance: b0134b0f-23b4-4d34-b144-71ccdd9fba72] result = hub.switch() [ 649.217236] env[62070]: ERROR nova.compute.manager [instance: b0134b0f-23b4-4d34-b144-71ccdd9fba72] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 649.217236] env[62070]: ERROR nova.compute.manager [instance: b0134b0f-23b4-4d34-b144-71ccdd9fba72] return self.greenlet.switch() [ 649.217236] env[62070]: ERROR nova.compute.manager [instance: b0134b0f-23b4-4d34-b144-71ccdd9fba72] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 649.217236] env[62070]: ERROR nova.compute.manager [instance: b0134b0f-23b4-4d34-b144-71ccdd9fba72] result = function(*args, **kwargs) [ 649.217236] env[62070]: ERROR nova.compute.manager [instance: b0134b0f-23b4-4d34-b144-71ccdd9fba72] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 649.217236] env[62070]: ERROR nova.compute.manager [instance: b0134b0f-23b4-4d34-b144-71ccdd9fba72] return func(*args, **kwargs) [ 649.217236] env[62070]: ERROR nova.compute.manager [instance: b0134b0f-23b4-4d34-b144-71ccdd9fba72] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 649.217236] env[62070]: ERROR nova.compute.manager [instance: b0134b0f-23b4-4d34-b144-71ccdd9fba72] raise e [ 649.217236] env[62070]: ERROR nova.compute.manager [instance: b0134b0f-23b4-4d34-b144-71ccdd9fba72] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 649.217236] env[62070]: ERROR nova.compute.manager [instance: b0134b0f-23b4-4d34-b144-71ccdd9fba72] nwinfo = self.network_api.allocate_for_instance( [ 649.217236] env[62070]: ERROR nova.compute.manager [instance: b0134b0f-23b4-4d34-b144-71ccdd9fba72] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 649.217236] env[62070]: ERROR nova.compute.manager [instance: b0134b0f-23b4-4d34-b144-71ccdd9fba72] created_port_ids = self._update_ports_for_instance( [ 649.217960] env[62070]: ERROR nova.compute.manager [instance: b0134b0f-23b4-4d34-b144-71ccdd9fba72] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 649.217960] env[62070]: ERROR nova.compute.manager [instance: b0134b0f-23b4-4d34-b144-71ccdd9fba72] with excutils.save_and_reraise_exception(): [ 649.217960] env[62070]: ERROR nova.compute.manager [instance: b0134b0f-23b4-4d34-b144-71ccdd9fba72] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 649.217960] env[62070]: ERROR nova.compute.manager [instance: b0134b0f-23b4-4d34-b144-71ccdd9fba72] self.force_reraise() [ 649.217960] env[62070]: ERROR nova.compute.manager [instance: b0134b0f-23b4-4d34-b144-71ccdd9fba72] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 649.217960] env[62070]: ERROR nova.compute.manager [instance: b0134b0f-23b4-4d34-b144-71ccdd9fba72] raise self.value [ 649.217960] env[62070]: ERROR nova.compute.manager [instance: b0134b0f-23b4-4d34-b144-71ccdd9fba72] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 649.217960] env[62070]: ERROR nova.compute.manager [instance: b0134b0f-23b4-4d34-b144-71ccdd9fba72] updated_port = self._update_port( [ 649.217960] env[62070]: ERROR nova.compute.manager [instance: b0134b0f-23b4-4d34-b144-71ccdd9fba72] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 649.217960] 
env[62070]: ERROR nova.compute.manager [instance: b0134b0f-23b4-4d34-b144-71ccdd9fba72] _ensure_no_port_binding_failure(port) [ 649.217960] env[62070]: ERROR nova.compute.manager [instance: b0134b0f-23b4-4d34-b144-71ccdd9fba72] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 649.217960] env[62070]: ERROR nova.compute.manager [instance: b0134b0f-23b4-4d34-b144-71ccdd9fba72] raise exception.PortBindingFailed(port_id=port['id']) [ 649.218251] env[62070]: ERROR nova.compute.manager [instance: b0134b0f-23b4-4d34-b144-71ccdd9fba72] nova.exception.PortBindingFailed: Binding failed for port 18b8d97b-3f4c-445e-8136-989a05f41994, please check neutron logs for more information. [ 649.218251] env[62070]: ERROR nova.compute.manager [instance: b0134b0f-23b4-4d34-b144-71ccdd9fba72] [ 649.218251] env[62070]: INFO nova.compute.manager [None req-8576b7f8-0e27-4444-92a4-eb32008ddebf tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] [instance: b0134b0f-23b4-4d34-b144-71ccdd9fba72] Terminating instance [ 649.218646] env[62070]: DEBUG oslo_concurrency.lockutils [None req-8576b7f8-0e27-4444-92a4-eb32008ddebf tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] Acquiring lock "refresh_cache-b0134b0f-23b4-4d34-b144-71ccdd9fba72" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 649.218803] env[62070]: DEBUG oslo_concurrency.lockutils [None req-8576b7f8-0e27-4444-92a4-eb32008ddebf tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] Acquired lock "refresh_cache-b0134b0f-23b4-4d34-b144-71ccdd9fba72" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 649.218966] env[62070]: DEBUG nova.network.neutron [None req-8576b7f8-0e27-4444-92a4-eb32008ddebf tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] [instance: b0134b0f-23b4-4d34-b144-71ccdd9fba72] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 649.305994] env[62070]: DEBUG nova.compute.manager [req-46cea33f-ccb7-410d-9c69-1ac0cd3f7950 req-6365f8ee-4e91-4578-b4f2-4b6880c339a9 service nova] [instance: b0134b0f-23b4-4d34-b144-71ccdd9fba72] Received event network-changed-18b8d97b-3f4c-445e-8136-989a05f41994 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 649.306904] env[62070]: DEBUG nova.compute.manager [req-46cea33f-ccb7-410d-9c69-1ac0cd3f7950 req-6365f8ee-4e91-4578-b4f2-4b6880c339a9 service nova] [instance: b0134b0f-23b4-4d34-b144-71ccdd9fba72] Refreshing instance network info cache due to event network-changed-18b8d97b-3f4c-445e-8136-989a05f41994. 
{{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 649.307025] env[62070]: DEBUG oslo_concurrency.lockutils [req-46cea33f-ccb7-410d-9c69-1ac0cd3f7950 req-6365f8ee-4e91-4578-b4f2-4b6880c339a9 service nova] Acquiring lock "refresh_cache-b0134b0f-23b4-4d34-b144-71ccdd9fba72" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 649.365615] env[62070]: INFO nova.compute.manager [None req-96d2c368-fe9f-4ee6-a530-cc95bf35b01a tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 73ab65b7-32e7-4206-8f31-466085319c71] Took 1.03 seconds to deallocate network for instance. [ 649.588618] env[62070]: DEBUG oslo_concurrency.lockutils [None req-a4dda7df-1307-4390-b3d0-8b40b9135ae8 tempest-ImagesOneServerTestJSON-651636145 tempest-ImagesOneServerTestJSON-651636145-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.523s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 649.595070] env[62070]: DEBUG nova.compute.manager [None req-a4dda7df-1307-4390-b3d0-8b40b9135ae8 tempest-ImagesOneServerTestJSON-651636145 tempest-ImagesOneServerTestJSON-651636145-project-member] [instance: d41f73e0-a188-4cc4-8391-938178aad496] Start building networks asynchronously for instance. {{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 649.597365] env[62070]: DEBUG oslo_concurrency.lockutils [None req-5085d6a6-6f80-4417-bbf7-554feeae07b1 tempest-ServerActionsTestOtherB-843110490 tempest-ServerActionsTestOtherB-843110490-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.009s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 649.601455] env[62070]: INFO nova.compute.claims [None req-5085d6a6-6f80-4417-bbf7-554feeae07b1 tempest-ServerActionsTestOtherB-843110490 tempest-ServerActionsTestOtherB-843110490-project-member] [instance: a5b98f92-d287-4d40-8a21-d2de64026970] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 649.749130] env[62070]: DEBUG nova.network.neutron [None req-8576b7f8-0e27-4444-92a4-eb32008ddebf tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] [instance: b0134b0f-23b4-4d34-b144-71ccdd9fba72] Instance cache missing network info. 
{{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 649.808448] env[62070]: DEBUG oslo_concurrency.lockutils [None req-884b25a6-886e-4a2f-85f7-4f7a1048c76f tempest-ServersTestBootFromVolume-622206804 tempest-ServersTestBootFromVolume-622206804-project-member] Acquiring lock "dd5d90e8-964a-4e1c-a98a-bcba37a1d79e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 649.808448] env[62070]: DEBUG oslo_concurrency.lockutils [None req-884b25a6-886e-4a2f-85f7-4f7a1048c76f tempest-ServersTestBootFromVolume-622206804 tempest-ServersTestBootFromVolume-622206804-project-member] Lock "dd5d90e8-964a-4e1c-a98a-bcba37a1d79e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 649.825971] env[62070]: DEBUG nova.network.neutron [-] [instance: 95edf3d1-a987-4768-93be-1e045d7bfa99] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 649.923921] env[62070]: DEBUG nova.network.neutron [None req-8576b7f8-0e27-4444-92a4-eb32008ddebf tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] [instance: b0134b0f-23b4-4d34-b144-71ccdd9fba72] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 650.105843] env[62070]: DEBUG nova.compute.utils [None req-a4dda7df-1307-4390-b3d0-8b40b9135ae8 tempest-ImagesOneServerTestJSON-651636145 tempest-ImagesOneServerTestJSON-651636145-project-member] Using /dev/sd instead of None {{(pid=62070) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 650.108569] env[62070]: DEBUG nova.compute.manager [None req-a4dda7df-1307-4390-b3d0-8b40b9135ae8 tempest-ImagesOneServerTestJSON-651636145 tempest-ImagesOneServerTestJSON-651636145-project-member] [instance: d41f73e0-a188-4cc4-8391-938178aad496] Allocating IP information in the background. 
{{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 650.108668] env[62070]: DEBUG nova.network.neutron [None req-a4dda7df-1307-4390-b3d0-8b40b9135ae8 tempest-ImagesOneServerTestJSON-651636145 tempest-ImagesOneServerTestJSON-651636145-project-member] [instance: d41f73e0-a188-4cc4-8391-938178aad496] allocate_for_instance() {{(pid=62070) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 650.181102] env[62070]: DEBUG nova.policy [None req-a4dda7df-1307-4390-b3d0-8b40b9135ae8 tempest-ImagesOneServerTestJSON-651636145 tempest-ImagesOneServerTestJSON-651636145-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1a60b0e059244d48be51ce4cbae5bb34', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '20fb34475f1e4ed18137d82fbf88d2b8', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62070) authorize /opt/stack/nova/nova/policy.py:201}} [ 650.327867] env[62070]: INFO nova.compute.manager [-] [instance: 95edf3d1-a987-4768-93be-1e045d7bfa99] Took 1.91 seconds to deallocate network for instance. [ 650.333608] env[62070]: DEBUG nova.compute.claims [None req-c926e67b-5387-49dc-b89c-40a189a7f3d7 tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] [instance: 95edf3d1-a987-4768-93be-1e045d7bfa99] Aborting claim: {{(pid=62070) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 650.333798] env[62070]: DEBUG oslo_concurrency.lockutils [None req-c926e67b-5387-49dc-b89c-40a189a7f3d7 tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 650.430361] env[62070]: DEBUG oslo_concurrency.lockutils [None req-8576b7f8-0e27-4444-92a4-eb32008ddebf tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] Releasing lock "refresh_cache-b0134b0f-23b4-4d34-b144-71ccdd9fba72" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 650.431610] env[62070]: DEBUG nova.compute.manager [None req-8576b7f8-0e27-4444-92a4-eb32008ddebf tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] [instance: b0134b0f-23b4-4d34-b144-71ccdd9fba72] Start destroying the instance on the hypervisor. 
{{(pid=62070) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 650.432422] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-8576b7f8-0e27-4444-92a4-eb32008ddebf tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] [instance: b0134b0f-23b4-4d34-b144-71ccdd9fba72] Destroying instance {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 650.433231] env[62070]: DEBUG oslo_concurrency.lockutils [req-46cea33f-ccb7-410d-9c69-1ac0cd3f7950 req-6365f8ee-4e91-4578-b4f2-4b6880c339a9 service nova] Acquired lock "refresh_cache-b0134b0f-23b4-4d34-b144-71ccdd9fba72" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 650.436028] env[62070]: DEBUG nova.network.neutron [req-46cea33f-ccb7-410d-9c69-1ac0cd3f7950 req-6365f8ee-4e91-4578-b4f2-4b6880c339a9 service nova] [instance: b0134b0f-23b4-4d34-b144-71ccdd9fba72] Refreshing network info cache for port 18b8d97b-3f4c-445e-8136-989a05f41994 {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 650.436849] env[62070]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2c3616bd-044c-442f-9fc6-8dcdac1b5917 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.452206] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa7e42a0-409d-40a7-af21-2904ccca1895 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.481938] env[62070]: WARNING nova.virt.vmwareapi.vmops [None req-8576b7f8-0e27-4444-92a4-eb32008ddebf tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] [instance: b0134b0f-23b4-4d34-b144-71ccdd9fba72] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance b0134b0f-23b4-4d34-b144-71ccdd9fba72 could not be found. [ 650.482446] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-8576b7f8-0e27-4444-92a4-eb32008ddebf tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] [instance: b0134b0f-23b4-4d34-b144-71ccdd9fba72] Instance destroyed {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 650.483263] env[62070]: INFO nova.compute.manager [None req-8576b7f8-0e27-4444-92a4-eb32008ddebf tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] [instance: b0134b0f-23b4-4d34-b144-71ccdd9fba72] Took 0.05 seconds to destroy the instance on the hypervisor. [ 650.486071] env[62070]: DEBUG oslo.service.loopingcall [None req-8576b7f8-0e27-4444-92a4-eb32008ddebf tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 650.486071] env[62070]: INFO nova.scheduler.client.report [None req-96d2c368-fe9f-4ee6-a530-cc95bf35b01a tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Deleted allocations for instance 73ab65b7-32e7-4206-8f31-466085319c71 [ 650.492095] env[62070]: DEBUG nova.compute.manager [-] [instance: b0134b0f-23b4-4d34-b144-71ccdd9fba72] Deallocating network for instance {{(pid=62070) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 650.492311] env[62070]: DEBUG nova.network.neutron [-] [instance: b0134b0f-23b4-4d34-b144-71ccdd9fba72] deallocate_for_instance() {{(pid=62070) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 650.522450] env[62070]: DEBUG nova.network.neutron [-] [instance: b0134b0f-23b4-4d34-b144-71ccdd9fba72] Instance cache missing network info. {{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 650.611190] env[62070]: DEBUG nova.compute.manager [None req-a4dda7df-1307-4390-b3d0-8b40b9135ae8 tempest-ImagesOneServerTestJSON-651636145 tempest-ImagesOneServerTestJSON-651636145-project-member] [instance: d41f73e0-a188-4cc4-8391-938178aad496] Start building block device mappings for instance. {{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 650.925503] env[62070]: DEBUG nova.network.neutron [None req-a4dda7df-1307-4390-b3d0-8b40b9135ae8 tempest-ImagesOneServerTestJSON-651636145 tempest-ImagesOneServerTestJSON-651636145-project-member] [instance: d41f73e0-a188-4cc4-8391-938178aad496] Successfully created port: e9b88278-987c-4e58-b15c-0dc5b6239a91 {{(pid=62070) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 650.965030] env[62070]: DEBUG nova.network.neutron [req-46cea33f-ccb7-410d-9c69-1ac0cd3f7950 req-6365f8ee-4e91-4578-b4f2-4b6880c339a9 service nova] [instance: b0134b0f-23b4-4d34-b144-71ccdd9fba72] Instance cache missing network info. 
{{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 651.004482] env[62070]: DEBUG oslo_concurrency.lockutils [None req-96d2c368-fe9f-4ee6-a530-cc95bf35b01a tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Lock "73ab65b7-32e7-4206-8f31-466085319c71" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 68.284s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 651.025540] env[62070]: DEBUG nova.network.neutron [-] [instance: b0134b0f-23b4-4d34-b144-71ccdd9fba72] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 651.061479] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e84a075b-74a5-48bd-abb6-428777d2674b {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.071728] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84d1e4f4-07fe-4642-b206-8d5612baa468 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.105422] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ece71b20-f6ae-44c2-98a4-6db4d6bc7b3b {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.122036] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2cd6a70c-9f30-40d7-8c82-21a1ced99dc9 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.134406] env[62070]: DEBUG nova.compute.provider_tree [None req-5085d6a6-6f80-4417-bbf7-554feeae07b1 tempest-ServerActionsTestOtherB-843110490 tempest-ServerActionsTestOtherB-843110490-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 651.201226] env[62070]: DEBUG nova.network.neutron [req-46cea33f-ccb7-410d-9c69-1ac0cd3f7950 req-6365f8ee-4e91-4578-b4f2-4b6880c339a9 service nova] [instance: b0134b0f-23b4-4d34-b144-71ccdd9fba72] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 651.504221] env[62070]: DEBUG nova.compute.manager [req-7cc244e2-159c-4aaf-9255-d331e2ef47da req-9bf0c70d-69b2-4888-9917-fb2ba487bf94 service nova] [instance: b0134b0f-23b4-4d34-b144-71ccdd9fba72] Received event network-vif-deleted-18b8d97b-3f4c-445e-8136-989a05f41994 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 651.508118] env[62070]: DEBUG nova.compute.manager [None req-1a6044f5-8e8b-4bc9-a7d0-857f0899f9f6 tempest-DeleteServersAdminTestJSON-428050376 tempest-DeleteServersAdminTestJSON-428050376-project-member] [instance: a71c58e7-89db-4ad2-92e0-5379b04b751c] Starting instance... {{(pid=62070) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 651.528052] env[62070]: INFO nova.compute.manager [-] [instance: b0134b0f-23b4-4d34-b144-71ccdd9fba72] Took 1.04 seconds to deallocate network for instance. 
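Each of the PortBindingFailed tracebacks in this log bottoms out in _ensure_no_port_binding_failure (nova/network/neutron.py, line 294 in these records) raising nova.exception.PortBindingFailed for the port that Neutron could not bind. What follows is a minimal sketch, not the Nova source itself, of the kind of check that produces these errors, assuming the usual Neutron convention that a port which no mechanism driver could bind comes back with binding:vif_type set to 'binding_failed'; the function name, constant, and exception class below are simplified stand-ins for illustration only.

    # Sketch only: simplified stand-ins, assuming Neutron reports a failed
    # binding via binding:vif_type == 'binding_failed' on the port dict.
    VIF_TYPE_BINDING_FAILED = 'binding_failed'

    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__(
                f"Binding failed for port {port_id}, "
                "please check neutron logs for more information.")

    def ensure_no_port_binding_failure(port: dict) -> None:
        # If Neutron could not bind the port on the target host, surface it
        # to the caller instead of continuing to build the VIF for the VM.
        if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
            raise PortBindingFailed(port_id=port['id'])

    # Example with the port from the records above, as a failed binding
    # would be returned by Neutron:
    try:
        ensure_no_port_binding_failure(
            {'id': '18b8d97b-3f4c-445e-8136-989a05f41994',
             'binding:vif_type': VIF_TYPE_BINDING_FAILED})
    except PortBindingFailed as exc:
        print(exc)  # matches the message format seen in the tracebacks

As the exception message says, the reason the binding failed (missing segment, agent down, unsupported VNIC type, and so on) is only visible on the Neutron side, which is why the compute log shows the same traceback repeatedly while the root cause lives in the neutron-server logs for that port ID.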
[ 651.530422] env[62070]: DEBUG nova.compute.claims [None req-8576b7f8-0e27-4444-92a4-eb32008ddebf tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] [instance: b0134b0f-23b4-4d34-b144-71ccdd9fba72] Aborting claim: {{(pid=62070) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 651.530589] env[62070]: DEBUG oslo_concurrency.lockutils [None req-8576b7f8-0e27-4444-92a4-eb32008ddebf tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 651.625113] env[62070]: DEBUG nova.compute.manager [None req-a4dda7df-1307-4390-b3d0-8b40b9135ae8 tempest-ImagesOneServerTestJSON-651636145 tempest-ImagesOneServerTestJSON-651636145-project-member] [instance: d41f73e0-a188-4cc4-8391-938178aad496] Start spawning the instance on the hypervisor. {{(pid=62070) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 651.638288] env[62070]: DEBUG nova.scheduler.client.report [None req-5085d6a6-6f80-4417-bbf7-554feeae07b1 tempest-ServerActionsTestOtherB-843110490 tempest-ServerActionsTestOtherB-843110490-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 651.661388] env[62070]: DEBUG nova.virt.hardware [None req-a4dda7df-1307-4390-b3d0-8b40b9135ae8 tempest-ImagesOneServerTestJSON-651636145 tempest-ImagesOneServerTestJSON-651636145-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T09:21:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T09:21:17Z,direct_url=,disk_format='vmdk',id=43ea607c-7ece-4601-9b11-75c6a16aa7dd,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9d42cb2bbadf40d6b35f237f71234611',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T09:21:18Z,virtual_size=,visibility=), allow threads: False {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 651.661661] env[62070]: DEBUG nova.virt.hardware [None req-a4dda7df-1307-4390-b3d0-8b40b9135ae8 tempest-ImagesOneServerTestJSON-651636145 tempest-ImagesOneServerTestJSON-651636145-project-member] Flavor limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 651.661817] env[62070]: DEBUG nova.virt.hardware [None req-a4dda7df-1307-4390-b3d0-8b40b9135ae8 tempest-ImagesOneServerTestJSON-651636145 tempest-ImagesOneServerTestJSON-651636145-project-member] Image limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:352}} [ 651.662130] env[62070]: DEBUG nova.virt.hardware [None req-a4dda7df-1307-4390-b3d0-8b40b9135ae8 tempest-ImagesOneServerTestJSON-651636145 tempest-ImagesOneServerTestJSON-651636145-project-member] Flavor pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 651.662204] env[62070]: DEBUG nova.virt.hardware [None req-a4dda7df-1307-4390-b3d0-8b40b9135ae8 tempest-ImagesOneServerTestJSON-651636145 tempest-ImagesOneServerTestJSON-651636145-project-member] Image pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 651.662292] env[62070]: DEBUG nova.virt.hardware [None req-a4dda7df-1307-4390-b3d0-8b40b9135ae8 tempest-ImagesOneServerTestJSON-651636145 tempest-ImagesOneServerTestJSON-651636145-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 651.662532] env[62070]: DEBUG nova.virt.hardware [None req-a4dda7df-1307-4390-b3d0-8b40b9135ae8 tempest-ImagesOneServerTestJSON-651636145 tempest-ImagesOneServerTestJSON-651636145-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 651.662637] env[62070]: DEBUG nova.virt.hardware [None req-a4dda7df-1307-4390-b3d0-8b40b9135ae8 tempest-ImagesOneServerTestJSON-651636145 tempest-ImagesOneServerTestJSON-651636145-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 651.662832] env[62070]: DEBUG nova.virt.hardware [None req-a4dda7df-1307-4390-b3d0-8b40b9135ae8 tempest-ImagesOneServerTestJSON-651636145 tempest-ImagesOneServerTestJSON-651636145-project-member] Got 1 possible topologies {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 651.663103] env[62070]: DEBUG nova.virt.hardware [None req-a4dda7df-1307-4390-b3d0-8b40b9135ae8 tempest-ImagesOneServerTestJSON-651636145 tempest-ImagesOneServerTestJSON-651636145-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 651.663202] env[62070]: DEBUG nova.virt.hardware [None req-a4dda7df-1307-4390-b3d0-8b40b9135ae8 tempest-ImagesOneServerTestJSON-651636145 tempest-ImagesOneServerTestJSON-651636145-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 651.664064] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfd0be6f-43d9-4991-89cc-7e5459ca943b {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.673495] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ab9720a-9550-45bd-9660-66053348ef3b {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.704231] env[62070]: DEBUG oslo_concurrency.lockutils [req-46cea33f-ccb7-410d-9c69-1ac0cd3f7950 req-6365f8ee-4e91-4578-b4f2-4b6880c339a9 service nova] Releasing lock "refresh_cache-b0134b0f-23b4-4d34-b144-71ccdd9fba72" 
{{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 652.052232] env[62070]: DEBUG oslo_concurrency.lockutils [None req-1a6044f5-8e8b-4bc9-a7d0-857f0899f9f6 tempest-DeleteServersAdminTestJSON-428050376 tempest-DeleteServersAdminTestJSON-428050376-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 652.145783] env[62070]: DEBUG oslo_concurrency.lockutils [None req-5085d6a6-6f80-4417-bbf7-554feeae07b1 tempest-ServerActionsTestOtherB-843110490 tempest-ServerActionsTestOtherB-843110490-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.550s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 652.146455] env[62070]: DEBUG nova.compute.manager [None req-5085d6a6-6f80-4417-bbf7-554feeae07b1 tempest-ServerActionsTestOtherB-843110490 tempest-ServerActionsTestOtherB-843110490-project-member] [instance: a5b98f92-d287-4d40-8a21-d2de64026970] Start building networks asynchronously for instance. {{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 652.150927] env[62070]: DEBUG oslo_concurrency.lockutils [None req-defc523d-479c-483f-91ae-c073714ce1fe tempest-FloatingIPsAssociationNegativeTestJSON-743310595 tempest-FloatingIPsAssociationNegativeTestJSON-743310595-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 15.728s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 652.632330] env[62070]: DEBUG nova.compute.manager [req-3681859a-8272-4bac-baeb-4238a9fc92b8 req-8a0f5c77-0a61-4c5a-887e-191e706368c5 service nova] [instance: d41f73e0-a188-4cc4-8391-938178aad496] Received event network-changed-e9b88278-987c-4e58-b15c-0dc5b6239a91 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 652.632557] env[62070]: DEBUG nova.compute.manager [req-3681859a-8272-4bac-baeb-4238a9fc92b8 req-8a0f5c77-0a61-4c5a-887e-191e706368c5 service nova] [instance: d41f73e0-a188-4cc4-8391-938178aad496] Refreshing instance network info cache due to event network-changed-e9b88278-987c-4e58-b15c-0dc5b6239a91. 
{{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 652.632831] env[62070]: DEBUG oslo_concurrency.lockutils [req-3681859a-8272-4bac-baeb-4238a9fc92b8 req-8a0f5c77-0a61-4c5a-887e-191e706368c5 service nova] Acquiring lock "refresh_cache-d41f73e0-a188-4cc4-8391-938178aad496" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 652.632999] env[62070]: DEBUG oslo_concurrency.lockutils [req-3681859a-8272-4bac-baeb-4238a9fc92b8 req-8a0f5c77-0a61-4c5a-887e-191e706368c5 service nova] Acquired lock "refresh_cache-d41f73e0-a188-4cc4-8391-938178aad496" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 652.635480] env[62070]: DEBUG nova.network.neutron [req-3681859a-8272-4bac-baeb-4238a9fc92b8 req-8a0f5c77-0a61-4c5a-887e-191e706368c5 service nova] [instance: d41f73e0-a188-4cc4-8391-938178aad496] Refreshing network info cache for port e9b88278-987c-4e58-b15c-0dc5b6239a91 {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 652.655587] env[62070]: DEBUG nova.compute.utils [None req-5085d6a6-6f80-4417-bbf7-554feeae07b1 tempest-ServerActionsTestOtherB-843110490 tempest-ServerActionsTestOtherB-843110490-project-member] Using /dev/sd instead of None {{(pid=62070) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 652.662389] env[62070]: DEBUG nova.compute.manager [None req-5085d6a6-6f80-4417-bbf7-554feeae07b1 tempest-ServerActionsTestOtherB-843110490 tempest-ServerActionsTestOtherB-843110490-project-member] [instance: a5b98f92-d287-4d40-8a21-d2de64026970] Allocating IP information in the background. {{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 652.663208] env[62070]: DEBUG nova.network.neutron [None req-5085d6a6-6f80-4417-bbf7-554feeae07b1 tempest-ServerActionsTestOtherB-843110490 tempest-ServerActionsTestOtherB-843110490-project-member] [instance: a5b98f92-d287-4d40-8a21-d2de64026970] allocate_for_instance() {{(pid=62070) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 652.669129] env[62070]: DEBUG oslo_concurrency.lockutils [None req-682a69a1-3ef5-481f-9200-81623493ef57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Acquiring lock "13e3576e-4f4c-4541-a637-daa124cbf8dd" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 652.669575] env[62070]: DEBUG oslo_concurrency.lockutils [None req-682a69a1-3ef5-481f-9200-81623493ef57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Lock "13e3576e-4f4c-4541-a637-daa124cbf8dd" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 652.724744] env[62070]: DEBUG nova.policy [None req-5085d6a6-6f80-4417-bbf7-554feeae07b1 tempest-ServerActionsTestOtherB-843110490 tempest-ServerActionsTestOtherB-843110490-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4467ecd75565460483b0462360c0fff9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd3169557e2614d4ca57474ab24b59294', 
'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62070) authorize /opt/stack/nova/nova/policy.py:201}} [ 652.879080] env[62070]: ERROR nova.compute.manager [None req-a4dda7df-1307-4390-b3d0-8b40b9135ae8 tempest-ImagesOneServerTestJSON-651636145 tempest-ImagesOneServerTestJSON-651636145-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port e9b88278-987c-4e58-b15c-0dc5b6239a91, please check neutron logs for more information. [ 652.879080] env[62070]: ERROR nova.compute.manager Traceback (most recent call last): [ 652.879080] env[62070]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 652.879080] env[62070]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 652.879080] env[62070]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 652.879080] env[62070]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 652.879080] env[62070]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 652.879080] env[62070]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 652.879080] env[62070]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 652.879080] env[62070]: ERROR nova.compute.manager self.force_reraise() [ 652.879080] env[62070]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 652.879080] env[62070]: ERROR nova.compute.manager raise self.value [ 652.879080] env[62070]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 652.879080] env[62070]: ERROR nova.compute.manager updated_port = self._update_port( [ 652.879080] env[62070]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 652.879080] env[62070]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 652.879538] env[62070]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 652.879538] env[62070]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 652.879538] env[62070]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port e9b88278-987c-4e58-b15c-0dc5b6239a91, please check neutron logs for more information. 
[ 652.879538] env[62070]: ERROR nova.compute.manager [ 652.879538] env[62070]: Traceback (most recent call last): [ 652.879538] env[62070]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 652.879538] env[62070]: listener.cb(fileno) [ 652.879538] env[62070]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 652.879538] env[62070]: result = function(*args, **kwargs) [ 652.879538] env[62070]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 652.879538] env[62070]: return func(*args, **kwargs) [ 652.879538] env[62070]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 652.879538] env[62070]: raise e [ 652.879538] env[62070]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 652.879538] env[62070]: nwinfo = self.network_api.allocate_for_instance( [ 652.879538] env[62070]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 652.879538] env[62070]: created_port_ids = self._update_ports_for_instance( [ 652.879538] env[62070]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 652.879538] env[62070]: with excutils.save_and_reraise_exception(): [ 652.879538] env[62070]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 652.879538] env[62070]: self.force_reraise() [ 652.879538] env[62070]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 652.879538] env[62070]: raise self.value [ 652.879538] env[62070]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 652.879538] env[62070]: updated_port = self._update_port( [ 652.879538] env[62070]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 652.879538] env[62070]: _ensure_no_port_binding_failure(port) [ 652.879538] env[62070]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 652.879538] env[62070]: raise exception.PortBindingFailed(port_id=port['id']) [ 652.880237] env[62070]: nova.exception.PortBindingFailed: Binding failed for port e9b88278-987c-4e58-b15c-0dc5b6239a91, please check neutron logs for more information. [ 652.880237] env[62070]: Removing descriptor: 16 [ 652.880425] env[62070]: ERROR nova.compute.manager [None req-a4dda7df-1307-4390-b3d0-8b40b9135ae8 tempest-ImagesOneServerTestJSON-651636145 tempest-ImagesOneServerTestJSON-651636145-project-member] [instance: d41f73e0-a188-4cc4-8391-938178aad496] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port e9b88278-987c-4e58-b15c-0dc5b6239a91, please check neutron logs for more information. 
[ 652.880425] env[62070]: ERROR nova.compute.manager [instance: d41f73e0-a188-4cc4-8391-938178aad496] Traceback (most recent call last): [ 652.880425] env[62070]: ERROR nova.compute.manager [instance: d41f73e0-a188-4cc4-8391-938178aad496] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 652.880425] env[62070]: ERROR nova.compute.manager [instance: d41f73e0-a188-4cc4-8391-938178aad496] yield resources [ 652.880425] env[62070]: ERROR nova.compute.manager [instance: d41f73e0-a188-4cc4-8391-938178aad496] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 652.880425] env[62070]: ERROR nova.compute.manager [instance: d41f73e0-a188-4cc4-8391-938178aad496] self.driver.spawn(context, instance, image_meta, [ 652.880425] env[62070]: ERROR nova.compute.manager [instance: d41f73e0-a188-4cc4-8391-938178aad496] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 652.880425] env[62070]: ERROR nova.compute.manager [instance: d41f73e0-a188-4cc4-8391-938178aad496] self._vmops.spawn(context, instance, image_meta, injected_files, [ 652.880425] env[62070]: ERROR nova.compute.manager [instance: d41f73e0-a188-4cc4-8391-938178aad496] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 652.880425] env[62070]: ERROR nova.compute.manager [instance: d41f73e0-a188-4cc4-8391-938178aad496] vm_ref = self.build_virtual_machine(instance, [ 652.880425] env[62070]: ERROR nova.compute.manager [instance: d41f73e0-a188-4cc4-8391-938178aad496] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 652.880681] env[62070]: ERROR nova.compute.manager [instance: d41f73e0-a188-4cc4-8391-938178aad496] vif_infos = vmwarevif.get_vif_info(self._session, [ 652.880681] env[62070]: ERROR nova.compute.manager [instance: d41f73e0-a188-4cc4-8391-938178aad496] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 652.880681] env[62070]: ERROR nova.compute.manager [instance: d41f73e0-a188-4cc4-8391-938178aad496] for vif in network_info: [ 652.880681] env[62070]: ERROR nova.compute.manager [instance: d41f73e0-a188-4cc4-8391-938178aad496] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 652.880681] env[62070]: ERROR nova.compute.manager [instance: d41f73e0-a188-4cc4-8391-938178aad496] return self._sync_wrapper(fn, *args, **kwargs) [ 652.880681] env[62070]: ERROR nova.compute.manager [instance: d41f73e0-a188-4cc4-8391-938178aad496] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 652.880681] env[62070]: ERROR nova.compute.manager [instance: d41f73e0-a188-4cc4-8391-938178aad496] self.wait() [ 652.880681] env[62070]: ERROR nova.compute.manager [instance: d41f73e0-a188-4cc4-8391-938178aad496] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 652.880681] env[62070]: ERROR nova.compute.manager [instance: d41f73e0-a188-4cc4-8391-938178aad496] self[:] = self._gt.wait() [ 652.880681] env[62070]: ERROR nova.compute.manager [instance: d41f73e0-a188-4cc4-8391-938178aad496] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 652.880681] env[62070]: ERROR nova.compute.manager [instance: d41f73e0-a188-4cc4-8391-938178aad496] return self._exit_event.wait() [ 652.880681] env[62070]: ERROR nova.compute.manager [instance: d41f73e0-a188-4cc4-8391-938178aad496] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 652.880681] env[62070]: ERROR 
nova.compute.manager [instance: d41f73e0-a188-4cc4-8391-938178aad496] result = hub.switch() [ 652.881117] env[62070]: ERROR nova.compute.manager [instance: d41f73e0-a188-4cc4-8391-938178aad496] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 652.881117] env[62070]: ERROR nova.compute.manager [instance: d41f73e0-a188-4cc4-8391-938178aad496] return self.greenlet.switch() [ 652.881117] env[62070]: ERROR nova.compute.manager [instance: d41f73e0-a188-4cc4-8391-938178aad496] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 652.881117] env[62070]: ERROR nova.compute.manager [instance: d41f73e0-a188-4cc4-8391-938178aad496] result = function(*args, **kwargs) [ 652.881117] env[62070]: ERROR nova.compute.manager [instance: d41f73e0-a188-4cc4-8391-938178aad496] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 652.881117] env[62070]: ERROR nova.compute.manager [instance: d41f73e0-a188-4cc4-8391-938178aad496] return func(*args, **kwargs) [ 652.881117] env[62070]: ERROR nova.compute.manager [instance: d41f73e0-a188-4cc4-8391-938178aad496] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 652.881117] env[62070]: ERROR nova.compute.manager [instance: d41f73e0-a188-4cc4-8391-938178aad496] raise e [ 652.881117] env[62070]: ERROR nova.compute.manager [instance: d41f73e0-a188-4cc4-8391-938178aad496] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 652.881117] env[62070]: ERROR nova.compute.manager [instance: d41f73e0-a188-4cc4-8391-938178aad496] nwinfo = self.network_api.allocate_for_instance( [ 652.881117] env[62070]: ERROR nova.compute.manager [instance: d41f73e0-a188-4cc4-8391-938178aad496] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 652.881117] env[62070]: ERROR nova.compute.manager [instance: d41f73e0-a188-4cc4-8391-938178aad496] created_port_ids = self._update_ports_for_instance( [ 652.881117] env[62070]: ERROR nova.compute.manager [instance: d41f73e0-a188-4cc4-8391-938178aad496] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 652.881501] env[62070]: ERROR nova.compute.manager [instance: d41f73e0-a188-4cc4-8391-938178aad496] with excutils.save_and_reraise_exception(): [ 652.881501] env[62070]: ERROR nova.compute.manager [instance: d41f73e0-a188-4cc4-8391-938178aad496] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 652.881501] env[62070]: ERROR nova.compute.manager [instance: d41f73e0-a188-4cc4-8391-938178aad496] self.force_reraise() [ 652.881501] env[62070]: ERROR nova.compute.manager [instance: d41f73e0-a188-4cc4-8391-938178aad496] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 652.881501] env[62070]: ERROR nova.compute.manager [instance: d41f73e0-a188-4cc4-8391-938178aad496] raise self.value [ 652.881501] env[62070]: ERROR nova.compute.manager [instance: d41f73e0-a188-4cc4-8391-938178aad496] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 652.881501] env[62070]: ERROR nova.compute.manager [instance: d41f73e0-a188-4cc4-8391-938178aad496] updated_port = self._update_port( [ 652.881501] env[62070]: ERROR nova.compute.manager [instance: d41f73e0-a188-4cc4-8391-938178aad496] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 652.881501] 
env[62070]: ERROR nova.compute.manager [instance: d41f73e0-a188-4cc4-8391-938178aad496] _ensure_no_port_binding_failure(port) [ 652.881501] env[62070]: ERROR nova.compute.manager [instance: d41f73e0-a188-4cc4-8391-938178aad496] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 652.881501] env[62070]: ERROR nova.compute.manager [instance: d41f73e0-a188-4cc4-8391-938178aad496] raise exception.PortBindingFailed(port_id=port['id']) [ 652.881501] env[62070]: ERROR nova.compute.manager [instance: d41f73e0-a188-4cc4-8391-938178aad496] nova.exception.PortBindingFailed: Binding failed for port e9b88278-987c-4e58-b15c-0dc5b6239a91, please check neutron logs for more information. [ 652.881501] env[62070]: ERROR nova.compute.manager [instance: d41f73e0-a188-4cc4-8391-938178aad496] [ 652.881819] env[62070]: INFO nova.compute.manager [None req-a4dda7df-1307-4390-b3d0-8b40b9135ae8 tempest-ImagesOneServerTestJSON-651636145 tempest-ImagesOneServerTestJSON-651636145-project-member] [instance: d41f73e0-a188-4cc4-8391-938178aad496] Terminating instance [ 652.883418] env[62070]: DEBUG oslo_concurrency.lockutils [None req-a4dda7df-1307-4390-b3d0-8b40b9135ae8 tempest-ImagesOneServerTestJSON-651636145 tempest-ImagesOneServerTestJSON-651636145-project-member] Acquiring lock "refresh_cache-d41f73e0-a188-4cc4-8391-938178aad496" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 653.088809] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8143b9b-e203-4c31-abcc-33f81f816cf8 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.097761] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6e5c33a-d14d-42de-b44a-a273d230f6d4 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.128498] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb017215-1bfc-41b0-bcd8-83f880005d39 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.140505] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7877dfd3-9476-4fbb-8341-336b49674295 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.154141] env[62070]: DEBUG nova.compute.provider_tree [None req-defc523d-479c-483f-91ae-c073714ce1fe tempest-FloatingIPsAssociationNegativeTestJSON-743310595 tempest-FloatingIPsAssociationNegativeTestJSON-743310595-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 653.163314] env[62070]: DEBUG nova.compute.manager [None req-5085d6a6-6f80-4417-bbf7-554feeae07b1 tempest-ServerActionsTestOtherB-843110490 tempest-ServerActionsTestOtherB-843110490-project-member] [instance: a5b98f92-d287-4d40-8a21-d2de64026970] Start building block device mappings for instance. 
{{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 653.171291] env[62070]: DEBUG nova.network.neutron [req-3681859a-8272-4bac-baeb-4238a9fc92b8 req-8a0f5c77-0a61-4c5a-887e-191e706368c5 service nova] [instance: d41f73e0-a188-4cc4-8391-938178aad496] Instance cache missing network info. {{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 653.296705] env[62070]: DEBUG nova.network.neutron [req-3681859a-8272-4bac-baeb-4238a9fc92b8 req-8a0f5c77-0a61-4c5a-887e-191e706368c5 service nova] [instance: d41f73e0-a188-4cc4-8391-938178aad496] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 653.303321] env[62070]: DEBUG nova.network.neutron [None req-5085d6a6-6f80-4417-bbf7-554feeae07b1 tempest-ServerActionsTestOtherB-843110490 tempest-ServerActionsTestOtherB-843110490-project-member] [instance: a5b98f92-d287-4d40-8a21-d2de64026970] Successfully created port: bfdbfcd7-c0eb-419c-a533-8f9444d52664 {{(pid=62070) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 653.659226] env[62070]: DEBUG nova.scheduler.client.report [None req-defc523d-479c-483f-91ae-c073714ce1fe tempest-FloatingIPsAssociationNegativeTestJSON-743310595 tempest-FloatingIPsAssociationNegativeTestJSON-743310595-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 653.754217] env[62070]: DEBUG oslo_concurrency.lockutils [None req-c8619bac-3476-49c3-92b7-dcaac00d2762 tempest-ServersTestFqdnHostnames-1065104165 tempest-ServersTestFqdnHostnames-1065104165-project-member] Acquiring lock "748c94c7-1233-44f4-a71a-176b26518399" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 653.755959] env[62070]: DEBUG oslo_concurrency.lockutils [None req-c8619bac-3476-49c3-92b7-dcaac00d2762 tempest-ServersTestFqdnHostnames-1065104165 tempest-ServersTestFqdnHostnames-1065104165-project-member] Lock "748c94c7-1233-44f4-a71a-176b26518399" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 653.800292] env[62070]: DEBUG oslo_concurrency.lockutils [req-3681859a-8272-4bac-baeb-4238a9fc92b8 req-8a0f5c77-0a61-4c5a-887e-191e706368c5 service nova] Releasing lock "refresh_cache-d41f73e0-a188-4cc4-8391-938178aad496" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 653.800702] env[62070]: DEBUG oslo_concurrency.lockutils [None req-a4dda7df-1307-4390-b3d0-8b40b9135ae8 tempest-ImagesOneServerTestJSON-651636145 tempest-ImagesOneServerTestJSON-651636145-project-member] Acquired lock "refresh_cache-d41f73e0-a188-4cc4-8391-938178aad496" {{(pid=62070) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 653.801440] env[62070]: DEBUG nova.network.neutron [None req-a4dda7df-1307-4390-b3d0-8b40b9135ae8 tempest-ImagesOneServerTestJSON-651636145 tempest-ImagesOneServerTestJSON-651636145-project-member] [instance: d41f73e0-a188-4cc4-8391-938178aad496] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 654.085050] env[62070]: DEBUG nova.compute.manager [req-47163baa-0868-4070-8056-c99596680fb3 req-e6847639-372e-494c-9e43-bf9dcea15e8c service nova] [instance: a5b98f92-d287-4d40-8a21-d2de64026970] Received event network-changed-bfdbfcd7-c0eb-419c-a533-8f9444d52664 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 654.085237] env[62070]: DEBUG nova.compute.manager [req-47163baa-0868-4070-8056-c99596680fb3 req-e6847639-372e-494c-9e43-bf9dcea15e8c service nova] [instance: a5b98f92-d287-4d40-8a21-d2de64026970] Refreshing instance network info cache due to event network-changed-bfdbfcd7-c0eb-419c-a533-8f9444d52664. {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 654.085435] env[62070]: DEBUG oslo_concurrency.lockutils [req-47163baa-0868-4070-8056-c99596680fb3 req-e6847639-372e-494c-9e43-bf9dcea15e8c service nova] Acquiring lock "refresh_cache-a5b98f92-d287-4d40-8a21-d2de64026970" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 654.085561] env[62070]: DEBUG oslo_concurrency.lockutils [req-47163baa-0868-4070-8056-c99596680fb3 req-e6847639-372e-494c-9e43-bf9dcea15e8c service nova] Acquired lock "refresh_cache-a5b98f92-d287-4d40-8a21-d2de64026970" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 654.085736] env[62070]: DEBUG nova.network.neutron [req-47163baa-0868-4070-8056-c99596680fb3 req-e6847639-372e-494c-9e43-bf9dcea15e8c service nova] [instance: a5b98f92-d287-4d40-8a21-d2de64026970] Refreshing network info cache for port bfdbfcd7-c0eb-419c-a533-8f9444d52664 {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 654.166311] env[62070]: DEBUG oslo_concurrency.lockutils [None req-defc523d-479c-483f-91ae-c073714ce1fe tempest-FloatingIPsAssociationNegativeTestJSON-743310595 tempest-FloatingIPsAssociationNegativeTestJSON-743310595-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.015s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 654.166965] env[62070]: ERROR nova.compute.manager [None req-defc523d-479c-483f-91ae-c073714ce1fe tempest-FloatingIPsAssociationNegativeTestJSON-743310595 tempest-FloatingIPsAssociationNegativeTestJSON-743310595-project-member] [instance: 73cb4a44-a4d9-4c8c-8472-f99b5d449cec] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port e004e4fb-3439-483e-936e-b19dd561ecb0, please check neutron logs for more information. 
[ 654.166965] env[62070]: ERROR nova.compute.manager [instance: 73cb4a44-a4d9-4c8c-8472-f99b5d449cec] Traceback (most recent call last): [ 654.166965] env[62070]: ERROR nova.compute.manager [instance: 73cb4a44-a4d9-4c8c-8472-f99b5d449cec] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 654.166965] env[62070]: ERROR nova.compute.manager [instance: 73cb4a44-a4d9-4c8c-8472-f99b5d449cec] self.driver.spawn(context, instance, image_meta, [ 654.166965] env[62070]: ERROR nova.compute.manager [instance: 73cb4a44-a4d9-4c8c-8472-f99b5d449cec] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 654.166965] env[62070]: ERROR nova.compute.manager [instance: 73cb4a44-a4d9-4c8c-8472-f99b5d449cec] self._vmops.spawn(context, instance, image_meta, injected_files, [ 654.166965] env[62070]: ERROR nova.compute.manager [instance: 73cb4a44-a4d9-4c8c-8472-f99b5d449cec] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 654.166965] env[62070]: ERROR nova.compute.manager [instance: 73cb4a44-a4d9-4c8c-8472-f99b5d449cec] vm_ref = self.build_virtual_machine(instance, [ 654.166965] env[62070]: ERROR nova.compute.manager [instance: 73cb4a44-a4d9-4c8c-8472-f99b5d449cec] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 654.166965] env[62070]: ERROR nova.compute.manager [instance: 73cb4a44-a4d9-4c8c-8472-f99b5d449cec] vif_infos = vmwarevif.get_vif_info(self._session, [ 654.166965] env[62070]: ERROR nova.compute.manager [instance: 73cb4a44-a4d9-4c8c-8472-f99b5d449cec] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 654.167315] env[62070]: ERROR nova.compute.manager [instance: 73cb4a44-a4d9-4c8c-8472-f99b5d449cec] for vif in network_info: [ 654.167315] env[62070]: ERROR nova.compute.manager [instance: 73cb4a44-a4d9-4c8c-8472-f99b5d449cec] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 654.167315] env[62070]: ERROR nova.compute.manager [instance: 73cb4a44-a4d9-4c8c-8472-f99b5d449cec] return self._sync_wrapper(fn, *args, **kwargs) [ 654.167315] env[62070]: ERROR nova.compute.manager [instance: 73cb4a44-a4d9-4c8c-8472-f99b5d449cec] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 654.167315] env[62070]: ERROR nova.compute.manager [instance: 73cb4a44-a4d9-4c8c-8472-f99b5d449cec] self.wait() [ 654.167315] env[62070]: ERROR nova.compute.manager [instance: 73cb4a44-a4d9-4c8c-8472-f99b5d449cec] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 654.167315] env[62070]: ERROR nova.compute.manager [instance: 73cb4a44-a4d9-4c8c-8472-f99b5d449cec] self[:] = self._gt.wait() [ 654.167315] env[62070]: ERROR nova.compute.manager [instance: 73cb4a44-a4d9-4c8c-8472-f99b5d449cec] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 654.167315] env[62070]: ERROR nova.compute.manager [instance: 73cb4a44-a4d9-4c8c-8472-f99b5d449cec] return self._exit_event.wait() [ 654.167315] env[62070]: ERROR nova.compute.manager [instance: 73cb4a44-a4d9-4c8c-8472-f99b5d449cec] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 654.167315] env[62070]: ERROR nova.compute.manager [instance: 73cb4a44-a4d9-4c8c-8472-f99b5d449cec] result = hub.switch() [ 654.167315] env[62070]: ERROR nova.compute.manager [instance: 73cb4a44-a4d9-4c8c-8472-f99b5d449cec] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
654.167315] env[62070]: ERROR nova.compute.manager [instance: 73cb4a44-a4d9-4c8c-8472-f99b5d449cec] return self.greenlet.switch() [ 654.167680] env[62070]: ERROR nova.compute.manager [instance: 73cb4a44-a4d9-4c8c-8472-f99b5d449cec] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 654.167680] env[62070]: ERROR nova.compute.manager [instance: 73cb4a44-a4d9-4c8c-8472-f99b5d449cec] result = function(*args, **kwargs) [ 654.167680] env[62070]: ERROR nova.compute.manager [instance: 73cb4a44-a4d9-4c8c-8472-f99b5d449cec] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 654.167680] env[62070]: ERROR nova.compute.manager [instance: 73cb4a44-a4d9-4c8c-8472-f99b5d449cec] return func(*args, **kwargs) [ 654.167680] env[62070]: ERROR nova.compute.manager [instance: 73cb4a44-a4d9-4c8c-8472-f99b5d449cec] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 654.167680] env[62070]: ERROR nova.compute.manager [instance: 73cb4a44-a4d9-4c8c-8472-f99b5d449cec] raise e [ 654.167680] env[62070]: ERROR nova.compute.manager [instance: 73cb4a44-a4d9-4c8c-8472-f99b5d449cec] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 654.167680] env[62070]: ERROR nova.compute.manager [instance: 73cb4a44-a4d9-4c8c-8472-f99b5d449cec] nwinfo = self.network_api.allocate_for_instance( [ 654.167680] env[62070]: ERROR nova.compute.manager [instance: 73cb4a44-a4d9-4c8c-8472-f99b5d449cec] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 654.167680] env[62070]: ERROR nova.compute.manager [instance: 73cb4a44-a4d9-4c8c-8472-f99b5d449cec] created_port_ids = self._update_ports_for_instance( [ 654.167680] env[62070]: ERROR nova.compute.manager [instance: 73cb4a44-a4d9-4c8c-8472-f99b5d449cec] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 654.167680] env[62070]: ERROR nova.compute.manager [instance: 73cb4a44-a4d9-4c8c-8472-f99b5d449cec] with excutils.save_and_reraise_exception(): [ 654.167680] env[62070]: ERROR nova.compute.manager [instance: 73cb4a44-a4d9-4c8c-8472-f99b5d449cec] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 654.168050] env[62070]: ERROR nova.compute.manager [instance: 73cb4a44-a4d9-4c8c-8472-f99b5d449cec] self.force_reraise() [ 654.168050] env[62070]: ERROR nova.compute.manager [instance: 73cb4a44-a4d9-4c8c-8472-f99b5d449cec] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 654.168050] env[62070]: ERROR nova.compute.manager [instance: 73cb4a44-a4d9-4c8c-8472-f99b5d449cec] raise self.value [ 654.168050] env[62070]: ERROR nova.compute.manager [instance: 73cb4a44-a4d9-4c8c-8472-f99b5d449cec] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 654.168050] env[62070]: ERROR nova.compute.manager [instance: 73cb4a44-a4d9-4c8c-8472-f99b5d449cec] updated_port = self._update_port( [ 654.168050] env[62070]: ERROR nova.compute.manager [instance: 73cb4a44-a4d9-4c8c-8472-f99b5d449cec] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 654.168050] env[62070]: ERROR nova.compute.manager [instance: 73cb4a44-a4d9-4c8c-8472-f99b5d449cec] _ensure_no_port_binding_failure(port) [ 654.168050] env[62070]: ERROR nova.compute.manager [instance: 73cb4a44-a4d9-4c8c-8472-f99b5d449cec] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 654.168050] env[62070]: ERROR nova.compute.manager [instance: 73cb4a44-a4d9-4c8c-8472-f99b5d449cec] raise exception.PortBindingFailed(port_id=port['id']) [ 654.168050] env[62070]: ERROR nova.compute.manager [instance: 73cb4a44-a4d9-4c8c-8472-f99b5d449cec] nova.exception.PortBindingFailed: Binding failed for port e004e4fb-3439-483e-936e-b19dd561ecb0, please check neutron logs for more information. [ 654.168050] env[62070]: ERROR nova.compute.manager [instance: 73cb4a44-a4d9-4c8c-8472-f99b5d449cec] [ 654.168349] env[62070]: DEBUG nova.compute.utils [None req-defc523d-479c-483f-91ae-c073714ce1fe tempest-FloatingIPsAssociationNegativeTestJSON-743310595 tempest-FloatingIPsAssociationNegativeTestJSON-743310595-project-member] [instance: 73cb4a44-a4d9-4c8c-8472-f99b5d449cec] Binding failed for port e004e4fb-3439-483e-936e-b19dd561ecb0, please check neutron logs for more information. {{(pid=62070) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 654.169087] env[62070]: DEBUG oslo_concurrency.lockutils [None req-97492129-d62b-43cc-b22d-f53a63d9b75b tempest-InstanceActionsTestJSON-809963800 tempest-InstanceActionsTestJSON-809963800-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.760s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 654.170763] env[62070]: INFO nova.compute.claims [None req-97492129-d62b-43cc-b22d-f53a63d9b75b tempest-InstanceActionsTestJSON-809963800 tempest-InstanceActionsTestJSON-809963800-project-member] [instance: adccca24-ed77-410b-8b69-19137cadafbd] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 654.173778] env[62070]: DEBUG nova.compute.manager [None req-defc523d-479c-483f-91ae-c073714ce1fe tempest-FloatingIPsAssociationNegativeTestJSON-743310595 tempest-FloatingIPsAssociationNegativeTestJSON-743310595-project-member] [instance: 73cb4a44-a4d9-4c8c-8472-f99b5d449cec] Build of instance 73cb4a44-a4d9-4c8c-8472-f99b5d449cec was re-scheduled: Binding failed for port e004e4fb-3439-483e-936e-b19dd561ecb0, please check neutron logs for more information. 
{{(pid=62070) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 654.177064] env[62070]: DEBUG nova.compute.manager [None req-defc523d-479c-483f-91ae-c073714ce1fe tempest-FloatingIPsAssociationNegativeTestJSON-743310595 tempest-FloatingIPsAssociationNegativeTestJSON-743310595-project-member] [instance: 73cb4a44-a4d9-4c8c-8472-f99b5d449cec] Unplugging VIFs for instance {{(pid=62070) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 654.177064] env[62070]: DEBUG oslo_concurrency.lockutils [None req-defc523d-479c-483f-91ae-c073714ce1fe tempest-FloatingIPsAssociationNegativeTestJSON-743310595 tempest-FloatingIPsAssociationNegativeTestJSON-743310595-project-member] Acquiring lock "refresh_cache-73cb4a44-a4d9-4c8c-8472-f99b5d449cec" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 654.177064] env[62070]: DEBUG oslo_concurrency.lockutils [None req-defc523d-479c-483f-91ae-c073714ce1fe tempest-FloatingIPsAssociationNegativeTestJSON-743310595 tempest-FloatingIPsAssociationNegativeTestJSON-743310595-project-member] Acquired lock "refresh_cache-73cb4a44-a4d9-4c8c-8472-f99b5d449cec" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 654.177064] env[62070]: DEBUG nova.network.neutron [None req-defc523d-479c-483f-91ae-c073714ce1fe tempest-FloatingIPsAssociationNegativeTestJSON-743310595 tempest-FloatingIPsAssociationNegativeTestJSON-743310595-project-member] [instance: 73cb4a44-a4d9-4c8c-8472-f99b5d449cec] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 654.177512] env[62070]: DEBUG nova.compute.manager [None req-5085d6a6-6f80-4417-bbf7-554feeae07b1 tempest-ServerActionsTestOtherB-843110490 tempest-ServerActionsTestOtherB-843110490-project-member] [instance: a5b98f92-d287-4d40-8a21-d2de64026970] Start spawning the instance on the hypervisor. 
{{(pid=62070) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 654.204337] env[62070]: DEBUG nova.virt.hardware [None req-5085d6a6-6f80-4417-bbf7-554feeae07b1 tempest-ServerActionsTestOtherB-843110490 tempest-ServerActionsTestOtherB-843110490-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T09:21:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T09:21:17Z,direct_url=,disk_format='vmdk',id=43ea607c-7ece-4601-9b11-75c6a16aa7dd,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9d42cb2bbadf40d6b35f237f71234611',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T09:21:18Z,virtual_size=,visibility=), allow threads: False {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 654.204337] env[62070]: DEBUG nova.virt.hardware [None req-5085d6a6-6f80-4417-bbf7-554feeae07b1 tempest-ServerActionsTestOtherB-843110490 tempest-ServerActionsTestOtherB-843110490-project-member] Flavor limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 654.204337] env[62070]: DEBUG nova.virt.hardware [None req-5085d6a6-6f80-4417-bbf7-554feeae07b1 tempest-ServerActionsTestOtherB-843110490 tempest-ServerActionsTestOtherB-843110490-project-member] Image limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 654.204542] env[62070]: DEBUG nova.virt.hardware [None req-5085d6a6-6f80-4417-bbf7-554feeae07b1 tempest-ServerActionsTestOtherB-843110490 tempest-ServerActionsTestOtherB-843110490-project-member] Flavor pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 654.204542] env[62070]: DEBUG nova.virt.hardware [None req-5085d6a6-6f80-4417-bbf7-554feeae07b1 tempest-ServerActionsTestOtherB-843110490 tempest-ServerActionsTestOtherB-843110490-project-member] Image pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 654.205083] env[62070]: DEBUG nova.virt.hardware [None req-5085d6a6-6f80-4417-bbf7-554feeae07b1 tempest-ServerActionsTestOtherB-843110490 tempest-ServerActionsTestOtherB-843110490-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 654.206620] env[62070]: DEBUG nova.virt.hardware [None req-5085d6a6-6f80-4417-bbf7-554feeae07b1 tempest-ServerActionsTestOtherB-843110490 tempest-ServerActionsTestOtherB-843110490-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 654.206620] env[62070]: DEBUG nova.virt.hardware [None req-5085d6a6-6f80-4417-bbf7-554feeae07b1 tempest-ServerActionsTestOtherB-843110490 tempest-ServerActionsTestOtherB-843110490-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 654.206620] env[62070]: DEBUG nova.virt.hardware [None 
req-5085d6a6-6f80-4417-bbf7-554feeae07b1 tempest-ServerActionsTestOtherB-843110490 tempest-ServerActionsTestOtherB-843110490-project-member] Got 1 possible topologies {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 654.206986] env[62070]: DEBUG nova.virt.hardware [None req-5085d6a6-6f80-4417-bbf7-554feeae07b1 tempest-ServerActionsTestOtherB-843110490 tempest-ServerActionsTestOtherB-843110490-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 654.207319] env[62070]: DEBUG nova.virt.hardware [None req-5085d6a6-6f80-4417-bbf7-554feeae07b1 tempest-ServerActionsTestOtherB-843110490 tempest-ServerActionsTestOtherB-843110490-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 654.208387] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afa1c41d-ca54-401b-94d4-96b42afa2010 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.219325] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8520d63a-6ae4-4a45-8385-e9b4dc13df72 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.239132] env[62070]: DEBUG oslo_service.periodic_task [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62070) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 654.239132] env[62070]: DEBUG oslo_service.periodic_task [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62070) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 654.291528] env[62070]: ERROR nova.compute.manager [None req-5085d6a6-6f80-4417-bbf7-554feeae07b1 tempest-ServerActionsTestOtherB-843110490 tempest-ServerActionsTestOtherB-843110490-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port bfdbfcd7-c0eb-419c-a533-8f9444d52664, please check neutron logs for more information. 
[ 654.291528] env[62070]: ERROR nova.compute.manager Traceback (most recent call last): [ 654.291528] env[62070]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 654.291528] env[62070]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 654.291528] env[62070]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 654.291528] env[62070]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 654.291528] env[62070]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 654.291528] env[62070]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 654.291528] env[62070]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 654.291528] env[62070]: ERROR nova.compute.manager self.force_reraise() [ 654.291528] env[62070]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 654.291528] env[62070]: ERROR nova.compute.manager raise self.value [ 654.291528] env[62070]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 654.291528] env[62070]: ERROR nova.compute.manager updated_port = self._update_port( [ 654.291528] env[62070]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 654.291528] env[62070]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 654.291967] env[62070]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 654.291967] env[62070]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 654.291967] env[62070]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port bfdbfcd7-c0eb-419c-a533-8f9444d52664, please check neutron logs for more information. 
[ 654.291967] env[62070]: ERROR nova.compute.manager [ 654.291967] env[62070]: Traceback (most recent call last): [ 654.292102] env[62070]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 654.292102] env[62070]: listener.cb(fileno) [ 654.292102] env[62070]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 654.292102] env[62070]: result = function(*args, **kwargs) [ 654.292102] env[62070]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 654.292102] env[62070]: return func(*args, **kwargs) [ 654.292102] env[62070]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 654.292102] env[62070]: raise e [ 654.292102] env[62070]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 654.292102] env[62070]: nwinfo = self.network_api.allocate_for_instance( [ 654.292102] env[62070]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 654.292102] env[62070]: created_port_ids = self._update_ports_for_instance( [ 654.292102] env[62070]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 654.292102] env[62070]: with excutils.save_and_reraise_exception(): [ 654.292102] env[62070]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 654.292102] env[62070]: self.force_reraise() [ 654.292102] env[62070]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 654.292102] env[62070]: raise self.value [ 654.292102] env[62070]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 654.292102] env[62070]: updated_port = self._update_port( [ 654.292102] env[62070]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 654.292102] env[62070]: _ensure_no_port_binding_failure(port) [ 654.292102] env[62070]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 654.292102] env[62070]: raise exception.PortBindingFailed(port_id=port['id']) [ 654.292102] env[62070]: nova.exception.PortBindingFailed: Binding failed for port bfdbfcd7-c0eb-419c-a533-8f9444d52664, please check neutron logs for more information. [ 654.292102] env[62070]: Removing descriptor: 14 [ 654.292790] env[62070]: ERROR nova.compute.manager [None req-5085d6a6-6f80-4417-bbf7-554feeae07b1 tempest-ServerActionsTestOtherB-843110490 tempest-ServerActionsTestOtherB-843110490-project-member] [instance: a5b98f92-d287-4d40-8a21-d2de64026970] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port bfdbfcd7-c0eb-419c-a533-8f9444d52664, please check neutron logs for more information. 
[ 654.292790] env[62070]: ERROR nova.compute.manager [instance: a5b98f92-d287-4d40-8a21-d2de64026970] Traceback (most recent call last): [ 654.292790] env[62070]: ERROR nova.compute.manager [instance: a5b98f92-d287-4d40-8a21-d2de64026970] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 654.292790] env[62070]: ERROR nova.compute.manager [instance: a5b98f92-d287-4d40-8a21-d2de64026970] yield resources [ 654.292790] env[62070]: ERROR nova.compute.manager [instance: a5b98f92-d287-4d40-8a21-d2de64026970] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 654.292790] env[62070]: ERROR nova.compute.manager [instance: a5b98f92-d287-4d40-8a21-d2de64026970] self.driver.spawn(context, instance, image_meta, [ 654.292790] env[62070]: ERROR nova.compute.manager [instance: a5b98f92-d287-4d40-8a21-d2de64026970] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 654.292790] env[62070]: ERROR nova.compute.manager [instance: a5b98f92-d287-4d40-8a21-d2de64026970] self._vmops.spawn(context, instance, image_meta, injected_files, [ 654.292790] env[62070]: ERROR nova.compute.manager [instance: a5b98f92-d287-4d40-8a21-d2de64026970] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 654.292790] env[62070]: ERROR nova.compute.manager [instance: a5b98f92-d287-4d40-8a21-d2de64026970] vm_ref = self.build_virtual_machine(instance, [ 654.292790] env[62070]: ERROR nova.compute.manager [instance: a5b98f92-d287-4d40-8a21-d2de64026970] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 654.293138] env[62070]: ERROR nova.compute.manager [instance: a5b98f92-d287-4d40-8a21-d2de64026970] vif_infos = vmwarevif.get_vif_info(self._session, [ 654.293138] env[62070]: ERROR nova.compute.manager [instance: a5b98f92-d287-4d40-8a21-d2de64026970] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 654.293138] env[62070]: ERROR nova.compute.manager [instance: a5b98f92-d287-4d40-8a21-d2de64026970] for vif in network_info: [ 654.293138] env[62070]: ERROR nova.compute.manager [instance: a5b98f92-d287-4d40-8a21-d2de64026970] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 654.293138] env[62070]: ERROR nova.compute.manager [instance: a5b98f92-d287-4d40-8a21-d2de64026970] return self._sync_wrapper(fn, *args, **kwargs) [ 654.293138] env[62070]: ERROR nova.compute.manager [instance: a5b98f92-d287-4d40-8a21-d2de64026970] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 654.293138] env[62070]: ERROR nova.compute.manager [instance: a5b98f92-d287-4d40-8a21-d2de64026970] self.wait() [ 654.293138] env[62070]: ERROR nova.compute.manager [instance: a5b98f92-d287-4d40-8a21-d2de64026970] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 654.293138] env[62070]: ERROR nova.compute.manager [instance: a5b98f92-d287-4d40-8a21-d2de64026970] self[:] = self._gt.wait() [ 654.293138] env[62070]: ERROR nova.compute.manager [instance: a5b98f92-d287-4d40-8a21-d2de64026970] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 654.293138] env[62070]: ERROR nova.compute.manager [instance: a5b98f92-d287-4d40-8a21-d2de64026970] return self._exit_event.wait() [ 654.293138] env[62070]: ERROR nova.compute.manager [instance: a5b98f92-d287-4d40-8a21-d2de64026970] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 654.293138] env[62070]: ERROR 
nova.compute.manager [instance: a5b98f92-d287-4d40-8a21-d2de64026970] result = hub.switch() [ 654.293471] env[62070]: ERROR nova.compute.manager [instance: a5b98f92-d287-4d40-8a21-d2de64026970] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 654.293471] env[62070]: ERROR nova.compute.manager [instance: a5b98f92-d287-4d40-8a21-d2de64026970] return self.greenlet.switch() [ 654.293471] env[62070]: ERROR nova.compute.manager [instance: a5b98f92-d287-4d40-8a21-d2de64026970] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 654.293471] env[62070]: ERROR nova.compute.manager [instance: a5b98f92-d287-4d40-8a21-d2de64026970] result = function(*args, **kwargs) [ 654.293471] env[62070]: ERROR nova.compute.manager [instance: a5b98f92-d287-4d40-8a21-d2de64026970] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 654.293471] env[62070]: ERROR nova.compute.manager [instance: a5b98f92-d287-4d40-8a21-d2de64026970] return func(*args, **kwargs) [ 654.293471] env[62070]: ERROR nova.compute.manager [instance: a5b98f92-d287-4d40-8a21-d2de64026970] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 654.293471] env[62070]: ERROR nova.compute.manager [instance: a5b98f92-d287-4d40-8a21-d2de64026970] raise e [ 654.293471] env[62070]: ERROR nova.compute.manager [instance: a5b98f92-d287-4d40-8a21-d2de64026970] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 654.293471] env[62070]: ERROR nova.compute.manager [instance: a5b98f92-d287-4d40-8a21-d2de64026970] nwinfo = self.network_api.allocate_for_instance( [ 654.293471] env[62070]: ERROR nova.compute.manager [instance: a5b98f92-d287-4d40-8a21-d2de64026970] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 654.293471] env[62070]: ERROR nova.compute.manager [instance: a5b98f92-d287-4d40-8a21-d2de64026970] created_port_ids = self._update_ports_for_instance( [ 654.293471] env[62070]: ERROR nova.compute.manager [instance: a5b98f92-d287-4d40-8a21-d2de64026970] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 654.293800] env[62070]: ERROR nova.compute.manager [instance: a5b98f92-d287-4d40-8a21-d2de64026970] with excutils.save_and_reraise_exception(): [ 654.293800] env[62070]: ERROR nova.compute.manager [instance: a5b98f92-d287-4d40-8a21-d2de64026970] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 654.293800] env[62070]: ERROR nova.compute.manager [instance: a5b98f92-d287-4d40-8a21-d2de64026970] self.force_reraise() [ 654.293800] env[62070]: ERROR nova.compute.manager [instance: a5b98f92-d287-4d40-8a21-d2de64026970] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 654.293800] env[62070]: ERROR nova.compute.manager [instance: a5b98f92-d287-4d40-8a21-d2de64026970] raise self.value [ 654.293800] env[62070]: ERROR nova.compute.manager [instance: a5b98f92-d287-4d40-8a21-d2de64026970] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 654.293800] env[62070]: ERROR nova.compute.manager [instance: a5b98f92-d287-4d40-8a21-d2de64026970] updated_port = self._update_port( [ 654.293800] env[62070]: ERROR nova.compute.manager [instance: a5b98f92-d287-4d40-8a21-d2de64026970] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 654.293800] 
env[62070]: ERROR nova.compute.manager [instance: a5b98f92-d287-4d40-8a21-d2de64026970] _ensure_no_port_binding_failure(port) [ 654.293800] env[62070]: ERROR nova.compute.manager [instance: a5b98f92-d287-4d40-8a21-d2de64026970] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 654.293800] env[62070]: ERROR nova.compute.manager [instance: a5b98f92-d287-4d40-8a21-d2de64026970] raise exception.PortBindingFailed(port_id=port['id']) [ 654.293800] env[62070]: ERROR nova.compute.manager [instance: a5b98f92-d287-4d40-8a21-d2de64026970] nova.exception.PortBindingFailed: Binding failed for port bfdbfcd7-c0eb-419c-a533-8f9444d52664, please check neutron logs for more information. [ 654.293800] env[62070]: ERROR nova.compute.manager [instance: a5b98f92-d287-4d40-8a21-d2de64026970] [ 654.294149] env[62070]: INFO nova.compute.manager [None req-5085d6a6-6f80-4417-bbf7-554feeae07b1 tempest-ServerActionsTestOtherB-843110490 tempest-ServerActionsTestOtherB-843110490-project-member] [instance: a5b98f92-d287-4d40-8a21-d2de64026970] Terminating instance [ 654.296156] env[62070]: DEBUG oslo_concurrency.lockutils [None req-5085d6a6-6f80-4417-bbf7-554feeae07b1 tempest-ServerActionsTestOtherB-843110490 tempest-ServerActionsTestOtherB-843110490-project-member] Acquiring lock "refresh_cache-a5b98f92-d287-4d40-8a21-d2de64026970" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 654.318209] env[62070]: DEBUG nova.network.neutron [None req-a4dda7df-1307-4390-b3d0-8b40b9135ae8 tempest-ImagesOneServerTestJSON-651636145 tempest-ImagesOneServerTestJSON-651636145-project-member] [instance: d41f73e0-a188-4cc4-8391-938178aad496] Instance cache missing network info. {{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 654.407567] env[62070]: DEBUG nova.network.neutron [None req-a4dda7df-1307-4390-b3d0-8b40b9135ae8 tempest-ImagesOneServerTestJSON-651636145 tempest-ImagesOneServerTestJSON-651636145-project-member] [instance: d41f73e0-a188-4cc4-8391-938178aad496] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 654.608891] env[62070]: DEBUG nova.network.neutron [req-47163baa-0868-4070-8056-c99596680fb3 req-e6847639-372e-494c-9e43-bf9dcea15e8c service nova] [instance: a5b98f92-d287-4d40-8a21-d2de64026970] Instance cache missing network info. 
{{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 654.654959] env[62070]: DEBUG nova.compute.manager [req-f610162c-469c-4af0-a023-5f4a3b7b6476 req-76a7feeb-5112-4d4a-a8a9-df8039d8575c service nova] [instance: d41f73e0-a188-4cc4-8391-938178aad496] Received event network-vif-deleted-e9b88278-987c-4e58-b15c-0dc5b6239a91 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 654.698259] env[62070]: DEBUG nova.network.neutron [req-47163baa-0868-4070-8056-c99596680fb3 req-e6847639-372e-494c-9e43-bf9dcea15e8c service nova] [instance: a5b98f92-d287-4d40-8a21-d2de64026970] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 654.705769] env[62070]: DEBUG nova.network.neutron [None req-defc523d-479c-483f-91ae-c073714ce1fe tempest-FloatingIPsAssociationNegativeTestJSON-743310595 tempest-FloatingIPsAssociationNegativeTestJSON-743310595-project-member] [instance: 73cb4a44-a4d9-4c8c-8472-f99b5d449cec] Instance cache missing network info. {{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 654.744928] env[62070]: DEBUG oslo_service.periodic_task [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62070) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 654.745200] env[62070]: DEBUG nova.compute.manager [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Starting heal instance info cache {{(pid=62070) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}} [ 654.745377] env[62070]: DEBUG nova.compute.manager [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Rebuilding the list of instances to heal {{(pid=62070) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 654.810197] env[62070]: DEBUG nova.network.neutron [None req-defc523d-479c-483f-91ae-c073714ce1fe tempest-FloatingIPsAssociationNegativeTestJSON-743310595 tempest-FloatingIPsAssociationNegativeTestJSON-743310595-project-member] [instance: 73cb4a44-a4d9-4c8c-8472-f99b5d449cec] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 654.910814] env[62070]: DEBUG oslo_concurrency.lockutils [None req-a4dda7df-1307-4390-b3d0-8b40b9135ae8 tempest-ImagesOneServerTestJSON-651636145 tempest-ImagesOneServerTestJSON-651636145-project-member] Releasing lock "refresh_cache-d41f73e0-a188-4cc4-8391-938178aad496" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 654.911296] env[62070]: DEBUG nova.compute.manager [None req-a4dda7df-1307-4390-b3d0-8b40b9135ae8 tempest-ImagesOneServerTestJSON-651636145 tempest-ImagesOneServerTestJSON-651636145-project-member] [instance: d41f73e0-a188-4cc4-8391-938178aad496] Start destroying the instance on the hypervisor. 
{{(pid=62070) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 654.911486] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-a4dda7df-1307-4390-b3d0-8b40b9135ae8 tempest-ImagesOneServerTestJSON-651636145 tempest-ImagesOneServerTestJSON-651636145-project-member] [instance: d41f73e0-a188-4cc4-8391-938178aad496] Destroying instance {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 654.911772] env[62070]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f0ffcd58-fa8d-48bc-a372-b716f29d8c6a {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.920877] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3d45b8b-ac3b-4b93-b4c8-1bf09b075e76 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.943145] env[62070]: WARNING nova.virt.vmwareapi.vmops [None req-a4dda7df-1307-4390-b3d0-8b40b9135ae8 tempest-ImagesOneServerTestJSON-651636145 tempest-ImagesOneServerTestJSON-651636145-project-member] [instance: d41f73e0-a188-4cc4-8391-938178aad496] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance d41f73e0-a188-4cc4-8391-938178aad496 could not be found. [ 654.943356] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-a4dda7df-1307-4390-b3d0-8b40b9135ae8 tempest-ImagesOneServerTestJSON-651636145 tempest-ImagesOneServerTestJSON-651636145-project-member] [instance: d41f73e0-a188-4cc4-8391-938178aad496] Instance destroyed {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 654.943534] env[62070]: INFO nova.compute.manager [None req-a4dda7df-1307-4390-b3d0-8b40b9135ae8 tempest-ImagesOneServerTestJSON-651636145 tempest-ImagesOneServerTestJSON-651636145-project-member] [instance: d41f73e0-a188-4cc4-8391-938178aad496] Took 0.03 seconds to destroy the instance on the hypervisor. [ 654.943763] env[62070]: DEBUG oslo.service.loopingcall [None req-a4dda7df-1307-4390-b3d0-8b40b9135ae8 tempest-ImagesOneServerTestJSON-651636145 tempest-ImagesOneServerTestJSON-651636145-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 654.943970] env[62070]: DEBUG nova.compute.manager [-] [instance: d41f73e0-a188-4cc4-8391-938178aad496] Deallocating network for instance {{(pid=62070) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 654.944078] env[62070]: DEBUG nova.network.neutron [-] [instance: d41f73e0-a188-4cc4-8391-938178aad496] deallocate_for_instance() {{(pid=62070) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 654.958114] env[62070]: DEBUG nova.network.neutron [-] [instance: d41f73e0-a188-4cc4-8391-938178aad496] Instance cache missing network info. 
{{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 655.201007] env[62070]: DEBUG oslo_concurrency.lockutils [req-47163baa-0868-4070-8056-c99596680fb3 req-e6847639-372e-494c-9e43-bf9dcea15e8c service nova] Releasing lock "refresh_cache-a5b98f92-d287-4d40-8a21-d2de64026970" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 655.201478] env[62070]: DEBUG oslo_concurrency.lockutils [None req-5085d6a6-6f80-4417-bbf7-554feeae07b1 tempest-ServerActionsTestOtherB-843110490 tempest-ServerActionsTestOtherB-843110490-project-member] Acquired lock "refresh_cache-a5b98f92-d287-4d40-8a21-d2de64026970" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 655.201667] env[62070]: DEBUG nova.network.neutron [None req-5085d6a6-6f80-4417-bbf7-554feeae07b1 tempest-ServerActionsTestOtherB-843110490 tempest-ServerActionsTestOtherB-843110490-project-member] [instance: a5b98f92-d287-4d40-8a21-d2de64026970] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 655.250794] env[62070]: DEBUG nova.compute.manager [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] [instance: 95edf3d1-a987-4768-93be-1e045d7bfa99] Skipping network cache update for instance because it is Building. {{(pid=62070) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 655.250961] env[62070]: DEBUG nova.compute.manager [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] [instance: b0134b0f-23b4-4d34-b144-71ccdd9fba72] Skipping network cache update for instance because it is Building. {{(pid=62070) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 655.251104] env[62070]: DEBUG nova.compute.manager [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] [instance: d41f73e0-a188-4cc4-8391-938178aad496] Skipping network cache update for instance because it is Building. {{(pid=62070) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 655.251273] env[62070]: DEBUG nova.compute.manager [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] [instance: a5b98f92-d287-4d40-8a21-d2de64026970] Skipping network cache update for instance because it is Building. {{(pid=62070) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 655.251591] env[62070]: DEBUG nova.compute.manager [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] [instance: adccca24-ed77-410b-8b69-19137cadafbd] Skipping network cache update for instance because it is Building. {{(pid=62070) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 655.251700] env[62070]: DEBUG nova.compute.manager [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Didn't find any instances for network info cache update. 
{{(pid=62070) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10014}} [ 655.252031] env[62070]: DEBUG oslo_service.periodic_task [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62070) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 655.252155] env[62070]: DEBUG oslo_service.periodic_task [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62070) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 655.252330] env[62070]: DEBUG oslo_service.periodic_task [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62070) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 655.252467] env[62070]: DEBUG oslo_service.periodic_task [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62070) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 655.252653] env[62070]: DEBUG oslo_service.periodic_task [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62070) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 655.252840] env[62070]: DEBUG oslo_service.periodic_task [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62070) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 655.252974] env[62070]: DEBUG nova.compute.manager [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62070) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}} [ 655.253142] env[62070]: DEBUG oslo_service.periodic_task [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62070) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 655.312837] env[62070]: DEBUG oslo_concurrency.lockutils [None req-defc523d-479c-483f-91ae-c073714ce1fe tempest-FloatingIPsAssociationNegativeTestJSON-743310595 tempest-FloatingIPsAssociationNegativeTestJSON-743310595-project-member] Releasing lock "refresh_cache-73cb4a44-a4d9-4c8c-8472-f99b5d449cec" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 655.313088] env[62070]: DEBUG nova.compute.manager [None req-defc523d-479c-483f-91ae-c073714ce1fe tempest-FloatingIPsAssociationNegativeTestJSON-743310595 tempest-FloatingIPsAssociationNegativeTestJSON-743310595-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62070) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 655.313272] env[62070]: DEBUG nova.compute.manager [None req-defc523d-479c-483f-91ae-c073714ce1fe tempest-FloatingIPsAssociationNegativeTestJSON-743310595 tempest-FloatingIPsAssociationNegativeTestJSON-743310595-project-member] [instance: 73cb4a44-a4d9-4c8c-8472-f99b5d449cec] Deallocating network for instance {{(pid=62070) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 655.313434] env[62070]: DEBUG nova.network.neutron [None req-defc523d-479c-483f-91ae-c073714ce1fe tempest-FloatingIPsAssociationNegativeTestJSON-743310595 tempest-FloatingIPsAssociationNegativeTestJSON-743310595-project-member] [instance: 73cb4a44-a4d9-4c8c-8472-f99b5d449cec] deallocate_for_instance() {{(pid=62070) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 655.333751] env[62070]: DEBUG nova.network.neutron [None req-defc523d-479c-483f-91ae-c073714ce1fe tempest-FloatingIPsAssociationNegativeTestJSON-743310595 tempest-FloatingIPsAssociationNegativeTestJSON-743310595-project-member] [instance: 73cb4a44-a4d9-4c8c-8472-f99b5d449cec] Instance cache missing network info. {{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 655.461470] env[62070]: DEBUG nova.network.neutron [-] [instance: d41f73e0-a188-4cc4-8391-938178aad496] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 655.556881] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b2589ad-725e-462d-a896-837aac81396f {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.565374] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-235828c5-dee2-40de-bf79-895b5bcb8f50 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.596152] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc75ae1b-2c72-4abe-a11d-81de410e3add {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.605438] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af7e34cf-5a4b-4422-b6d8-b7f17a4d4665 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.620853] env[62070]: DEBUG nova.compute.provider_tree [None req-97492129-d62b-43cc-b22d-f53a63d9b75b tempest-InstanceActionsTestJSON-809963800 tempest-InstanceActionsTestJSON-809963800-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 655.719047] env[62070]: DEBUG nova.network.neutron [None req-5085d6a6-6f80-4417-bbf7-554feeae07b1 tempest-ServerActionsTestOtherB-843110490 tempest-ServerActionsTestOtherB-843110490-project-member] [instance: a5b98f92-d287-4d40-8a21-d2de64026970] Instance cache missing network info. 
{{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 655.756749] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 655.827481] env[62070]: DEBUG nova.network.neutron [None req-5085d6a6-6f80-4417-bbf7-554feeae07b1 tempest-ServerActionsTestOtherB-843110490 tempest-ServerActionsTestOtherB-843110490-project-member] [instance: a5b98f92-d287-4d40-8a21-d2de64026970] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 655.838242] env[62070]: DEBUG nova.network.neutron [None req-defc523d-479c-483f-91ae-c073714ce1fe tempest-FloatingIPsAssociationNegativeTestJSON-743310595 tempest-FloatingIPsAssociationNegativeTestJSON-743310595-project-member] [instance: 73cb4a44-a4d9-4c8c-8472-f99b5d449cec] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 655.969412] env[62070]: INFO nova.compute.manager [-] [instance: d41f73e0-a188-4cc4-8391-938178aad496] Took 1.03 seconds to deallocate network for instance. [ 655.971712] env[62070]: DEBUG nova.compute.claims [None req-a4dda7df-1307-4390-b3d0-8b40b9135ae8 tempest-ImagesOneServerTestJSON-651636145 tempest-ImagesOneServerTestJSON-651636145-project-member] [instance: d41f73e0-a188-4cc4-8391-938178aad496] Aborting claim: {{(pid=62070) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 655.971893] env[62070]: DEBUG oslo_concurrency.lockutils [None req-a4dda7df-1307-4390-b3d0-8b40b9135ae8 tempest-ImagesOneServerTestJSON-651636145 tempest-ImagesOneServerTestJSON-651636145-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 656.108594] env[62070]: DEBUG nova.compute.manager [req-0000069e-62be-4dc8-92f3-53247ee46ffa req-8601ac80-b2a3-4f0c-8128-2c6501f92467 service nova] [instance: a5b98f92-d287-4d40-8a21-d2de64026970] Received event network-vif-deleted-bfdbfcd7-c0eb-419c-a533-8f9444d52664 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 656.123386] env[62070]: DEBUG nova.scheduler.client.report [None req-97492129-d62b-43cc-b22d-f53a63d9b75b tempest-InstanceActionsTestJSON-809963800 tempest-InstanceActionsTestJSON-809963800-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 656.332033] env[62070]: DEBUG oslo_concurrency.lockutils [None req-5085d6a6-6f80-4417-bbf7-554feeae07b1 tempest-ServerActionsTestOtherB-843110490 tempest-ServerActionsTestOtherB-843110490-project-member] Releasing lock "refresh_cache-a5b98f92-d287-4d40-8a21-d2de64026970" {{(pid=62070) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 656.332479] env[62070]: DEBUG nova.compute.manager [None req-5085d6a6-6f80-4417-bbf7-554feeae07b1 tempest-ServerActionsTestOtherB-843110490 tempest-ServerActionsTestOtherB-843110490-project-member] [instance: a5b98f92-d287-4d40-8a21-d2de64026970] Start destroying the instance on the hypervisor. {{(pid=62070) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 656.332677] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-5085d6a6-6f80-4417-bbf7-554feeae07b1 tempest-ServerActionsTestOtherB-843110490 tempest-ServerActionsTestOtherB-843110490-project-member] [instance: a5b98f92-d287-4d40-8a21-d2de64026970] Destroying instance {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 656.333081] env[62070]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-8f3a7605-c7ed-4bb9-95dd-0d9ef74c1b3e {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.342511] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2904c7ad-e047-4038-9bce-627d99fe82eb {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.353153] env[62070]: INFO nova.compute.manager [None req-defc523d-479c-483f-91ae-c073714ce1fe tempest-FloatingIPsAssociationNegativeTestJSON-743310595 tempest-FloatingIPsAssociationNegativeTestJSON-743310595-project-member] [instance: 73cb4a44-a4d9-4c8c-8472-f99b5d449cec] Took 1.04 seconds to deallocate network for instance. [ 656.371788] env[62070]: WARNING nova.virt.vmwareapi.vmops [None req-5085d6a6-6f80-4417-bbf7-554feeae07b1 tempest-ServerActionsTestOtherB-843110490 tempest-ServerActionsTestOtherB-843110490-project-member] [instance: a5b98f92-d287-4d40-8a21-d2de64026970] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance a5b98f92-d287-4d40-8a21-d2de64026970 could not be found. [ 656.371907] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-5085d6a6-6f80-4417-bbf7-554feeae07b1 tempest-ServerActionsTestOtherB-843110490 tempest-ServerActionsTestOtherB-843110490-project-member] [instance: a5b98f92-d287-4d40-8a21-d2de64026970] Instance destroyed {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 656.372088] env[62070]: INFO nova.compute.manager [None req-5085d6a6-6f80-4417-bbf7-554feeae07b1 tempest-ServerActionsTestOtherB-843110490 tempest-ServerActionsTestOtherB-843110490-project-member] [instance: a5b98f92-d287-4d40-8a21-d2de64026970] Took 0.04 seconds to destroy the instance on the hypervisor. [ 656.372351] env[62070]: DEBUG oslo.service.loopingcall [None req-5085d6a6-6f80-4417-bbf7-554feeae07b1 tempest-ServerActionsTestOtherB-843110490 tempest-ServerActionsTestOtherB-843110490-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 656.372573] env[62070]: DEBUG nova.compute.manager [-] [instance: a5b98f92-d287-4d40-8a21-d2de64026970] Deallocating network for instance {{(pid=62070) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 656.372656] env[62070]: DEBUG nova.network.neutron [-] [instance: a5b98f92-d287-4d40-8a21-d2de64026970] deallocate_for_instance() {{(pid=62070) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 656.398447] env[62070]: DEBUG nova.network.neutron [-] [instance: a5b98f92-d287-4d40-8a21-d2de64026970] Instance cache missing network info. {{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 656.628649] env[62070]: DEBUG oslo_concurrency.lockutils [None req-97492129-d62b-43cc-b22d-f53a63d9b75b tempest-InstanceActionsTestJSON-809963800 tempest-InstanceActionsTestJSON-809963800-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.459s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 656.629221] env[62070]: DEBUG nova.compute.manager [None req-97492129-d62b-43cc-b22d-f53a63d9b75b tempest-InstanceActionsTestJSON-809963800 tempest-InstanceActionsTestJSON-809963800-project-member] [instance: adccca24-ed77-410b-8b69-19137cadafbd] Start building networks asynchronously for instance. {{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 656.632012] env[62070]: DEBUG oslo_concurrency.lockutils [None req-ca5c3ba7-16ea-4ea4-a91b-9bf5923a33ab tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.166s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 656.633399] env[62070]: INFO nova.compute.claims [None req-ca5c3ba7-16ea-4ea4-a91b-9bf5923a33ab tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: efa18997-b502-4e2e-933a-a185ab9074d5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 656.901602] env[62070]: DEBUG nova.network.neutron [-] [instance: a5b98f92-d287-4d40-8a21-d2de64026970] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 657.137709] env[62070]: DEBUG nova.compute.utils [None req-97492129-d62b-43cc-b22d-f53a63d9b75b tempest-InstanceActionsTestJSON-809963800 tempest-InstanceActionsTestJSON-809963800-project-member] Using /dev/sd instead of None {{(pid=62070) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 657.141167] env[62070]: DEBUG nova.compute.manager [None req-97492129-d62b-43cc-b22d-f53a63d9b75b tempest-InstanceActionsTestJSON-809963800 tempest-InstanceActionsTestJSON-809963800-project-member] [instance: adccca24-ed77-410b-8b69-19137cadafbd] Allocating IP information in the background. 
{{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 657.142029] env[62070]: DEBUG nova.network.neutron [None req-97492129-d62b-43cc-b22d-f53a63d9b75b tempest-InstanceActionsTestJSON-809963800 tempest-InstanceActionsTestJSON-809963800-project-member] [instance: adccca24-ed77-410b-8b69-19137cadafbd] allocate_for_instance() {{(pid=62070) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 657.190981] env[62070]: DEBUG nova.policy [None req-97492129-d62b-43cc-b22d-f53a63d9b75b tempest-InstanceActionsTestJSON-809963800 tempest-InstanceActionsTestJSON-809963800-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '31e40d410f2647fdb02ae0c8ddb0f767', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ab0988822a1f498998b09b96b581a78d', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62070) authorize /opt/stack/nova/nova/policy.py:201}} [ 657.394331] env[62070]: INFO nova.scheduler.client.report [None req-defc523d-479c-483f-91ae-c073714ce1fe tempest-FloatingIPsAssociationNegativeTestJSON-743310595 tempest-FloatingIPsAssociationNegativeTestJSON-743310595-project-member] Deleted allocations for instance 73cb4a44-a4d9-4c8c-8472-f99b5d449cec [ 657.404075] env[62070]: INFO nova.compute.manager [-] [instance: a5b98f92-d287-4d40-8a21-d2de64026970] Took 1.03 seconds to deallocate network for instance. [ 657.408037] env[62070]: DEBUG nova.compute.claims [None req-5085d6a6-6f80-4417-bbf7-554feeae07b1 tempest-ServerActionsTestOtherB-843110490 tempest-ServerActionsTestOtherB-843110490-project-member] [instance: a5b98f92-d287-4d40-8a21-d2de64026970] Aborting claim: {{(pid=62070) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 657.408187] env[62070]: DEBUG oslo_concurrency.lockutils [None req-5085d6a6-6f80-4417-bbf7-554feeae07b1 tempest-ServerActionsTestOtherB-843110490 tempest-ServerActionsTestOtherB-843110490-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 657.534303] env[62070]: DEBUG nova.network.neutron [None req-97492129-d62b-43cc-b22d-f53a63d9b75b tempest-InstanceActionsTestJSON-809963800 tempest-InstanceActionsTestJSON-809963800-project-member] [instance: adccca24-ed77-410b-8b69-19137cadafbd] Successfully created port: ef6f1c75-e8f0-4dd1-bd35-95e061959be1 {{(pid=62070) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 657.645086] env[62070]: DEBUG nova.compute.manager [None req-97492129-d62b-43cc-b22d-f53a63d9b75b tempest-InstanceActionsTestJSON-809963800 tempest-InstanceActionsTestJSON-809963800-project-member] [instance: adccca24-ed77-410b-8b69-19137cadafbd] Start building block device mappings for instance. 
{{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 657.904729] env[62070]: DEBUG oslo_concurrency.lockutils [None req-defc523d-479c-483f-91ae-c073714ce1fe tempest-FloatingIPsAssociationNegativeTestJSON-743310595 tempest-FloatingIPsAssociationNegativeTestJSON-743310595-project-member] Lock "73cb4a44-a4d9-4c8c-8472-f99b5d449cec" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 73.581s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 658.073923] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fdefd50-5226-4da3-8378-1bef9b05198a {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.084932] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d0e28a0-783e-426a-abea-57ff1250c63d {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.114271] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8823c32-d850-44bb-868b-bd39d6cdd260 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.127462] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8f6cd85-be07-4e58-9b28-0e58212f5734 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.138962] env[62070]: DEBUG nova.compute.provider_tree [None req-ca5c3ba7-16ea-4ea4-a91b-9bf5923a33ab tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 658.407281] env[62070]: DEBUG nova.compute.manager [None req-b596e62b-91d4-4a7e-a714-611f34958df6 tempest-FloatingIPsAssociationTestJSON-86130693 tempest-FloatingIPsAssociationTestJSON-86130693-project-member] [instance: 2c1dfa78-d300-4505-9f87-8e11a4973af3] Starting instance... {{(pid=62070) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 658.598626] env[62070]: DEBUG nova.compute.manager [req-7768b444-220a-41ed-b333-fb5220b40644 req-2c1d9278-2225-41b1-af91-b806d930d2fc service nova] [instance: adccca24-ed77-410b-8b69-19137cadafbd] Received event network-changed-ef6f1c75-e8f0-4dd1-bd35-95e061959be1 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 658.598860] env[62070]: DEBUG nova.compute.manager [req-7768b444-220a-41ed-b333-fb5220b40644 req-2c1d9278-2225-41b1-af91-b806d930d2fc service nova] [instance: adccca24-ed77-410b-8b69-19137cadafbd] Refreshing instance network info cache due to event network-changed-ef6f1c75-e8f0-4dd1-bd35-95e061959be1. 
{{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 658.599148] env[62070]: DEBUG oslo_concurrency.lockutils [req-7768b444-220a-41ed-b333-fb5220b40644 req-2c1d9278-2225-41b1-af91-b806d930d2fc service nova] Acquiring lock "refresh_cache-adccca24-ed77-410b-8b69-19137cadafbd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 658.599312] env[62070]: DEBUG oslo_concurrency.lockutils [req-7768b444-220a-41ed-b333-fb5220b40644 req-2c1d9278-2225-41b1-af91-b806d930d2fc service nova] Acquired lock "refresh_cache-adccca24-ed77-410b-8b69-19137cadafbd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 658.599386] env[62070]: DEBUG nova.network.neutron [req-7768b444-220a-41ed-b333-fb5220b40644 req-2c1d9278-2225-41b1-af91-b806d930d2fc service nova] [instance: adccca24-ed77-410b-8b69-19137cadafbd] Refreshing network info cache for port ef6f1c75-e8f0-4dd1-bd35-95e061959be1 {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 658.642999] env[62070]: DEBUG nova.scheduler.client.report [None req-ca5c3ba7-16ea-4ea4-a91b-9bf5923a33ab tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 658.655772] env[62070]: DEBUG nova.compute.manager [None req-97492129-d62b-43cc-b22d-f53a63d9b75b tempest-InstanceActionsTestJSON-809963800 tempest-InstanceActionsTestJSON-809963800-project-member] [instance: adccca24-ed77-410b-8b69-19137cadafbd] Start spawning the instance on the hypervisor. 
{{(pid=62070) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 658.678422] env[62070]: DEBUG nova.virt.hardware [None req-97492129-d62b-43cc-b22d-f53a63d9b75b tempest-InstanceActionsTestJSON-809963800 tempest-InstanceActionsTestJSON-809963800-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T09:21:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T09:21:17Z,direct_url=,disk_format='vmdk',id=43ea607c-7ece-4601-9b11-75c6a16aa7dd,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9d42cb2bbadf40d6b35f237f71234611',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T09:21:18Z,virtual_size=,visibility=), allow threads: False {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 658.678681] env[62070]: DEBUG nova.virt.hardware [None req-97492129-d62b-43cc-b22d-f53a63d9b75b tempest-InstanceActionsTestJSON-809963800 tempest-InstanceActionsTestJSON-809963800-project-member] Flavor limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 658.678843] env[62070]: DEBUG nova.virt.hardware [None req-97492129-d62b-43cc-b22d-f53a63d9b75b tempest-InstanceActionsTestJSON-809963800 tempest-InstanceActionsTestJSON-809963800-project-member] Image limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 658.679051] env[62070]: DEBUG nova.virt.hardware [None req-97492129-d62b-43cc-b22d-f53a63d9b75b tempest-InstanceActionsTestJSON-809963800 tempest-InstanceActionsTestJSON-809963800-project-member] Flavor pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 658.679191] env[62070]: DEBUG nova.virt.hardware [None req-97492129-d62b-43cc-b22d-f53a63d9b75b tempest-InstanceActionsTestJSON-809963800 tempest-InstanceActionsTestJSON-809963800-project-member] Image pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 658.679733] env[62070]: DEBUG nova.virt.hardware [None req-97492129-d62b-43cc-b22d-f53a63d9b75b tempest-InstanceActionsTestJSON-809963800 tempest-InstanceActionsTestJSON-809963800-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 658.679733] env[62070]: DEBUG nova.virt.hardware [None req-97492129-d62b-43cc-b22d-f53a63d9b75b tempest-InstanceActionsTestJSON-809963800 tempest-InstanceActionsTestJSON-809963800-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 658.679733] env[62070]: DEBUG nova.virt.hardware [None req-97492129-d62b-43cc-b22d-f53a63d9b75b tempest-InstanceActionsTestJSON-809963800 tempest-InstanceActionsTestJSON-809963800-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 658.679892] env[62070]: DEBUG nova.virt.hardware [None 
req-97492129-d62b-43cc-b22d-f53a63d9b75b tempest-InstanceActionsTestJSON-809963800 tempest-InstanceActionsTestJSON-809963800-project-member] Got 1 possible topologies {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 658.680227] env[62070]: DEBUG nova.virt.hardware [None req-97492129-d62b-43cc-b22d-f53a63d9b75b tempest-InstanceActionsTestJSON-809963800 tempest-InstanceActionsTestJSON-809963800-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 658.680443] env[62070]: DEBUG nova.virt.hardware [None req-97492129-d62b-43cc-b22d-f53a63d9b75b tempest-InstanceActionsTestJSON-809963800 tempest-InstanceActionsTestJSON-809963800-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 658.681589] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bcdbe2f-bdec-4d2d-a38a-52c2d857fb9f {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.690347] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e7baba0-621f-4fc3-acba-c15c1d90cd5d {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.750118] env[62070]: ERROR nova.compute.manager [None req-97492129-d62b-43cc-b22d-f53a63d9b75b tempest-InstanceActionsTestJSON-809963800 tempest-InstanceActionsTestJSON-809963800-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port ef6f1c75-e8f0-4dd1-bd35-95e061959be1, please check neutron logs for more information. 
[ 658.750118] env[62070]: ERROR nova.compute.manager Traceback (most recent call last): [ 658.750118] env[62070]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 658.750118] env[62070]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 658.750118] env[62070]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 658.750118] env[62070]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 658.750118] env[62070]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 658.750118] env[62070]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 658.750118] env[62070]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 658.750118] env[62070]: ERROR nova.compute.manager self.force_reraise() [ 658.750118] env[62070]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 658.750118] env[62070]: ERROR nova.compute.manager raise self.value [ 658.750118] env[62070]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 658.750118] env[62070]: ERROR nova.compute.manager updated_port = self._update_port( [ 658.750118] env[62070]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 658.750118] env[62070]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 658.750609] env[62070]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 658.750609] env[62070]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 658.750609] env[62070]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port ef6f1c75-e8f0-4dd1-bd35-95e061959be1, please check neutron logs for more information. 
[ 658.750609] env[62070]: ERROR nova.compute.manager [ 658.750609] env[62070]: Traceback (most recent call last): [ 658.750609] env[62070]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 658.750609] env[62070]: listener.cb(fileno) [ 658.750609] env[62070]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 658.750609] env[62070]: result = function(*args, **kwargs) [ 658.750609] env[62070]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 658.750609] env[62070]: return func(*args, **kwargs) [ 658.750609] env[62070]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 658.750609] env[62070]: raise e [ 658.750609] env[62070]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 658.750609] env[62070]: nwinfo = self.network_api.allocate_for_instance( [ 658.750609] env[62070]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 658.750609] env[62070]: created_port_ids = self._update_ports_for_instance( [ 658.750609] env[62070]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 658.750609] env[62070]: with excutils.save_and_reraise_exception(): [ 658.750609] env[62070]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 658.750609] env[62070]: self.force_reraise() [ 658.750609] env[62070]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 658.750609] env[62070]: raise self.value [ 658.750609] env[62070]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 658.750609] env[62070]: updated_port = self._update_port( [ 658.750609] env[62070]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 658.750609] env[62070]: _ensure_no_port_binding_failure(port) [ 658.750609] env[62070]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 658.750609] env[62070]: raise exception.PortBindingFailed(port_id=port['id']) [ 658.751521] env[62070]: nova.exception.PortBindingFailed: Binding failed for port ef6f1c75-e8f0-4dd1-bd35-95e061959be1, please check neutron logs for more information. [ 658.751521] env[62070]: Removing descriptor: 16 [ 658.751521] env[62070]: ERROR nova.compute.manager [None req-97492129-d62b-43cc-b22d-f53a63d9b75b tempest-InstanceActionsTestJSON-809963800 tempest-InstanceActionsTestJSON-809963800-project-member] [instance: adccca24-ed77-410b-8b69-19137cadafbd] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port ef6f1c75-e8f0-4dd1-bd35-95e061959be1, please check neutron logs for more information. 
[ 658.751521] env[62070]: ERROR nova.compute.manager [instance: adccca24-ed77-410b-8b69-19137cadafbd] Traceback (most recent call last): [ 658.751521] env[62070]: ERROR nova.compute.manager [instance: adccca24-ed77-410b-8b69-19137cadafbd] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 658.751521] env[62070]: ERROR nova.compute.manager [instance: adccca24-ed77-410b-8b69-19137cadafbd] yield resources [ 658.751521] env[62070]: ERROR nova.compute.manager [instance: adccca24-ed77-410b-8b69-19137cadafbd] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 658.751521] env[62070]: ERROR nova.compute.manager [instance: adccca24-ed77-410b-8b69-19137cadafbd] self.driver.spawn(context, instance, image_meta, [ 658.751521] env[62070]: ERROR nova.compute.manager [instance: adccca24-ed77-410b-8b69-19137cadafbd] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 658.751521] env[62070]: ERROR nova.compute.manager [instance: adccca24-ed77-410b-8b69-19137cadafbd] self._vmops.spawn(context, instance, image_meta, injected_files, [ 658.751521] env[62070]: ERROR nova.compute.manager [instance: adccca24-ed77-410b-8b69-19137cadafbd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 658.751521] env[62070]: ERROR nova.compute.manager [instance: adccca24-ed77-410b-8b69-19137cadafbd] vm_ref = self.build_virtual_machine(instance, [ 658.751924] env[62070]: ERROR nova.compute.manager [instance: adccca24-ed77-410b-8b69-19137cadafbd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 658.751924] env[62070]: ERROR nova.compute.manager [instance: adccca24-ed77-410b-8b69-19137cadafbd] vif_infos = vmwarevif.get_vif_info(self._session, [ 658.751924] env[62070]: ERROR nova.compute.manager [instance: adccca24-ed77-410b-8b69-19137cadafbd] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 658.751924] env[62070]: ERROR nova.compute.manager [instance: adccca24-ed77-410b-8b69-19137cadafbd] for vif in network_info: [ 658.751924] env[62070]: ERROR nova.compute.manager [instance: adccca24-ed77-410b-8b69-19137cadafbd] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 658.751924] env[62070]: ERROR nova.compute.manager [instance: adccca24-ed77-410b-8b69-19137cadafbd] return self._sync_wrapper(fn, *args, **kwargs) [ 658.751924] env[62070]: ERROR nova.compute.manager [instance: adccca24-ed77-410b-8b69-19137cadafbd] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 658.751924] env[62070]: ERROR nova.compute.manager [instance: adccca24-ed77-410b-8b69-19137cadafbd] self.wait() [ 658.751924] env[62070]: ERROR nova.compute.manager [instance: adccca24-ed77-410b-8b69-19137cadafbd] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 658.751924] env[62070]: ERROR nova.compute.manager [instance: adccca24-ed77-410b-8b69-19137cadafbd] self[:] = self._gt.wait() [ 658.751924] env[62070]: ERROR nova.compute.manager [instance: adccca24-ed77-410b-8b69-19137cadafbd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 658.751924] env[62070]: ERROR nova.compute.manager [instance: adccca24-ed77-410b-8b69-19137cadafbd] return self._exit_event.wait() [ 658.751924] env[62070]: ERROR nova.compute.manager [instance: adccca24-ed77-410b-8b69-19137cadafbd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 658.752317] env[62070]: ERROR 
nova.compute.manager [instance: adccca24-ed77-410b-8b69-19137cadafbd] result = hub.switch() [ 658.752317] env[62070]: ERROR nova.compute.manager [instance: adccca24-ed77-410b-8b69-19137cadafbd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 658.752317] env[62070]: ERROR nova.compute.manager [instance: adccca24-ed77-410b-8b69-19137cadafbd] return self.greenlet.switch() [ 658.752317] env[62070]: ERROR nova.compute.manager [instance: adccca24-ed77-410b-8b69-19137cadafbd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 658.752317] env[62070]: ERROR nova.compute.manager [instance: adccca24-ed77-410b-8b69-19137cadafbd] result = function(*args, **kwargs) [ 658.752317] env[62070]: ERROR nova.compute.manager [instance: adccca24-ed77-410b-8b69-19137cadafbd] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 658.752317] env[62070]: ERROR nova.compute.manager [instance: adccca24-ed77-410b-8b69-19137cadafbd] return func(*args, **kwargs) [ 658.752317] env[62070]: ERROR nova.compute.manager [instance: adccca24-ed77-410b-8b69-19137cadafbd] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 658.752317] env[62070]: ERROR nova.compute.manager [instance: adccca24-ed77-410b-8b69-19137cadafbd] raise e [ 658.752317] env[62070]: ERROR nova.compute.manager [instance: adccca24-ed77-410b-8b69-19137cadafbd] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 658.752317] env[62070]: ERROR nova.compute.manager [instance: adccca24-ed77-410b-8b69-19137cadafbd] nwinfo = self.network_api.allocate_for_instance( [ 658.752317] env[62070]: ERROR nova.compute.manager [instance: adccca24-ed77-410b-8b69-19137cadafbd] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 658.752317] env[62070]: ERROR nova.compute.manager [instance: adccca24-ed77-410b-8b69-19137cadafbd] created_port_ids = self._update_ports_for_instance( [ 658.752693] env[62070]: ERROR nova.compute.manager [instance: adccca24-ed77-410b-8b69-19137cadafbd] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 658.752693] env[62070]: ERROR nova.compute.manager [instance: adccca24-ed77-410b-8b69-19137cadafbd] with excutils.save_and_reraise_exception(): [ 658.752693] env[62070]: ERROR nova.compute.manager [instance: adccca24-ed77-410b-8b69-19137cadafbd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 658.752693] env[62070]: ERROR nova.compute.manager [instance: adccca24-ed77-410b-8b69-19137cadafbd] self.force_reraise() [ 658.752693] env[62070]: ERROR nova.compute.manager [instance: adccca24-ed77-410b-8b69-19137cadafbd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 658.752693] env[62070]: ERROR nova.compute.manager [instance: adccca24-ed77-410b-8b69-19137cadafbd] raise self.value [ 658.752693] env[62070]: ERROR nova.compute.manager [instance: adccca24-ed77-410b-8b69-19137cadafbd] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 658.752693] env[62070]: ERROR nova.compute.manager [instance: adccca24-ed77-410b-8b69-19137cadafbd] updated_port = self._update_port( [ 658.752693] env[62070]: ERROR nova.compute.manager [instance: adccca24-ed77-410b-8b69-19137cadafbd] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 658.752693] 
env[62070]: ERROR nova.compute.manager [instance: adccca24-ed77-410b-8b69-19137cadafbd] _ensure_no_port_binding_failure(port) [ 658.752693] env[62070]: ERROR nova.compute.manager [instance: adccca24-ed77-410b-8b69-19137cadafbd] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 658.752693] env[62070]: ERROR nova.compute.manager [instance: adccca24-ed77-410b-8b69-19137cadafbd] raise exception.PortBindingFailed(port_id=port['id']) [ 658.753065] env[62070]: ERROR nova.compute.manager [instance: adccca24-ed77-410b-8b69-19137cadafbd] nova.exception.PortBindingFailed: Binding failed for port ef6f1c75-e8f0-4dd1-bd35-95e061959be1, please check neutron logs for more information. [ 658.753065] env[62070]: ERROR nova.compute.manager [instance: adccca24-ed77-410b-8b69-19137cadafbd] [ 658.753065] env[62070]: INFO nova.compute.manager [None req-97492129-d62b-43cc-b22d-f53a63d9b75b tempest-InstanceActionsTestJSON-809963800 tempest-InstanceActionsTestJSON-809963800-project-member] [instance: adccca24-ed77-410b-8b69-19137cadafbd] Terminating instance [ 658.753342] env[62070]: DEBUG oslo_concurrency.lockutils [None req-97492129-d62b-43cc-b22d-f53a63d9b75b tempest-InstanceActionsTestJSON-809963800 tempest-InstanceActionsTestJSON-809963800-project-member] Acquiring lock "refresh_cache-adccca24-ed77-410b-8b69-19137cadafbd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 658.851497] env[62070]: DEBUG oslo_concurrency.lockutils [None req-e4f46748-c02f-46f3-a136-2698cbfaeb86 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Acquiring lock "359ae9f2-a907-459e-99b9-3e043d5d015f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 658.851744] env[62070]: DEBUG oslo_concurrency.lockutils [None req-e4f46748-c02f-46f3-a136-2698cbfaeb86 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Lock "359ae9f2-a907-459e-99b9-3e043d5d015f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 658.927148] env[62070]: DEBUG oslo_concurrency.lockutils [None req-b596e62b-91d4-4a7e-a714-611f34958df6 tempest-FloatingIPsAssociationTestJSON-86130693 tempest-FloatingIPsAssociationTestJSON-86130693-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 659.108070] env[62070]: DEBUG oslo_concurrency.lockutils [None req-f6ff1678-8a45-4fca-ba2d-cf78a12bb48c tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Acquiring lock "242cf24f-f9e0-49ca-9b3e-a2b88b3cdcdc" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 659.108308] env[62070]: DEBUG oslo_concurrency.lockutils [None req-f6ff1678-8a45-4fca-ba2d-cf78a12bb48c tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Lock 
"242cf24f-f9e0-49ca-9b3e-a2b88b3cdcdc" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 659.121932] env[62070]: DEBUG nova.network.neutron [req-7768b444-220a-41ed-b333-fb5220b40644 req-2c1d9278-2225-41b1-af91-b806d930d2fc service nova] [instance: adccca24-ed77-410b-8b69-19137cadafbd] Instance cache missing network info. {{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 659.152208] env[62070]: DEBUG oslo_concurrency.lockutils [None req-ca5c3ba7-16ea-4ea4-a91b-9bf5923a33ab tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.520s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 659.152737] env[62070]: DEBUG nova.compute.manager [None req-ca5c3ba7-16ea-4ea4-a91b-9bf5923a33ab tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: efa18997-b502-4e2e-933a-a185ab9074d5] Start building networks asynchronously for instance. {{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 659.155914] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6c8ffe8c-5f36-434d-8036-6e71a11d52b4 tempest-InstanceActionsV221TestJSON-1098319233 tempest-InstanceActionsV221TestJSON-1098319233-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.713s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 659.157344] env[62070]: INFO nova.compute.claims [None req-6c8ffe8c-5f36-434d-8036-6e71a11d52b4 tempest-InstanceActionsV221TestJSON-1098319233 tempest-InstanceActionsV221TestJSON-1098319233-project-member] [instance: 9ec1b7a6-5ade-49a3-ba47-912bb328adb6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 659.210126] env[62070]: DEBUG nova.network.neutron [req-7768b444-220a-41ed-b333-fb5220b40644 req-2c1d9278-2225-41b1-af91-b806d930d2fc service nova] [instance: adccca24-ed77-410b-8b69-19137cadafbd] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 659.661985] env[62070]: DEBUG nova.compute.utils [None req-ca5c3ba7-16ea-4ea4-a91b-9bf5923a33ab tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Using /dev/sd instead of None {{(pid=62070) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 659.675926] env[62070]: DEBUG nova.compute.manager [None req-ca5c3ba7-16ea-4ea4-a91b-9bf5923a33ab tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: efa18997-b502-4e2e-933a-a185ab9074d5] Allocating IP information in the background. 
{{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 659.675926] env[62070]: DEBUG nova.network.neutron [None req-ca5c3ba7-16ea-4ea4-a91b-9bf5923a33ab tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: efa18997-b502-4e2e-933a-a185ab9074d5] allocate_for_instance() {{(pid=62070) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 659.714138] env[62070]: DEBUG oslo_concurrency.lockutils [req-7768b444-220a-41ed-b333-fb5220b40644 req-2c1d9278-2225-41b1-af91-b806d930d2fc service nova] Releasing lock "refresh_cache-adccca24-ed77-410b-8b69-19137cadafbd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 659.714138] env[62070]: DEBUG oslo_concurrency.lockutils [None req-97492129-d62b-43cc-b22d-f53a63d9b75b tempest-InstanceActionsTestJSON-809963800 tempest-InstanceActionsTestJSON-809963800-project-member] Acquired lock "refresh_cache-adccca24-ed77-410b-8b69-19137cadafbd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 659.714138] env[62070]: DEBUG nova.network.neutron [None req-97492129-d62b-43cc-b22d-f53a63d9b75b tempest-InstanceActionsTestJSON-809963800 tempest-InstanceActionsTestJSON-809963800-project-member] [instance: adccca24-ed77-410b-8b69-19137cadafbd] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 659.803627] env[62070]: DEBUG nova.policy [None req-ca5c3ba7-16ea-4ea4-a91b-9bf5923a33ab tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '85707399ddf04d03bfb487560df1212e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd079c0ef3ed745fcaf69dc728dca4466', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62070) authorize /opt/stack/nova/nova/policy.py:201}} [ 660.174106] env[62070]: DEBUG nova.compute.manager [None req-ca5c3ba7-16ea-4ea4-a91b-9bf5923a33ab tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: efa18997-b502-4e2e-933a-a185ab9074d5] Start building block device mappings for instance. {{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 660.238997] env[62070]: DEBUG nova.network.neutron [None req-97492129-d62b-43cc-b22d-f53a63d9b75b tempest-InstanceActionsTestJSON-809963800 tempest-InstanceActionsTestJSON-809963800-project-member] [instance: adccca24-ed77-410b-8b69-19137cadafbd] Instance cache missing network info. 
{{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 660.340939] env[62070]: DEBUG nova.network.neutron [None req-ca5c3ba7-16ea-4ea4-a91b-9bf5923a33ab tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: efa18997-b502-4e2e-933a-a185ab9074d5] Successfully created port: 7af3fb3f-ac6d-4b68-b071-de50eab1a1f3 {{(pid=62070) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 660.343969] env[62070]: DEBUG nova.network.neutron [None req-97492129-d62b-43cc-b22d-f53a63d9b75b tempest-InstanceActionsTestJSON-809963800 tempest-InstanceActionsTestJSON-809963800-project-member] [instance: adccca24-ed77-410b-8b69-19137cadafbd] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 660.597946] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-231ff533-9030-4117-9058-7b5d2ee3c7d1 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.606942] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e62afc0-0913-4cb4-8db5-f0f46a80bdb0 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.640494] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76af35e5-a794-4fa1-bb4a-9e533c98a470 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.652198] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71f9ad48-8778-4301-812f-214e7ede9c17 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.666088] env[62070]: DEBUG nova.compute.provider_tree [None req-6c8ffe8c-5f36-434d-8036-6e71a11d52b4 tempest-InstanceActionsV221TestJSON-1098319233 tempest-InstanceActionsV221TestJSON-1098319233-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 660.670905] env[62070]: DEBUG nova.compute.manager [req-dc6fc126-6b0f-4c8c-b0c9-24415336c8d8 req-b112a9b5-631a-4d5e-ba79-3f3373dae287 service nova] [instance: adccca24-ed77-410b-8b69-19137cadafbd] Received event network-vif-deleted-ef6f1c75-e8f0-4dd1-bd35-95e061959be1 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 660.849473] env[62070]: DEBUG oslo_concurrency.lockutils [None req-97492129-d62b-43cc-b22d-f53a63d9b75b tempest-InstanceActionsTestJSON-809963800 tempest-InstanceActionsTestJSON-809963800-project-member] Releasing lock "refresh_cache-adccca24-ed77-410b-8b69-19137cadafbd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 660.849938] env[62070]: DEBUG nova.compute.manager [None req-97492129-d62b-43cc-b22d-f53a63d9b75b tempest-InstanceActionsTestJSON-809963800 tempest-InstanceActionsTestJSON-809963800-project-member] [instance: adccca24-ed77-410b-8b69-19137cadafbd] Start destroying the instance on the hypervisor. 
{{(pid=62070) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 660.850173] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-97492129-d62b-43cc-b22d-f53a63d9b75b tempest-InstanceActionsTestJSON-809963800 tempest-InstanceActionsTestJSON-809963800-project-member] [instance: adccca24-ed77-410b-8b69-19137cadafbd] Destroying instance {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 660.850516] env[62070]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-336d7720-9ebb-4e07-9781-c27ed9d0e591 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.862046] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c4ff53c-5a53-4742-acc5-58262a5ee0cf {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.886912] env[62070]: WARNING nova.virt.vmwareapi.vmops [None req-97492129-d62b-43cc-b22d-f53a63d9b75b tempest-InstanceActionsTestJSON-809963800 tempest-InstanceActionsTestJSON-809963800-project-member] [instance: adccca24-ed77-410b-8b69-19137cadafbd] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance adccca24-ed77-410b-8b69-19137cadafbd could not be found. [ 660.887212] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-97492129-d62b-43cc-b22d-f53a63d9b75b tempest-InstanceActionsTestJSON-809963800 tempest-InstanceActionsTestJSON-809963800-project-member] [instance: adccca24-ed77-410b-8b69-19137cadafbd] Instance destroyed {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 660.887532] env[62070]: INFO nova.compute.manager [None req-97492129-d62b-43cc-b22d-f53a63d9b75b tempest-InstanceActionsTestJSON-809963800 tempest-InstanceActionsTestJSON-809963800-project-member] [instance: adccca24-ed77-410b-8b69-19137cadafbd] Took 0.04 seconds to destroy the instance on the hypervisor. [ 660.887705] env[62070]: DEBUG oslo.service.loopingcall [None req-97492129-d62b-43cc-b22d-f53a63d9b75b tempest-InstanceActionsTestJSON-809963800 tempest-InstanceActionsTestJSON-809963800-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 660.887952] env[62070]: DEBUG nova.compute.manager [-] [instance: adccca24-ed77-410b-8b69-19137cadafbd] Deallocating network for instance {{(pid=62070) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 660.888081] env[62070]: DEBUG nova.network.neutron [-] [instance: adccca24-ed77-410b-8b69-19137cadafbd] deallocate_for_instance() {{(pid=62070) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 660.911357] env[62070]: DEBUG nova.network.neutron [-] [instance: adccca24-ed77-410b-8b69-19137cadafbd] Instance cache missing network info. 
{{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 661.170833] env[62070]: DEBUG nova.scheduler.client.report [None req-6c8ffe8c-5f36-434d-8036-6e71a11d52b4 tempest-InstanceActionsV221TestJSON-1098319233 tempest-InstanceActionsV221TestJSON-1098319233-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 661.183474] env[62070]: DEBUG nova.compute.manager [None req-ca5c3ba7-16ea-4ea4-a91b-9bf5923a33ab tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: efa18997-b502-4e2e-933a-a185ab9074d5] Start spawning the instance on the hypervisor. {{(pid=62070) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 661.213823] env[62070]: DEBUG nova.virt.hardware [None req-ca5c3ba7-16ea-4ea4-a91b-9bf5923a33ab tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T09:21:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T09:21:17Z,direct_url=,disk_format='vmdk',id=43ea607c-7ece-4601-9b11-75c6a16aa7dd,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9d42cb2bbadf40d6b35f237f71234611',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T09:21:18Z,virtual_size=,visibility=), allow threads: False {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 661.214080] env[62070]: DEBUG nova.virt.hardware [None req-ca5c3ba7-16ea-4ea4-a91b-9bf5923a33ab tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Flavor limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 661.214240] env[62070]: DEBUG nova.virt.hardware [None req-ca5c3ba7-16ea-4ea4-a91b-9bf5923a33ab tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Image limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 661.214421] env[62070]: DEBUG nova.virt.hardware [None req-ca5c3ba7-16ea-4ea4-a91b-9bf5923a33ab tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Flavor pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 661.214566] env[62070]: DEBUG nova.virt.hardware [None req-ca5c3ba7-16ea-4ea4-a91b-9bf5923a33ab tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Image pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} 
[ 661.214709] env[62070]: DEBUG nova.virt.hardware [None req-ca5c3ba7-16ea-4ea4-a91b-9bf5923a33ab tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 661.214943] env[62070]: DEBUG nova.virt.hardware [None req-ca5c3ba7-16ea-4ea4-a91b-9bf5923a33ab tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 661.215127] env[62070]: DEBUG nova.virt.hardware [None req-ca5c3ba7-16ea-4ea4-a91b-9bf5923a33ab tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 661.215298] env[62070]: DEBUG nova.virt.hardware [None req-ca5c3ba7-16ea-4ea4-a91b-9bf5923a33ab tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Got 1 possible topologies {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 661.215462] env[62070]: DEBUG nova.virt.hardware [None req-ca5c3ba7-16ea-4ea4-a91b-9bf5923a33ab tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 661.215702] env[62070]: DEBUG nova.virt.hardware [None req-ca5c3ba7-16ea-4ea4-a91b-9bf5923a33ab tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 661.216756] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63d27678-bcdf-417b-bd6a-2767c0d032de {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.225566] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8691c29a-24b3-4215-a3e4-d9042e460b62 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.415670] env[62070]: DEBUG nova.network.neutron [-] [instance: adccca24-ed77-410b-8b69-19137cadafbd] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 661.417824] env[62070]: ERROR nova.compute.manager [None req-ca5c3ba7-16ea-4ea4-a91b-9bf5923a33ab tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 7af3fb3f-ac6d-4b68-b071-de50eab1a1f3, please check neutron logs for more information. 
[ 661.417824] env[62070]: ERROR nova.compute.manager Traceback (most recent call last): [ 661.417824] env[62070]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 661.417824] env[62070]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 661.417824] env[62070]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 661.417824] env[62070]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 661.417824] env[62070]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 661.417824] env[62070]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 661.417824] env[62070]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 661.417824] env[62070]: ERROR nova.compute.manager self.force_reraise() [ 661.417824] env[62070]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 661.417824] env[62070]: ERROR nova.compute.manager raise self.value [ 661.417824] env[62070]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 661.417824] env[62070]: ERROR nova.compute.manager updated_port = self._update_port( [ 661.417824] env[62070]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 661.417824] env[62070]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 661.418300] env[62070]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 661.418300] env[62070]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 661.418300] env[62070]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 7af3fb3f-ac6d-4b68-b071-de50eab1a1f3, please check neutron logs for more information. 
[ 661.418300] env[62070]: ERROR nova.compute.manager [ 661.418300] env[62070]: Traceback (most recent call last): [ 661.418300] env[62070]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 661.418300] env[62070]: listener.cb(fileno) [ 661.418300] env[62070]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 661.418300] env[62070]: result = function(*args, **kwargs) [ 661.418300] env[62070]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 661.418300] env[62070]: return func(*args, **kwargs) [ 661.418300] env[62070]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 661.418300] env[62070]: raise e [ 661.418300] env[62070]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 661.418300] env[62070]: nwinfo = self.network_api.allocate_for_instance( [ 661.418300] env[62070]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 661.418300] env[62070]: created_port_ids = self._update_ports_for_instance( [ 661.418300] env[62070]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 661.418300] env[62070]: with excutils.save_and_reraise_exception(): [ 661.418300] env[62070]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 661.418300] env[62070]: self.force_reraise() [ 661.418300] env[62070]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 661.418300] env[62070]: raise self.value [ 661.418300] env[62070]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 661.418300] env[62070]: updated_port = self._update_port( [ 661.418300] env[62070]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 661.418300] env[62070]: _ensure_no_port_binding_failure(port) [ 661.418300] env[62070]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 661.418300] env[62070]: raise exception.PortBindingFailed(port_id=port['id']) [ 661.419084] env[62070]: nova.exception.PortBindingFailed: Binding failed for port 7af3fb3f-ac6d-4b68-b071-de50eab1a1f3, please check neutron logs for more information. [ 661.419084] env[62070]: Removing descriptor: 16 [ 661.419084] env[62070]: ERROR nova.compute.manager [None req-ca5c3ba7-16ea-4ea4-a91b-9bf5923a33ab tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: efa18997-b502-4e2e-933a-a185ab9074d5] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 7af3fb3f-ac6d-4b68-b071-de50eab1a1f3, please check neutron logs for more information. 
[ 661.419084] env[62070]: ERROR nova.compute.manager [instance: efa18997-b502-4e2e-933a-a185ab9074d5] Traceback (most recent call last): [ 661.419084] env[62070]: ERROR nova.compute.manager [instance: efa18997-b502-4e2e-933a-a185ab9074d5] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 661.419084] env[62070]: ERROR nova.compute.manager [instance: efa18997-b502-4e2e-933a-a185ab9074d5] yield resources [ 661.419084] env[62070]: ERROR nova.compute.manager [instance: efa18997-b502-4e2e-933a-a185ab9074d5] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 661.419084] env[62070]: ERROR nova.compute.manager [instance: efa18997-b502-4e2e-933a-a185ab9074d5] self.driver.spawn(context, instance, image_meta, [ 661.419084] env[62070]: ERROR nova.compute.manager [instance: efa18997-b502-4e2e-933a-a185ab9074d5] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 661.419084] env[62070]: ERROR nova.compute.manager [instance: efa18997-b502-4e2e-933a-a185ab9074d5] self._vmops.spawn(context, instance, image_meta, injected_files, [ 661.419084] env[62070]: ERROR nova.compute.manager [instance: efa18997-b502-4e2e-933a-a185ab9074d5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 661.419084] env[62070]: ERROR nova.compute.manager [instance: efa18997-b502-4e2e-933a-a185ab9074d5] vm_ref = self.build_virtual_machine(instance, [ 661.419576] env[62070]: ERROR nova.compute.manager [instance: efa18997-b502-4e2e-933a-a185ab9074d5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 661.419576] env[62070]: ERROR nova.compute.manager [instance: efa18997-b502-4e2e-933a-a185ab9074d5] vif_infos = vmwarevif.get_vif_info(self._session, [ 661.419576] env[62070]: ERROR nova.compute.manager [instance: efa18997-b502-4e2e-933a-a185ab9074d5] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 661.419576] env[62070]: ERROR nova.compute.manager [instance: efa18997-b502-4e2e-933a-a185ab9074d5] for vif in network_info: [ 661.419576] env[62070]: ERROR nova.compute.manager [instance: efa18997-b502-4e2e-933a-a185ab9074d5] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 661.419576] env[62070]: ERROR nova.compute.manager [instance: efa18997-b502-4e2e-933a-a185ab9074d5] return self._sync_wrapper(fn, *args, **kwargs) [ 661.419576] env[62070]: ERROR nova.compute.manager [instance: efa18997-b502-4e2e-933a-a185ab9074d5] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 661.419576] env[62070]: ERROR nova.compute.manager [instance: efa18997-b502-4e2e-933a-a185ab9074d5] self.wait() [ 661.419576] env[62070]: ERROR nova.compute.manager [instance: efa18997-b502-4e2e-933a-a185ab9074d5] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 661.419576] env[62070]: ERROR nova.compute.manager [instance: efa18997-b502-4e2e-933a-a185ab9074d5] self[:] = self._gt.wait() [ 661.419576] env[62070]: ERROR nova.compute.manager [instance: efa18997-b502-4e2e-933a-a185ab9074d5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 661.419576] env[62070]: ERROR nova.compute.manager [instance: efa18997-b502-4e2e-933a-a185ab9074d5] return self._exit_event.wait() [ 661.419576] env[62070]: ERROR nova.compute.manager [instance: efa18997-b502-4e2e-933a-a185ab9074d5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 661.419929] env[62070]: ERROR 
nova.compute.manager [instance: efa18997-b502-4e2e-933a-a185ab9074d5] result = hub.switch() [ 661.419929] env[62070]: ERROR nova.compute.manager [instance: efa18997-b502-4e2e-933a-a185ab9074d5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 661.419929] env[62070]: ERROR nova.compute.manager [instance: efa18997-b502-4e2e-933a-a185ab9074d5] return self.greenlet.switch() [ 661.419929] env[62070]: ERROR nova.compute.manager [instance: efa18997-b502-4e2e-933a-a185ab9074d5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 661.419929] env[62070]: ERROR nova.compute.manager [instance: efa18997-b502-4e2e-933a-a185ab9074d5] result = function(*args, **kwargs) [ 661.419929] env[62070]: ERROR nova.compute.manager [instance: efa18997-b502-4e2e-933a-a185ab9074d5] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 661.419929] env[62070]: ERROR nova.compute.manager [instance: efa18997-b502-4e2e-933a-a185ab9074d5] return func(*args, **kwargs) [ 661.419929] env[62070]: ERROR nova.compute.manager [instance: efa18997-b502-4e2e-933a-a185ab9074d5] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 661.419929] env[62070]: ERROR nova.compute.manager [instance: efa18997-b502-4e2e-933a-a185ab9074d5] raise e [ 661.419929] env[62070]: ERROR nova.compute.manager [instance: efa18997-b502-4e2e-933a-a185ab9074d5] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 661.419929] env[62070]: ERROR nova.compute.manager [instance: efa18997-b502-4e2e-933a-a185ab9074d5] nwinfo = self.network_api.allocate_for_instance( [ 661.419929] env[62070]: ERROR nova.compute.manager [instance: efa18997-b502-4e2e-933a-a185ab9074d5] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 661.419929] env[62070]: ERROR nova.compute.manager [instance: efa18997-b502-4e2e-933a-a185ab9074d5] created_port_ids = self._update_ports_for_instance( [ 661.420319] env[62070]: ERROR nova.compute.manager [instance: efa18997-b502-4e2e-933a-a185ab9074d5] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 661.420319] env[62070]: ERROR nova.compute.manager [instance: efa18997-b502-4e2e-933a-a185ab9074d5] with excutils.save_and_reraise_exception(): [ 661.420319] env[62070]: ERROR nova.compute.manager [instance: efa18997-b502-4e2e-933a-a185ab9074d5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 661.420319] env[62070]: ERROR nova.compute.manager [instance: efa18997-b502-4e2e-933a-a185ab9074d5] self.force_reraise() [ 661.420319] env[62070]: ERROR nova.compute.manager [instance: efa18997-b502-4e2e-933a-a185ab9074d5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 661.420319] env[62070]: ERROR nova.compute.manager [instance: efa18997-b502-4e2e-933a-a185ab9074d5] raise self.value [ 661.420319] env[62070]: ERROR nova.compute.manager [instance: efa18997-b502-4e2e-933a-a185ab9074d5] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 661.420319] env[62070]: ERROR nova.compute.manager [instance: efa18997-b502-4e2e-933a-a185ab9074d5] updated_port = self._update_port( [ 661.420319] env[62070]: ERROR nova.compute.manager [instance: efa18997-b502-4e2e-933a-a185ab9074d5] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 661.420319] 
env[62070]: ERROR nova.compute.manager [instance: efa18997-b502-4e2e-933a-a185ab9074d5] _ensure_no_port_binding_failure(port) [ 661.420319] env[62070]: ERROR nova.compute.manager [instance: efa18997-b502-4e2e-933a-a185ab9074d5] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 661.420319] env[62070]: ERROR nova.compute.manager [instance: efa18997-b502-4e2e-933a-a185ab9074d5] raise exception.PortBindingFailed(port_id=port['id']) [ 661.420652] env[62070]: ERROR nova.compute.manager [instance: efa18997-b502-4e2e-933a-a185ab9074d5] nova.exception.PortBindingFailed: Binding failed for port 7af3fb3f-ac6d-4b68-b071-de50eab1a1f3, please check neutron logs for more information. [ 661.420652] env[62070]: ERROR nova.compute.manager [instance: efa18997-b502-4e2e-933a-a185ab9074d5] [ 661.420652] env[62070]: INFO nova.compute.manager [None req-ca5c3ba7-16ea-4ea4-a91b-9bf5923a33ab tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: efa18997-b502-4e2e-933a-a185ab9074d5] Terminating instance [ 661.420764] env[62070]: DEBUG oslo_concurrency.lockutils [None req-ca5c3ba7-16ea-4ea4-a91b-9bf5923a33ab tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Acquiring lock "refresh_cache-efa18997-b502-4e2e-933a-a185ab9074d5" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 661.420920] env[62070]: DEBUG oslo_concurrency.lockutils [None req-ca5c3ba7-16ea-4ea4-a91b-9bf5923a33ab tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Acquired lock "refresh_cache-efa18997-b502-4e2e-933a-a185ab9074d5" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 661.421096] env[62070]: DEBUG nova.network.neutron [None req-ca5c3ba7-16ea-4ea4-a91b-9bf5923a33ab tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: efa18997-b502-4e2e-933a-a185ab9074d5] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 661.680020] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6c8ffe8c-5f36-434d-8036-6e71a11d52b4 tempest-InstanceActionsV221TestJSON-1098319233 tempest-InstanceActionsV221TestJSON-1098319233-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.523s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 661.680020] env[62070]: DEBUG nova.compute.manager [None req-6c8ffe8c-5f36-434d-8036-6e71a11d52b4 tempest-InstanceActionsV221TestJSON-1098319233 tempest-InstanceActionsV221TestJSON-1098319233-project-member] [instance: 9ec1b7a6-5ade-49a3-ba47-912bb328adb6] Start building networks asynchronously for instance. 
{{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 661.682181] env[62070]: DEBUG oslo_concurrency.lockutils [None req-75e4eab2-d509-4e0b-8199-5c5644d968e3 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.085s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 661.683545] env[62070]: INFO nova.compute.claims [None req-75e4eab2-d509-4e0b-8199-5c5644d968e3 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 5da19104-b163-44cd-bb1f-68c4eb316ac1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 661.921961] env[62070]: INFO nova.compute.manager [-] [instance: adccca24-ed77-410b-8b69-19137cadafbd] Took 1.03 seconds to deallocate network for instance. [ 661.926711] env[62070]: DEBUG nova.compute.claims [None req-97492129-d62b-43cc-b22d-f53a63d9b75b tempest-InstanceActionsTestJSON-809963800 tempest-InstanceActionsTestJSON-809963800-project-member] [instance: adccca24-ed77-410b-8b69-19137cadafbd] Aborting claim: {{(pid=62070) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 661.926884] env[62070]: DEBUG oslo_concurrency.lockutils [None req-97492129-d62b-43cc-b22d-f53a63d9b75b tempest-InstanceActionsTestJSON-809963800 tempest-InstanceActionsTestJSON-809963800-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 661.951212] env[62070]: DEBUG nova.network.neutron [None req-ca5c3ba7-16ea-4ea4-a91b-9bf5923a33ab tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: efa18997-b502-4e2e-933a-a185ab9074d5] Instance cache missing network info. {{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 662.098385] env[62070]: DEBUG nova.network.neutron [None req-ca5c3ba7-16ea-4ea4-a91b-9bf5923a33ab tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: efa18997-b502-4e2e-933a-a185ab9074d5] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 662.190474] env[62070]: DEBUG nova.compute.utils [None req-6c8ffe8c-5f36-434d-8036-6e71a11d52b4 tempest-InstanceActionsV221TestJSON-1098319233 tempest-InstanceActionsV221TestJSON-1098319233-project-member] Using /dev/sd instead of None {{(pid=62070) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 662.195522] env[62070]: DEBUG nova.compute.manager [None req-6c8ffe8c-5f36-434d-8036-6e71a11d52b4 tempest-InstanceActionsV221TestJSON-1098319233 tempest-InstanceActionsV221TestJSON-1098319233-project-member] [instance: 9ec1b7a6-5ade-49a3-ba47-912bb328adb6] Allocating IP information in the background. 
{{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 662.195522] env[62070]: DEBUG nova.network.neutron [None req-6c8ffe8c-5f36-434d-8036-6e71a11d52b4 tempest-InstanceActionsV221TestJSON-1098319233 tempest-InstanceActionsV221TestJSON-1098319233-project-member] [instance: 9ec1b7a6-5ade-49a3-ba47-912bb328adb6] allocate_for_instance() {{(pid=62070) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 662.268167] env[62070]: DEBUG nova.policy [None req-6c8ffe8c-5f36-434d-8036-6e71a11d52b4 tempest-InstanceActionsV221TestJSON-1098319233 tempest-InstanceActionsV221TestJSON-1098319233-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f7e4851b67af42129287208a62765bf1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'bba9941a8b5b4e71823200acf37699e9', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62070) authorize /opt/stack/nova/nova/policy.py:201}} [ 662.609278] env[62070]: DEBUG oslo_concurrency.lockutils [None req-ca5c3ba7-16ea-4ea4-a91b-9bf5923a33ab tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Releasing lock "refresh_cache-efa18997-b502-4e2e-933a-a185ab9074d5" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 662.609278] env[62070]: DEBUG nova.compute.manager [None req-ca5c3ba7-16ea-4ea4-a91b-9bf5923a33ab tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: efa18997-b502-4e2e-933a-a185ab9074d5] Start destroying the instance on the hypervisor. {{(pid=62070) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 662.609278] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-ca5c3ba7-16ea-4ea4-a91b-9bf5923a33ab tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: efa18997-b502-4e2e-933a-a185ab9074d5] Destroying instance {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 662.609278] env[62070]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3bf74da2-17fb-4d86-9989-5bb09ebc20bd {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.618093] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9cc80cc-ac6c-4637-8029-da605b1919b5 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.643502] env[62070]: WARNING nova.virt.vmwareapi.vmops [None req-ca5c3ba7-16ea-4ea4-a91b-9bf5923a33ab tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: efa18997-b502-4e2e-933a-a185ab9074d5] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance efa18997-b502-4e2e-933a-a185ab9074d5 could not be found. 
[ 662.643853] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-ca5c3ba7-16ea-4ea4-a91b-9bf5923a33ab tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: efa18997-b502-4e2e-933a-a185ab9074d5] Instance destroyed {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 662.644317] env[62070]: INFO nova.compute.manager [None req-ca5c3ba7-16ea-4ea4-a91b-9bf5923a33ab tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: efa18997-b502-4e2e-933a-a185ab9074d5] Took 0.04 seconds to destroy the instance on the hypervisor. [ 662.644748] env[62070]: DEBUG oslo.service.loopingcall [None req-ca5c3ba7-16ea-4ea4-a91b-9bf5923a33ab tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 662.645112] env[62070]: DEBUG nova.compute.manager [-] [instance: efa18997-b502-4e2e-933a-a185ab9074d5] Deallocating network for instance {{(pid=62070) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 662.645457] env[62070]: DEBUG nova.network.neutron [-] [instance: efa18997-b502-4e2e-933a-a185ab9074d5] deallocate_for_instance() {{(pid=62070) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 662.661903] env[62070]: DEBUG nova.network.neutron [-] [instance: efa18997-b502-4e2e-933a-a185ab9074d5] Instance cache missing network info. {{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 662.695320] env[62070]: DEBUG nova.compute.manager [None req-6c8ffe8c-5f36-434d-8036-6e71a11d52b4 tempest-InstanceActionsV221TestJSON-1098319233 tempest-InstanceActionsV221TestJSON-1098319233-project-member] [instance: 9ec1b7a6-5ade-49a3-ba47-912bb328adb6] Start building block device mappings for instance. {{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 662.706350] env[62070]: DEBUG nova.compute.manager [req-12f92d88-a75a-4b8a-bc27-0b015c50d27e req-7ebe73c5-ef3b-4003-a437-f45c3e33f875 service nova] [instance: efa18997-b502-4e2e-933a-a185ab9074d5] Received event network-changed-7af3fb3f-ac6d-4b68-b071-de50eab1a1f3 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 662.706687] env[62070]: DEBUG nova.compute.manager [req-12f92d88-a75a-4b8a-bc27-0b015c50d27e req-7ebe73c5-ef3b-4003-a437-f45c3e33f875 service nova] [instance: efa18997-b502-4e2e-933a-a185ab9074d5] Refreshing instance network info cache due to event network-changed-7af3fb3f-ac6d-4b68-b071-de50eab1a1f3. 
{{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 662.707113] env[62070]: DEBUG oslo_concurrency.lockutils [req-12f92d88-a75a-4b8a-bc27-0b015c50d27e req-7ebe73c5-ef3b-4003-a437-f45c3e33f875 service nova] Acquiring lock "refresh_cache-efa18997-b502-4e2e-933a-a185ab9074d5" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 662.708039] env[62070]: DEBUG oslo_concurrency.lockutils [req-12f92d88-a75a-4b8a-bc27-0b015c50d27e req-7ebe73c5-ef3b-4003-a437-f45c3e33f875 service nova] Acquired lock "refresh_cache-efa18997-b502-4e2e-933a-a185ab9074d5" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 662.708039] env[62070]: DEBUG nova.network.neutron [req-12f92d88-a75a-4b8a-bc27-0b015c50d27e req-7ebe73c5-ef3b-4003-a437-f45c3e33f875 service nova] [instance: efa18997-b502-4e2e-933a-a185ab9074d5] Refreshing network info cache for port 7af3fb3f-ac6d-4b68-b071-de50eab1a1f3 {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 662.743239] env[62070]: DEBUG nova.network.neutron [None req-6c8ffe8c-5f36-434d-8036-6e71a11d52b4 tempest-InstanceActionsV221TestJSON-1098319233 tempest-InstanceActionsV221TestJSON-1098319233-project-member] [instance: 9ec1b7a6-5ade-49a3-ba47-912bb328adb6] Successfully created port: b59e3bf6-60a3-4125-8867-7c2fee078882 {{(pid=62070) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 663.143274] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ae65837-e9a7-401c-97de-4846d5dc0eb1 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.149262] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24823722-cbe1-473b-9747-6a73878ee965 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.183372] env[62070]: DEBUG nova.network.neutron [-] [instance: efa18997-b502-4e2e-933a-a185ab9074d5] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 663.186699] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99466419-e108-42ee-9692-403f2bdf6b6b {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.195531] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f85ce9a6-2798-4326-9165-30647eedb011 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.218731] env[62070]: DEBUG nova.compute.provider_tree [None req-75e4eab2-d509-4e0b-8199-5c5644d968e3 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 663.250852] env[62070]: DEBUG nova.network.neutron [req-12f92d88-a75a-4b8a-bc27-0b015c50d27e req-7ebe73c5-ef3b-4003-a437-f45c3e33f875 service nova] [instance: efa18997-b502-4e2e-933a-a185ab9074d5] Instance cache missing network info. 
{{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 663.395060] env[62070]: DEBUG nova.network.neutron [req-12f92d88-a75a-4b8a-bc27-0b015c50d27e req-7ebe73c5-ef3b-4003-a437-f45c3e33f875 service nova] [instance: efa18997-b502-4e2e-933a-a185ab9074d5] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 663.688583] env[62070]: INFO nova.compute.manager [-] [instance: efa18997-b502-4e2e-933a-a185ab9074d5] Took 1.04 seconds to deallocate network for instance. [ 663.689617] env[62070]: DEBUG nova.compute.claims [None req-ca5c3ba7-16ea-4ea4-a91b-9bf5923a33ab tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: efa18997-b502-4e2e-933a-a185ab9074d5] Aborting claim: {{(pid=62070) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 663.689617] env[62070]: DEBUG oslo_concurrency.lockutils [None req-ca5c3ba7-16ea-4ea4-a91b-9bf5923a33ab tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 663.724025] env[62070]: DEBUG nova.compute.manager [None req-6c8ffe8c-5f36-434d-8036-6e71a11d52b4 tempest-InstanceActionsV221TestJSON-1098319233 tempest-InstanceActionsV221TestJSON-1098319233-project-member] [instance: 9ec1b7a6-5ade-49a3-ba47-912bb328adb6] Start spawning the instance on the hypervisor. {{(pid=62070) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 663.724847] env[62070]: DEBUG nova.scheduler.client.report [None req-75e4eab2-d509-4e0b-8199-5c5644d968e3 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 663.754093] env[62070]: DEBUG nova.virt.hardware [None req-6c8ffe8c-5f36-434d-8036-6e71a11d52b4 tempest-InstanceActionsV221TestJSON-1098319233 tempest-InstanceActionsV221TestJSON-1098319233-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T09:21:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T09:21:17Z,direct_url=,disk_format='vmdk',id=43ea607c-7ece-4601-9b11-75c6a16aa7dd,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9d42cb2bbadf40d6b35f237f71234611',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T09:21:18Z,virtual_size=,visibility=), allow threads: False {{(pid=62070) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:563}} [ 663.754093] env[62070]: DEBUG nova.virt.hardware [None req-6c8ffe8c-5f36-434d-8036-6e71a11d52b4 tempest-InstanceActionsV221TestJSON-1098319233 tempest-InstanceActionsV221TestJSON-1098319233-project-member] Flavor limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 663.754093] env[62070]: DEBUG nova.virt.hardware [None req-6c8ffe8c-5f36-434d-8036-6e71a11d52b4 tempest-InstanceActionsV221TestJSON-1098319233 tempest-InstanceActionsV221TestJSON-1098319233-project-member] Image limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 663.754259] env[62070]: DEBUG nova.virt.hardware [None req-6c8ffe8c-5f36-434d-8036-6e71a11d52b4 tempest-InstanceActionsV221TestJSON-1098319233 tempest-InstanceActionsV221TestJSON-1098319233-project-member] Flavor pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 663.754259] env[62070]: DEBUG nova.virt.hardware [None req-6c8ffe8c-5f36-434d-8036-6e71a11d52b4 tempest-InstanceActionsV221TestJSON-1098319233 tempest-InstanceActionsV221TestJSON-1098319233-project-member] Image pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 663.754259] env[62070]: DEBUG nova.virt.hardware [None req-6c8ffe8c-5f36-434d-8036-6e71a11d52b4 tempest-InstanceActionsV221TestJSON-1098319233 tempest-InstanceActionsV221TestJSON-1098319233-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 663.754372] env[62070]: DEBUG nova.virt.hardware [None req-6c8ffe8c-5f36-434d-8036-6e71a11d52b4 tempest-InstanceActionsV221TestJSON-1098319233 tempest-InstanceActionsV221TestJSON-1098319233-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 663.754642] env[62070]: DEBUG nova.virt.hardware [None req-6c8ffe8c-5f36-434d-8036-6e71a11d52b4 tempest-InstanceActionsV221TestJSON-1098319233 tempest-InstanceActionsV221TestJSON-1098319233-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 663.754954] env[62070]: DEBUG nova.virt.hardware [None req-6c8ffe8c-5f36-434d-8036-6e71a11d52b4 tempest-InstanceActionsV221TestJSON-1098319233 tempest-InstanceActionsV221TestJSON-1098319233-project-member] Got 1 possible topologies {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 663.755259] env[62070]: DEBUG nova.virt.hardware [None req-6c8ffe8c-5f36-434d-8036-6e71a11d52b4 tempest-InstanceActionsV221TestJSON-1098319233 tempest-InstanceActionsV221TestJSON-1098319233-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 663.755538] env[62070]: DEBUG nova.virt.hardware [None req-6c8ffe8c-5f36-434d-8036-6e71a11d52b4 tempest-InstanceActionsV221TestJSON-1098319233 tempest-InstanceActionsV221TestJSON-1098319233-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 663.756770] env[62070]: DEBUG oslo_vmware.service [-] 
Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3a0304b-a98b-4da2-9066-00c4040c67e2 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.766960] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3aa42dd-ecae-4acb-812f-d73440b57380 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.897707] env[62070]: DEBUG oslo_concurrency.lockutils [req-12f92d88-a75a-4b8a-bc27-0b015c50d27e req-7ebe73c5-ef3b-4003-a437-f45c3e33f875 service nova] Releasing lock "refresh_cache-efa18997-b502-4e2e-933a-a185ab9074d5" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 663.897967] env[62070]: DEBUG nova.compute.manager [req-12f92d88-a75a-4b8a-bc27-0b015c50d27e req-7ebe73c5-ef3b-4003-a437-f45c3e33f875 service nova] [instance: efa18997-b502-4e2e-933a-a185ab9074d5] Received event network-vif-deleted-7af3fb3f-ac6d-4b68-b071-de50eab1a1f3 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 664.166523] env[62070]: ERROR nova.compute.manager [None req-6c8ffe8c-5f36-434d-8036-6e71a11d52b4 tempest-InstanceActionsV221TestJSON-1098319233 tempest-InstanceActionsV221TestJSON-1098319233-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port b59e3bf6-60a3-4125-8867-7c2fee078882, please check neutron logs for more information. [ 664.166523] env[62070]: ERROR nova.compute.manager Traceback (most recent call last): [ 664.166523] env[62070]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 664.166523] env[62070]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 664.166523] env[62070]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 664.166523] env[62070]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 664.166523] env[62070]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 664.166523] env[62070]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 664.166523] env[62070]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 664.166523] env[62070]: ERROR nova.compute.manager self.force_reraise() [ 664.166523] env[62070]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 664.166523] env[62070]: ERROR nova.compute.manager raise self.value [ 664.166523] env[62070]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 664.166523] env[62070]: ERROR nova.compute.manager updated_port = self._update_port( [ 664.166523] env[62070]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 664.166523] env[62070]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 664.166951] env[62070]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 664.166951] env[62070]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 
664.166951] env[62070]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port b59e3bf6-60a3-4125-8867-7c2fee078882, please check neutron logs for more information. [ 664.166951] env[62070]: ERROR nova.compute.manager [ 664.166951] env[62070]: Traceback (most recent call last): [ 664.166951] env[62070]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 664.166951] env[62070]: listener.cb(fileno) [ 664.166951] env[62070]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 664.166951] env[62070]: result = function(*args, **kwargs) [ 664.166951] env[62070]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 664.166951] env[62070]: return func(*args, **kwargs) [ 664.166951] env[62070]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 664.166951] env[62070]: raise e [ 664.166951] env[62070]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 664.166951] env[62070]: nwinfo = self.network_api.allocate_for_instance( [ 664.166951] env[62070]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 664.166951] env[62070]: created_port_ids = self._update_ports_for_instance( [ 664.166951] env[62070]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 664.166951] env[62070]: with excutils.save_and_reraise_exception(): [ 664.166951] env[62070]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 664.166951] env[62070]: self.force_reraise() [ 664.166951] env[62070]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 664.166951] env[62070]: raise self.value [ 664.166951] env[62070]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 664.166951] env[62070]: updated_port = self._update_port( [ 664.166951] env[62070]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 664.166951] env[62070]: _ensure_no_port_binding_failure(port) [ 664.166951] env[62070]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 664.166951] env[62070]: raise exception.PortBindingFailed(port_id=port['id']) [ 664.167685] env[62070]: nova.exception.PortBindingFailed: Binding failed for port b59e3bf6-60a3-4125-8867-7c2fee078882, please check neutron logs for more information. [ 664.167685] env[62070]: Removing descriptor: 16 [ 664.167685] env[62070]: ERROR nova.compute.manager [None req-6c8ffe8c-5f36-434d-8036-6e71a11d52b4 tempest-InstanceActionsV221TestJSON-1098319233 tempest-InstanceActionsV221TestJSON-1098319233-project-member] [instance: 9ec1b7a6-5ade-49a3-ba47-912bb328adb6] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port b59e3bf6-60a3-4125-8867-7c2fee078882, please check neutron logs for more information. 
[ 664.167685] env[62070]: ERROR nova.compute.manager [instance: 9ec1b7a6-5ade-49a3-ba47-912bb328adb6] Traceback (most recent call last): [ 664.167685] env[62070]: ERROR nova.compute.manager [instance: 9ec1b7a6-5ade-49a3-ba47-912bb328adb6] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 664.167685] env[62070]: ERROR nova.compute.manager [instance: 9ec1b7a6-5ade-49a3-ba47-912bb328adb6] yield resources [ 664.167685] env[62070]: ERROR nova.compute.manager [instance: 9ec1b7a6-5ade-49a3-ba47-912bb328adb6] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 664.167685] env[62070]: ERROR nova.compute.manager [instance: 9ec1b7a6-5ade-49a3-ba47-912bb328adb6] self.driver.spawn(context, instance, image_meta, [ 664.167685] env[62070]: ERROR nova.compute.manager [instance: 9ec1b7a6-5ade-49a3-ba47-912bb328adb6] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 664.167685] env[62070]: ERROR nova.compute.manager [instance: 9ec1b7a6-5ade-49a3-ba47-912bb328adb6] self._vmops.spawn(context, instance, image_meta, injected_files, [ 664.167685] env[62070]: ERROR nova.compute.manager [instance: 9ec1b7a6-5ade-49a3-ba47-912bb328adb6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 664.167685] env[62070]: ERROR nova.compute.manager [instance: 9ec1b7a6-5ade-49a3-ba47-912bb328adb6] vm_ref = self.build_virtual_machine(instance, [ 664.168071] env[62070]: ERROR nova.compute.manager [instance: 9ec1b7a6-5ade-49a3-ba47-912bb328adb6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 664.168071] env[62070]: ERROR nova.compute.manager [instance: 9ec1b7a6-5ade-49a3-ba47-912bb328adb6] vif_infos = vmwarevif.get_vif_info(self._session, [ 664.168071] env[62070]: ERROR nova.compute.manager [instance: 9ec1b7a6-5ade-49a3-ba47-912bb328adb6] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 664.168071] env[62070]: ERROR nova.compute.manager [instance: 9ec1b7a6-5ade-49a3-ba47-912bb328adb6] for vif in network_info: [ 664.168071] env[62070]: ERROR nova.compute.manager [instance: 9ec1b7a6-5ade-49a3-ba47-912bb328adb6] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 664.168071] env[62070]: ERROR nova.compute.manager [instance: 9ec1b7a6-5ade-49a3-ba47-912bb328adb6] return self._sync_wrapper(fn, *args, **kwargs) [ 664.168071] env[62070]: ERROR nova.compute.manager [instance: 9ec1b7a6-5ade-49a3-ba47-912bb328adb6] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 664.168071] env[62070]: ERROR nova.compute.manager [instance: 9ec1b7a6-5ade-49a3-ba47-912bb328adb6] self.wait() [ 664.168071] env[62070]: ERROR nova.compute.manager [instance: 9ec1b7a6-5ade-49a3-ba47-912bb328adb6] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 664.168071] env[62070]: ERROR nova.compute.manager [instance: 9ec1b7a6-5ade-49a3-ba47-912bb328adb6] self[:] = self._gt.wait() [ 664.168071] env[62070]: ERROR nova.compute.manager [instance: 9ec1b7a6-5ade-49a3-ba47-912bb328adb6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 664.168071] env[62070]: ERROR nova.compute.manager [instance: 9ec1b7a6-5ade-49a3-ba47-912bb328adb6] return self._exit_event.wait() [ 664.168071] env[62070]: ERROR nova.compute.manager [instance: 9ec1b7a6-5ade-49a3-ba47-912bb328adb6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 664.168376] env[62070]: ERROR 
nova.compute.manager [instance: 9ec1b7a6-5ade-49a3-ba47-912bb328adb6] result = hub.switch() [ 664.168376] env[62070]: ERROR nova.compute.manager [instance: 9ec1b7a6-5ade-49a3-ba47-912bb328adb6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 664.168376] env[62070]: ERROR nova.compute.manager [instance: 9ec1b7a6-5ade-49a3-ba47-912bb328adb6] return self.greenlet.switch() [ 664.168376] env[62070]: ERROR nova.compute.manager [instance: 9ec1b7a6-5ade-49a3-ba47-912bb328adb6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 664.168376] env[62070]: ERROR nova.compute.manager [instance: 9ec1b7a6-5ade-49a3-ba47-912bb328adb6] result = function(*args, **kwargs) [ 664.168376] env[62070]: ERROR nova.compute.manager [instance: 9ec1b7a6-5ade-49a3-ba47-912bb328adb6] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 664.168376] env[62070]: ERROR nova.compute.manager [instance: 9ec1b7a6-5ade-49a3-ba47-912bb328adb6] return func(*args, **kwargs) [ 664.168376] env[62070]: ERROR nova.compute.manager [instance: 9ec1b7a6-5ade-49a3-ba47-912bb328adb6] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 664.168376] env[62070]: ERROR nova.compute.manager [instance: 9ec1b7a6-5ade-49a3-ba47-912bb328adb6] raise e [ 664.168376] env[62070]: ERROR nova.compute.manager [instance: 9ec1b7a6-5ade-49a3-ba47-912bb328adb6] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 664.168376] env[62070]: ERROR nova.compute.manager [instance: 9ec1b7a6-5ade-49a3-ba47-912bb328adb6] nwinfo = self.network_api.allocate_for_instance( [ 664.168376] env[62070]: ERROR nova.compute.manager [instance: 9ec1b7a6-5ade-49a3-ba47-912bb328adb6] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 664.168376] env[62070]: ERROR nova.compute.manager [instance: 9ec1b7a6-5ade-49a3-ba47-912bb328adb6] created_port_ids = self._update_ports_for_instance( [ 664.168685] env[62070]: ERROR nova.compute.manager [instance: 9ec1b7a6-5ade-49a3-ba47-912bb328adb6] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 664.168685] env[62070]: ERROR nova.compute.manager [instance: 9ec1b7a6-5ade-49a3-ba47-912bb328adb6] with excutils.save_and_reraise_exception(): [ 664.168685] env[62070]: ERROR nova.compute.manager [instance: 9ec1b7a6-5ade-49a3-ba47-912bb328adb6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 664.168685] env[62070]: ERROR nova.compute.manager [instance: 9ec1b7a6-5ade-49a3-ba47-912bb328adb6] self.force_reraise() [ 664.168685] env[62070]: ERROR nova.compute.manager [instance: 9ec1b7a6-5ade-49a3-ba47-912bb328adb6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 664.168685] env[62070]: ERROR nova.compute.manager [instance: 9ec1b7a6-5ade-49a3-ba47-912bb328adb6] raise self.value [ 664.168685] env[62070]: ERROR nova.compute.manager [instance: 9ec1b7a6-5ade-49a3-ba47-912bb328adb6] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 664.168685] env[62070]: ERROR nova.compute.manager [instance: 9ec1b7a6-5ade-49a3-ba47-912bb328adb6] updated_port = self._update_port( [ 664.168685] env[62070]: ERROR nova.compute.manager [instance: 9ec1b7a6-5ade-49a3-ba47-912bb328adb6] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 664.168685] 
env[62070]: ERROR nova.compute.manager [instance: 9ec1b7a6-5ade-49a3-ba47-912bb328adb6] _ensure_no_port_binding_failure(port) [ 664.168685] env[62070]: ERROR nova.compute.manager [instance: 9ec1b7a6-5ade-49a3-ba47-912bb328adb6] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 664.168685] env[62070]: ERROR nova.compute.manager [instance: 9ec1b7a6-5ade-49a3-ba47-912bb328adb6] raise exception.PortBindingFailed(port_id=port['id']) [ 664.168997] env[62070]: ERROR nova.compute.manager [instance: 9ec1b7a6-5ade-49a3-ba47-912bb328adb6] nova.exception.PortBindingFailed: Binding failed for port b59e3bf6-60a3-4125-8867-7c2fee078882, please check neutron logs for more information. [ 664.168997] env[62070]: ERROR nova.compute.manager [instance: 9ec1b7a6-5ade-49a3-ba47-912bb328adb6] [ 664.168997] env[62070]: INFO nova.compute.manager [None req-6c8ffe8c-5f36-434d-8036-6e71a11d52b4 tempest-InstanceActionsV221TestJSON-1098319233 tempest-InstanceActionsV221TestJSON-1098319233-project-member] [instance: 9ec1b7a6-5ade-49a3-ba47-912bb328adb6] Terminating instance [ 664.173034] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6c8ffe8c-5f36-434d-8036-6e71a11d52b4 tempest-InstanceActionsV221TestJSON-1098319233 tempest-InstanceActionsV221TestJSON-1098319233-project-member] Acquiring lock "refresh_cache-9ec1b7a6-5ade-49a3-ba47-912bb328adb6" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 664.173034] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6c8ffe8c-5f36-434d-8036-6e71a11d52b4 tempest-InstanceActionsV221TestJSON-1098319233 tempest-InstanceActionsV221TestJSON-1098319233-project-member] Acquired lock "refresh_cache-9ec1b7a6-5ade-49a3-ba47-912bb328adb6" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 664.173034] env[62070]: DEBUG nova.network.neutron [None req-6c8ffe8c-5f36-434d-8036-6e71a11d52b4 tempest-InstanceActionsV221TestJSON-1098319233 tempest-InstanceActionsV221TestJSON-1098319233-project-member] [instance: 9ec1b7a6-5ade-49a3-ba47-912bb328adb6] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 664.230622] env[62070]: DEBUG oslo_concurrency.lockutils [None req-75e4eab2-d509-4e0b-8199-5c5644d968e3 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.548s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 664.233023] env[62070]: DEBUG nova.compute.manager [None req-75e4eab2-d509-4e0b-8199-5c5644d968e3 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 5da19104-b163-44cd-bb1f-68c4eb316ac1] Start building networks asynchronously for instance. 
{{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 664.234645] env[62070]: DEBUG oslo_concurrency.lockutils [None req-c926e67b-5387-49dc-b89c-40a189a7f3d7 tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 13.901s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 664.696980] env[62070]: DEBUG nova.network.neutron [None req-6c8ffe8c-5f36-434d-8036-6e71a11d52b4 tempest-InstanceActionsV221TestJSON-1098319233 tempest-InstanceActionsV221TestJSON-1098319233-project-member] [instance: 9ec1b7a6-5ade-49a3-ba47-912bb328adb6] Instance cache missing network info. {{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 664.736876] env[62070]: DEBUG nova.compute.manager [req-edbf1fa9-c880-4f24-948c-f0931c7397ff req-cfb14b59-495c-475e-ac8b-8760c45a0b67 service nova] [instance: 9ec1b7a6-5ade-49a3-ba47-912bb328adb6] Received event network-changed-b59e3bf6-60a3-4125-8867-7c2fee078882 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 664.737158] env[62070]: DEBUG nova.compute.manager [req-edbf1fa9-c880-4f24-948c-f0931c7397ff req-cfb14b59-495c-475e-ac8b-8760c45a0b67 service nova] [instance: 9ec1b7a6-5ade-49a3-ba47-912bb328adb6] Refreshing instance network info cache due to event network-changed-b59e3bf6-60a3-4125-8867-7c2fee078882. {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 664.738562] env[62070]: DEBUG oslo_concurrency.lockutils [req-edbf1fa9-c880-4f24-948c-f0931c7397ff req-cfb14b59-495c-475e-ac8b-8760c45a0b67 service nova] Acquiring lock "refresh_cache-9ec1b7a6-5ade-49a3-ba47-912bb328adb6" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 664.739811] env[62070]: DEBUG nova.compute.utils [None req-75e4eab2-d509-4e0b-8199-5c5644d968e3 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Using /dev/sd instead of None {{(pid=62070) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 664.745594] env[62070]: DEBUG nova.compute.manager [None req-75e4eab2-d509-4e0b-8199-5c5644d968e3 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 5da19104-b163-44cd-bb1f-68c4eb316ac1] Allocating IP information in the background. 
{{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 664.745594] env[62070]: DEBUG nova.network.neutron [None req-75e4eab2-d509-4e0b-8199-5c5644d968e3 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 5da19104-b163-44cd-bb1f-68c4eb316ac1] allocate_for_instance() {{(pid=62070) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 664.789736] env[62070]: DEBUG nova.policy [None req-75e4eab2-d509-4e0b-8199-5c5644d968e3 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '093d4b68ffd04d4d951f5be91bfc76e8', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'eac8e5edc8f14fff89aba7c8cb6cac5d', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62070) authorize /opt/stack/nova/nova/policy.py:201}} [ 664.802093] env[62070]: DEBUG nova.network.neutron [None req-6c8ffe8c-5f36-434d-8036-6e71a11d52b4 tempest-InstanceActionsV221TestJSON-1098319233 tempest-InstanceActionsV221TestJSON-1098319233-project-member] [instance: 9ec1b7a6-5ade-49a3-ba47-912bb328adb6] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 665.097978] env[62070]: DEBUG nova.network.neutron [None req-75e4eab2-d509-4e0b-8199-5c5644d968e3 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 5da19104-b163-44cd-bb1f-68c4eb316ac1] Successfully created port: a5142d5c-5bdb-43c6-90cd-7643fb62d918 {{(pid=62070) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 665.133106] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-335e329d-8efc-4bd5-9fde-bffa8707db53 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.143209] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-027a184c-ab22-42c5-8d76-df62db2129dc {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.179465] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48378728-0a06-4f27-8f75-2fef1f2d646b {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.187371] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-503468f9-cff3-47b1-ab99-beacd078e435 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.205310] env[62070]: DEBUG nova.compute.provider_tree [None req-c926e67b-5387-49dc-b89c-40a189a7f3d7 tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 665.245618] env[62070]: DEBUG nova.compute.manager [None req-75e4eab2-d509-4e0b-8199-5c5644d968e3 tempest-ImagesTestJSON-1351627684 
tempest-ImagesTestJSON-1351627684-project-member] [instance: 5da19104-b163-44cd-bb1f-68c4eb316ac1] Start building block device mappings for instance. {{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 665.257166] env[62070]: DEBUG oslo_concurrency.lockutils [None req-a1a0245d-1ee3-4e1c-adaf-6c5d189bff8e tempest-ServerMetadataNegativeTestJSON-1698615623 tempest-ServerMetadataNegativeTestJSON-1698615623-project-member] Acquiring lock "20e7a993-b1fb-4359-ab35-8b0f06ca0121" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 665.257166] env[62070]: DEBUG oslo_concurrency.lockutils [None req-a1a0245d-1ee3-4e1c-adaf-6c5d189bff8e tempest-ServerMetadataNegativeTestJSON-1698615623 tempest-ServerMetadataNegativeTestJSON-1698615623-project-member] Lock "20e7a993-b1fb-4359-ab35-8b0f06ca0121" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 665.304168] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6c8ffe8c-5f36-434d-8036-6e71a11d52b4 tempest-InstanceActionsV221TestJSON-1098319233 tempest-InstanceActionsV221TestJSON-1098319233-project-member] Releasing lock "refresh_cache-9ec1b7a6-5ade-49a3-ba47-912bb328adb6" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 665.304592] env[62070]: DEBUG nova.compute.manager [None req-6c8ffe8c-5f36-434d-8036-6e71a11d52b4 tempest-InstanceActionsV221TestJSON-1098319233 tempest-InstanceActionsV221TestJSON-1098319233-project-member] [instance: 9ec1b7a6-5ade-49a3-ba47-912bb328adb6] Start destroying the instance on the hypervisor. 
{{(pid=62070) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 665.304782] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-6c8ffe8c-5f36-434d-8036-6e71a11d52b4 tempest-InstanceActionsV221TestJSON-1098319233 tempest-InstanceActionsV221TestJSON-1098319233-project-member] [instance: 9ec1b7a6-5ade-49a3-ba47-912bb328adb6] Destroying instance {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 665.305566] env[62070]: DEBUG oslo_concurrency.lockutils [req-edbf1fa9-c880-4f24-948c-f0931c7397ff req-cfb14b59-495c-475e-ac8b-8760c45a0b67 service nova] Acquired lock "refresh_cache-9ec1b7a6-5ade-49a3-ba47-912bb328adb6" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 665.306699] env[62070]: DEBUG nova.network.neutron [req-edbf1fa9-c880-4f24-948c-f0931c7397ff req-cfb14b59-495c-475e-ac8b-8760c45a0b67 service nova] [instance: 9ec1b7a6-5ade-49a3-ba47-912bb328adb6] Refreshing network info cache for port b59e3bf6-60a3-4125-8867-7c2fee078882 {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 665.306808] env[62070]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0f1640eb-f978-4f22-a631-8477505ebbfb {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.315925] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce1b9861-6c34-4b48-bde3-4ec3259311f9 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.340422] env[62070]: WARNING nova.virt.vmwareapi.vmops [None req-6c8ffe8c-5f36-434d-8036-6e71a11d52b4 tempest-InstanceActionsV221TestJSON-1098319233 tempest-InstanceActionsV221TestJSON-1098319233-project-member] [instance: 9ec1b7a6-5ade-49a3-ba47-912bb328adb6] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 9ec1b7a6-5ade-49a3-ba47-912bb328adb6 could not be found. [ 665.340648] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-6c8ffe8c-5f36-434d-8036-6e71a11d52b4 tempest-InstanceActionsV221TestJSON-1098319233 tempest-InstanceActionsV221TestJSON-1098319233-project-member] [instance: 9ec1b7a6-5ade-49a3-ba47-912bb328adb6] Instance destroyed {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 665.340826] env[62070]: INFO nova.compute.manager [None req-6c8ffe8c-5f36-434d-8036-6e71a11d52b4 tempest-InstanceActionsV221TestJSON-1098319233 tempest-InstanceActionsV221TestJSON-1098319233-project-member] [instance: 9ec1b7a6-5ade-49a3-ba47-912bb328adb6] Took 0.04 seconds to destroy the instance on the hypervisor. [ 665.341097] env[62070]: DEBUG oslo.service.loopingcall [None req-6c8ffe8c-5f36-434d-8036-6e71a11d52b4 tempest-InstanceActionsV221TestJSON-1098319233 tempest-InstanceActionsV221TestJSON-1098319233-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 665.341311] env[62070]: DEBUG nova.compute.manager [-] [instance: 9ec1b7a6-5ade-49a3-ba47-912bb328adb6] Deallocating network for instance {{(pid=62070) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 665.341403] env[62070]: DEBUG nova.network.neutron [-] [instance: 9ec1b7a6-5ade-49a3-ba47-912bb328adb6] deallocate_for_instance() {{(pid=62070) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 665.358541] env[62070]: DEBUG nova.network.neutron [-] [instance: 9ec1b7a6-5ade-49a3-ba47-912bb328adb6] Instance cache missing network info. {{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 665.708311] env[62070]: DEBUG nova.scheduler.client.report [None req-c926e67b-5387-49dc-b89c-40a189a7f3d7 tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 665.838265] env[62070]: DEBUG nova.network.neutron [req-edbf1fa9-c880-4f24-948c-f0931c7397ff req-cfb14b59-495c-475e-ac8b-8760c45a0b67 service nova] [instance: 9ec1b7a6-5ade-49a3-ba47-912bb328adb6] Instance cache missing network info. {{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 665.861569] env[62070]: DEBUG nova.network.neutron [-] [instance: 9ec1b7a6-5ade-49a3-ba47-912bb328adb6] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 665.938805] env[62070]: DEBUG nova.network.neutron [req-edbf1fa9-c880-4f24-948c-f0931c7397ff req-cfb14b59-495c-475e-ac8b-8760c45a0b67 service nova] [instance: 9ec1b7a6-5ade-49a3-ba47-912bb328adb6] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 665.977615] env[62070]: DEBUG nova.compute.manager [req-dc8b453d-05fc-4ecd-b46c-3863292e96ee req-ff94fb6b-6396-4ebe-8e24-795c9cc58ee6 service nova] [instance: 5da19104-b163-44cd-bb1f-68c4eb316ac1] Received event network-changed-a5142d5c-5bdb-43c6-90cd-7643fb62d918 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 665.978294] env[62070]: DEBUG nova.compute.manager [req-dc8b453d-05fc-4ecd-b46c-3863292e96ee req-ff94fb6b-6396-4ebe-8e24-795c9cc58ee6 service nova] [instance: 5da19104-b163-44cd-bb1f-68c4eb316ac1] Refreshing instance network info cache due to event network-changed-a5142d5c-5bdb-43c6-90cd-7643fb62d918. 
{{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 665.978294] env[62070]: DEBUG oslo_concurrency.lockutils [req-dc8b453d-05fc-4ecd-b46c-3863292e96ee req-ff94fb6b-6396-4ebe-8e24-795c9cc58ee6 service nova] Acquiring lock "refresh_cache-5da19104-b163-44cd-bb1f-68c4eb316ac1" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 665.978294] env[62070]: DEBUG oslo_concurrency.lockutils [req-dc8b453d-05fc-4ecd-b46c-3863292e96ee req-ff94fb6b-6396-4ebe-8e24-795c9cc58ee6 service nova] Acquired lock "refresh_cache-5da19104-b163-44cd-bb1f-68c4eb316ac1" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 665.978673] env[62070]: DEBUG nova.network.neutron [req-dc8b453d-05fc-4ecd-b46c-3863292e96ee req-ff94fb6b-6396-4ebe-8e24-795c9cc58ee6 service nova] [instance: 5da19104-b163-44cd-bb1f-68c4eb316ac1] Refreshing network info cache for port a5142d5c-5bdb-43c6-90cd-7643fb62d918 {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 666.144032] env[62070]: ERROR nova.compute.manager [None req-75e4eab2-d509-4e0b-8199-5c5644d968e3 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port a5142d5c-5bdb-43c6-90cd-7643fb62d918, please check neutron logs for more information. [ 666.144032] env[62070]: ERROR nova.compute.manager Traceback (most recent call last): [ 666.144032] env[62070]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 666.144032] env[62070]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 666.144032] env[62070]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 666.144032] env[62070]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 666.144032] env[62070]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 666.144032] env[62070]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 666.144032] env[62070]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 666.144032] env[62070]: ERROR nova.compute.manager self.force_reraise() [ 666.144032] env[62070]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 666.144032] env[62070]: ERROR nova.compute.manager raise self.value [ 666.144032] env[62070]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 666.144032] env[62070]: ERROR nova.compute.manager updated_port = self._update_port( [ 666.144032] env[62070]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 666.144032] env[62070]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 666.144714] env[62070]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 666.144714] env[62070]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 666.144714] env[62070]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed 
for port a5142d5c-5bdb-43c6-90cd-7643fb62d918, please check neutron logs for more information. [ 666.144714] env[62070]: ERROR nova.compute.manager [ 666.144714] env[62070]: Traceback (most recent call last): [ 666.144714] env[62070]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 666.144714] env[62070]: listener.cb(fileno) [ 666.144714] env[62070]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 666.144714] env[62070]: result = function(*args, **kwargs) [ 666.144714] env[62070]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 666.144714] env[62070]: return func(*args, **kwargs) [ 666.144714] env[62070]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 666.144714] env[62070]: raise e [ 666.144714] env[62070]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 666.144714] env[62070]: nwinfo = self.network_api.allocate_for_instance( [ 666.144714] env[62070]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 666.144714] env[62070]: created_port_ids = self._update_ports_for_instance( [ 666.144714] env[62070]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 666.144714] env[62070]: with excutils.save_and_reraise_exception(): [ 666.144714] env[62070]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 666.144714] env[62070]: self.force_reraise() [ 666.144714] env[62070]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 666.144714] env[62070]: raise self.value [ 666.144714] env[62070]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 666.144714] env[62070]: updated_port = self._update_port( [ 666.144714] env[62070]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 666.144714] env[62070]: _ensure_no_port_binding_failure(port) [ 666.144714] env[62070]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 666.144714] env[62070]: raise exception.PortBindingFailed(port_id=port['id']) [ 666.145578] env[62070]: nova.exception.PortBindingFailed: Binding failed for port a5142d5c-5bdb-43c6-90cd-7643fb62d918, please check neutron logs for more information. [ 666.145578] env[62070]: Removing descriptor: 14 [ 666.213329] env[62070]: DEBUG oslo_concurrency.lockutils [None req-c926e67b-5387-49dc-b89c-40a189a7f3d7 tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.979s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 666.213959] env[62070]: ERROR nova.compute.manager [None req-c926e67b-5387-49dc-b89c-40a189a7f3d7 tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] [instance: 95edf3d1-a987-4768-93be-1e045d7bfa99] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 9a8c0775-32a4-4f9c-9496-eb67187f85a0, please check neutron logs for more information. 
[ 666.213959] env[62070]: ERROR nova.compute.manager [instance: 95edf3d1-a987-4768-93be-1e045d7bfa99] Traceback (most recent call last): [ 666.213959] env[62070]: ERROR nova.compute.manager [instance: 95edf3d1-a987-4768-93be-1e045d7bfa99] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 666.213959] env[62070]: ERROR nova.compute.manager [instance: 95edf3d1-a987-4768-93be-1e045d7bfa99] self.driver.spawn(context, instance, image_meta, [ 666.213959] env[62070]: ERROR nova.compute.manager [instance: 95edf3d1-a987-4768-93be-1e045d7bfa99] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 666.213959] env[62070]: ERROR nova.compute.manager [instance: 95edf3d1-a987-4768-93be-1e045d7bfa99] self._vmops.spawn(context, instance, image_meta, injected_files, [ 666.213959] env[62070]: ERROR nova.compute.manager [instance: 95edf3d1-a987-4768-93be-1e045d7bfa99] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 666.213959] env[62070]: ERROR nova.compute.manager [instance: 95edf3d1-a987-4768-93be-1e045d7bfa99] vm_ref = self.build_virtual_machine(instance, [ 666.213959] env[62070]: ERROR nova.compute.manager [instance: 95edf3d1-a987-4768-93be-1e045d7bfa99] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 666.213959] env[62070]: ERROR nova.compute.manager [instance: 95edf3d1-a987-4768-93be-1e045d7bfa99] vif_infos = vmwarevif.get_vif_info(self._session, [ 666.213959] env[62070]: ERROR nova.compute.manager [instance: 95edf3d1-a987-4768-93be-1e045d7bfa99] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 666.214274] env[62070]: ERROR nova.compute.manager [instance: 95edf3d1-a987-4768-93be-1e045d7bfa99] for vif in network_info: [ 666.214274] env[62070]: ERROR nova.compute.manager [instance: 95edf3d1-a987-4768-93be-1e045d7bfa99] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 666.214274] env[62070]: ERROR nova.compute.manager [instance: 95edf3d1-a987-4768-93be-1e045d7bfa99] return self._sync_wrapper(fn, *args, **kwargs) [ 666.214274] env[62070]: ERROR nova.compute.manager [instance: 95edf3d1-a987-4768-93be-1e045d7bfa99] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 666.214274] env[62070]: ERROR nova.compute.manager [instance: 95edf3d1-a987-4768-93be-1e045d7bfa99] self.wait() [ 666.214274] env[62070]: ERROR nova.compute.manager [instance: 95edf3d1-a987-4768-93be-1e045d7bfa99] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 666.214274] env[62070]: ERROR nova.compute.manager [instance: 95edf3d1-a987-4768-93be-1e045d7bfa99] self[:] = self._gt.wait() [ 666.214274] env[62070]: ERROR nova.compute.manager [instance: 95edf3d1-a987-4768-93be-1e045d7bfa99] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 666.214274] env[62070]: ERROR nova.compute.manager [instance: 95edf3d1-a987-4768-93be-1e045d7bfa99] return self._exit_event.wait() [ 666.214274] env[62070]: ERROR nova.compute.manager [instance: 95edf3d1-a987-4768-93be-1e045d7bfa99] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 666.214274] env[62070]: ERROR nova.compute.manager [instance: 95edf3d1-a987-4768-93be-1e045d7bfa99] result = hub.switch() [ 666.214274] env[62070]: ERROR nova.compute.manager [instance: 95edf3d1-a987-4768-93be-1e045d7bfa99] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
666.214274] env[62070]: ERROR nova.compute.manager [instance: 95edf3d1-a987-4768-93be-1e045d7bfa99] return self.greenlet.switch() [ 666.214635] env[62070]: ERROR nova.compute.manager [instance: 95edf3d1-a987-4768-93be-1e045d7bfa99] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 666.214635] env[62070]: ERROR nova.compute.manager [instance: 95edf3d1-a987-4768-93be-1e045d7bfa99] result = function(*args, **kwargs) [ 666.214635] env[62070]: ERROR nova.compute.manager [instance: 95edf3d1-a987-4768-93be-1e045d7bfa99] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 666.214635] env[62070]: ERROR nova.compute.manager [instance: 95edf3d1-a987-4768-93be-1e045d7bfa99] return func(*args, **kwargs) [ 666.214635] env[62070]: ERROR nova.compute.manager [instance: 95edf3d1-a987-4768-93be-1e045d7bfa99] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 666.214635] env[62070]: ERROR nova.compute.manager [instance: 95edf3d1-a987-4768-93be-1e045d7bfa99] raise e [ 666.214635] env[62070]: ERROR nova.compute.manager [instance: 95edf3d1-a987-4768-93be-1e045d7bfa99] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 666.214635] env[62070]: ERROR nova.compute.manager [instance: 95edf3d1-a987-4768-93be-1e045d7bfa99] nwinfo = self.network_api.allocate_for_instance( [ 666.214635] env[62070]: ERROR nova.compute.manager [instance: 95edf3d1-a987-4768-93be-1e045d7bfa99] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 666.214635] env[62070]: ERROR nova.compute.manager [instance: 95edf3d1-a987-4768-93be-1e045d7bfa99] created_port_ids = self._update_ports_for_instance( [ 666.214635] env[62070]: ERROR nova.compute.manager [instance: 95edf3d1-a987-4768-93be-1e045d7bfa99] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 666.214635] env[62070]: ERROR nova.compute.manager [instance: 95edf3d1-a987-4768-93be-1e045d7bfa99] with excutils.save_and_reraise_exception(): [ 666.214635] env[62070]: ERROR nova.compute.manager [instance: 95edf3d1-a987-4768-93be-1e045d7bfa99] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 666.215038] env[62070]: ERROR nova.compute.manager [instance: 95edf3d1-a987-4768-93be-1e045d7bfa99] self.force_reraise() [ 666.215038] env[62070]: ERROR nova.compute.manager [instance: 95edf3d1-a987-4768-93be-1e045d7bfa99] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 666.215038] env[62070]: ERROR nova.compute.manager [instance: 95edf3d1-a987-4768-93be-1e045d7bfa99] raise self.value [ 666.215038] env[62070]: ERROR nova.compute.manager [instance: 95edf3d1-a987-4768-93be-1e045d7bfa99] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 666.215038] env[62070]: ERROR nova.compute.manager [instance: 95edf3d1-a987-4768-93be-1e045d7bfa99] updated_port = self._update_port( [ 666.215038] env[62070]: ERROR nova.compute.manager [instance: 95edf3d1-a987-4768-93be-1e045d7bfa99] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 666.215038] env[62070]: ERROR nova.compute.manager [instance: 95edf3d1-a987-4768-93be-1e045d7bfa99] _ensure_no_port_binding_failure(port) [ 666.215038] env[62070]: ERROR nova.compute.manager [instance: 95edf3d1-a987-4768-93be-1e045d7bfa99] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 666.215038] env[62070]: ERROR nova.compute.manager [instance: 95edf3d1-a987-4768-93be-1e045d7bfa99] raise exception.PortBindingFailed(port_id=port['id']) [ 666.215038] env[62070]: ERROR nova.compute.manager [instance: 95edf3d1-a987-4768-93be-1e045d7bfa99] nova.exception.PortBindingFailed: Binding failed for port 9a8c0775-32a4-4f9c-9496-eb67187f85a0, please check neutron logs for more information. [ 666.215038] env[62070]: ERROR nova.compute.manager [instance: 95edf3d1-a987-4768-93be-1e045d7bfa99] [ 666.215365] env[62070]: DEBUG nova.compute.utils [None req-c926e67b-5387-49dc-b89c-40a189a7f3d7 tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] [instance: 95edf3d1-a987-4768-93be-1e045d7bfa99] Binding failed for port 9a8c0775-32a4-4f9c-9496-eb67187f85a0, please check neutron logs for more information. {{(pid=62070) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 666.216017] env[62070]: DEBUG oslo_concurrency.lockutils [None req-8576b7f8-0e27-4444-92a4-eb32008ddebf tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 14.685s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 666.219547] env[62070]: DEBUG nova.compute.manager [None req-c926e67b-5387-49dc-b89c-40a189a7f3d7 tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] [instance: 95edf3d1-a987-4768-93be-1e045d7bfa99] Build of instance 95edf3d1-a987-4768-93be-1e045d7bfa99 was re-scheduled: Binding failed for port 9a8c0775-32a4-4f9c-9496-eb67187f85a0, please check neutron logs for more information. 
{{(pid=62070) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 666.220136] env[62070]: DEBUG nova.compute.manager [None req-c926e67b-5387-49dc-b89c-40a189a7f3d7 tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] [instance: 95edf3d1-a987-4768-93be-1e045d7bfa99] Unplugging VIFs for instance {{(pid=62070) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 666.220326] env[62070]: DEBUG oslo_concurrency.lockutils [None req-c926e67b-5387-49dc-b89c-40a189a7f3d7 tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] Acquiring lock "refresh_cache-95edf3d1-a987-4768-93be-1e045d7bfa99" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 666.220475] env[62070]: DEBUG oslo_concurrency.lockutils [None req-c926e67b-5387-49dc-b89c-40a189a7f3d7 tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] Acquired lock "refresh_cache-95edf3d1-a987-4768-93be-1e045d7bfa99" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 666.220635] env[62070]: DEBUG nova.network.neutron [None req-c926e67b-5387-49dc-b89c-40a189a7f3d7 tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] [instance: 95edf3d1-a987-4768-93be-1e045d7bfa99] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 666.256354] env[62070]: DEBUG nova.compute.manager [None req-75e4eab2-d509-4e0b-8199-5c5644d968e3 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 5da19104-b163-44cd-bb1f-68c4eb316ac1] Start spawning the instance on the hypervisor. 
{{(pid=62070) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 666.282749] env[62070]: DEBUG nova.virt.hardware [None req-75e4eab2-d509-4e0b-8199-5c5644d968e3 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T09:21:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T09:21:17Z,direct_url=,disk_format='vmdk',id=43ea607c-7ece-4601-9b11-75c6a16aa7dd,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9d42cb2bbadf40d6b35f237f71234611',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T09:21:18Z,virtual_size=,visibility=), allow threads: False {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 666.282985] env[62070]: DEBUG nova.virt.hardware [None req-75e4eab2-d509-4e0b-8199-5c5644d968e3 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Flavor limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 666.283155] env[62070]: DEBUG nova.virt.hardware [None req-75e4eab2-d509-4e0b-8199-5c5644d968e3 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Image limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 666.283338] env[62070]: DEBUG nova.virt.hardware [None req-75e4eab2-d509-4e0b-8199-5c5644d968e3 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Flavor pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 666.283483] env[62070]: DEBUG nova.virt.hardware [None req-75e4eab2-d509-4e0b-8199-5c5644d968e3 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Image pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 666.283628] env[62070]: DEBUG nova.virt.hardware [None req-75e4eab2-d509-4e0b-8199-5c5644d968e3 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 666.283832] env[62070]: DEBUG nova.virt.hardware [None req-75e4eab2-d509-4e0b-8199-5c5644d968e3 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 666.283996] env[62070]: DEBUG nova.virt.hardware [None req-75e4eab2-d509-4e0b-8199-5c5644d968e3 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 666.284186] env[62070]: DEBUG nova.virt.hardware [None req-75e4eab2-d509-4e0b-8199-5c5644d968e3 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] 
Got 1 possible topologies {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 666.284350] env[62070]: DEBUG nova.virt.hardware [None req-75e4eab2-d509-4e0b-8199-5c5644d968e3 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 666.284521] env[62070]: DEBUG nova.virt.hardware [None req-75e4eab2-d509-4e0b-8199-5c5644d968e3 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 666.285733] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc942907-7c61-4efc-9043-77431cb1578d {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.295564] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8029dd9e-606a-4f3c-bafa-a7d2a7380379 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.309808] env[62070]: ERROR nova.compute.manager [None req-75e4eab2-d509-4e0b-8199-5c5644d968e3 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 5da19104-b163-44cd-bb1f-68c4eb316ac1] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port a5142d5c-5bdb-43c6-90cd-7643fb62d918, please check neutron logs for more information. [ 666.309808] env[62070]: ERROR nova.compute.manager [instance: 5da19104-b163-44cd-bb1f-68c4eb316ac1] Traceback (most recent call last): [ 666.309808] env[62070]: ERROR nova.compute.manager [instance: 5da19104-b163-44cd-bb1f-68c4eb316ac1] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 666.309808] env[62070]: ERROR nova.compute.manager [instance: 5da19104-b163-44cd-bb1f-68c4eb316ac1] yield resources [ 666.309808] env[62070]: ERROR nova.compute.manager [instance: 5da19104-b163-44cd-bb1f-68c4eb316ac1] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 666.309808] env[62070]: ERROR nova.compute.manager [instance: 5da19104-b163-44cd-bb1f-68c4eb316ac1] self.driver.spawn(context, instance, image_meta, [ 666.309808] env[62070]: ERROR nova.compute.manager [instance: 5da19104-b163-44cd-bb1f-68c4eb316ac1] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 666.309808] env[62070]: ERROR nova.compute.manager [instance: 5da19104-b163-44cd-bb1f-68c4eb316ac1] self._vmops.spawn(context, instance, image_meta, injected_files, [ 666.309808] env[62070]: ERROR nova.compute.manager [instance: 5da19104-b163-44cd-bb1f-68c4eb316ac1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 666.309808] env[62070]: ERROR nova.compute.manager [instance: 5da19104-b163-44cd-bb1f-68c4eb316ac1] vm_ref = self.build_virtual_machine(instance, [ 666.309808] env[62070]: ERROR nova.compute.manager [instance: 5da19104-b163-44cd-bb1f-68c4eb316ac1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 666.310132] env[62070]: ERROR nova.compute.manager [instance: 5da19104-b163-44cd-bb1f-68c4eb316ac1] vif_infos = vmwarevif.get_vif_info(self._session, [ 666.310132] env[62070]: ERROR 
nova.compute.manager [instance: 5da19104-b163-44cd-bb1f-68c4eb316ac1] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 666.310132] env[62070]: ERROR nova.compute.manager [instance: 5da19104-b163-44cd-bb1f-68c4eb316ac1] for vif in network_info: [ 666.310132] env[62070]: ERROR nova.compute.manager [instance: 5da19104-b163-44cd-bb1f-68c4eb316ac1] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 666.310132] env[62070]: ERROR nova.compute.manager [instance: 5da19104-b163-44cd-bb1f-68c4eb316ac1] return self._sync_wrapper(fn, *args, **kwargs) [ 666.310132] env[62070]: ERROR nova.compute.manager [instance: 5da19104-b163-44cd-bb1f-68c4eb316ac1] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 666.310132] env[62070]: ERROR nova.compute.manager [instance: 5da19104-b163-44cd-bb1f-68c4eb316ac1] self.wait() [ 666.310132] env[62070]: ERROR nova.compute.manager [instance: 5da19104-b163-44cd-bb1f-68c4eb316ac1] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 666.310132] env[62070]: ERROR nova.compute.manager [instance: 5da19104-b163-44cd-bb1f-68c4eb316ac1] self[:] = self._gt.wait() [ 666.310132] env[62070]: ERROR nova.compute.manager [instance: 5da19104-b163-44cd-bb1f-68c4eb316ac1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 666.310132] env[62070]: ERROR nova.compute.manager [instance: 5da19104-b163-44cd-bb1f-68c4eb316ac1] return self._exit_event.wait() [ 666.310132] env[62070]: ERROR nova.compute.manager [instance: 5da19104-b163-44cd-bb1f-68c4eb316ac1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 666.310132] env[62070]: ERROR nova.compute.manager [instance: 5da19104-b163-44cd-bb1f-68c4eb316ac1] current.throw(*self._exc) [ 666.310448] env[62070]: ERROR nova.compute.manager [instance: 5da19104-b163-44cd-bb1f-68c4eb316ac1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 666.310448] env[62070]: ERROR nova.compute.manager [instance: 5da19104-b163-44cd-bb1f-68c4eb316ac1] result = function(*args, **kwargs) [ 666.310448] env[62070]: ERROR nova.compute.manager [instance: 5da19104-b163-44cd-bb1f-68c4eb316ac1] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 666.310448] env[62070]: ERROR nova.compute.manager [instance: 5da19104-b163-44cd-bb1f-68c4eb316ac1] return func(*args, **kwargs) [ 666.310448] env[62070]: ERROR nova.compute.manager [instance: 5da19104-b163-44cd-bb1f-68c4eb316ac1] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 666.310448] env[62070]: ERROR nova.compute.manager [instance: 5da19104-b163-44cd-bb1f-68c4eb316ac1] raise e [ 666.310448] env[62070]: ERROR nova.compute.manager [instance: 5da19104-b163-44cd-bb1f-68c4eb316ac1] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 666.310448] env[62070]: ERROR nova.compute.manager [instance: 5da19104-b163-44cd-bb1f-68c4eb316ac1] nwinfo = self.network_api.allocate_for_instance( [ 666.310448] env[62070]: ERROR nova.compute.manager [instance: 5da19104-b163-44cd-bb1f-68c4eb316ac1] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 666.310448] env[62070]: ERROR nova.compute.manager [instance: 5da19104-b163-44cd-bb1f-68c4eb316ac1] created_port_ids = self._update_ports_for_instance( [ 666.310448] env[62070]: ERROR nova.compute.manager [instance: 5da19104-b163-44cd-bb1f-68c4eb316ac1] 
File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 666.310448] env[62070]: ERROR nova.compute.manager [instance: 5da19104-b163-44cd-bb1f-68c4eb316ac1] with excutils.save_and_reraise_exception(): [ 666.310448] env[62070]: ERROR nova.compute.manager [instance: 5da19104-b163-44cd-bb1f-68c4eb316ac1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 666.310747] env[62070]: ERROR nova.compute.manager [instance: 5da19104-b163-44cd-bb1f-68c4eb316ac1] self.force_reraise() [ 666.310747] env[62070]: ERROR nova.compute.manager [instance: 5da19104-b163-44cd-bb1f-68c4eb316ac1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 666.310747] env[62070]: ERROR nova.compute.manager [instance: 5da19104-b163-44cd-bb1f-68c4eb316ac1] raise self.value [ 666.310747] env[62070]: ERROR nova.compute.manager [instance: 5da19104-b163-44cd-bb1f-68c4eb316ac1] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 666.310747] env[62070]: ERROR nova.compute.manager [instance: 5da19104-b163-44cd-bb1f-68c4eb316ac1] updated_port = self._update_port( [ 666.310747] env[62070]: ERROR nova.compute.manager [instance: 5da19104-b163-44cd-bb1f-68c4eb316ac1] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 666.310747] env[62070]: ERROR nova.compute.manager [instance: 5da19104-b163-44cd-bb1f-68c4eb316ac1] _ensure_no_port_binding_failure(port) [ 666.310747] env[62070]: ERROR nova.compute.manager [instance: 5da19104-b163-44cd-bb1f-68c4eb316ac1] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 666.310747] env[62070]: ERROR nova.compute.manager [instance: 5da19104-b163-44cd-bb1f-68c4eb316ac1] raise exception.PortBindingFailed(port_id=port['id']) [ 666.310747] env[62070]: ERROR nova.compute.manager [instance: 5da19104-b163-44cd-bb1f-68c4eb316ac1] nova.exception.PortBindingFailed: Binding failed for port a5142d5c-5bdb-43c6-90cd-7643fb62d918, please check neutron logs for more information. [ 666.310747] env[62070]: ERROR nova.compute.manager [instance: 5da19104-b163-44cd-bb1f-68c4eb316ac1] [ 666.310747] env[62070]: INFO nova.compute.manager [None req-75e4eab2-d509-4e0b-8199-5c5644d968e3 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 5da19104-b163-44cd-bb1f-68c4eb316ac1] Terminating instance [ 666.312202] env[62070]: DEBUG oslo_concurrency.lockutils [None req-75e4eab2-d509-4e0b-8199-5c5644d968e3 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Acquiring lock "refresh_cache-5da19104-b163-44cd-bb1f-68c4eb316ac1" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 666.367237] env[62070]: INFO nova.compute.manager [-] [instance: 9ec1b7a6-5ade-49a3-ba47-912bb328adb6] Took 1.03 seconds to deallocate network for instance. 
[ 666.369578] env[62070]: DEBUG nova.compute.claims [None req-6c8ffe8c-5f36-434d-8036-6e71a11d52b4 tempest-InstanceActionsV221TestJSON-1098319233 tempest-InstanceActionsV221TestJSON-1098319233-project-member] [instance: 9ec1b7a6-5ade-49a3-ba47-912bb328adb6] Aborting claim: {{(pid=62070) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 666.369755] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6c8ffe8c-5f36-434d-8036-6e71a11d52b4 tempest-InstanceActionsV221TestJSON-1098319233 tempest-InstanceActionsV221TestJSON-1098319233-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 666.440961] env[62070]: DEBUG oslo_concurrency.lockutils [req-edbf1fa9-c880-4f24-948c-f0931c7397ff req-cfb14b59-495c-475e-ac8b-8760c45a0b67 service nova] Releasing lock "refresh_cache-9ec1b7a6-5ade-49a3-ba47-912bb328adb6" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 666.441264] env[62070]: DEBUG nova.compute.manager [req-edbf1fa9-c880-4f24-948c-f0931c7397ff req-cfb14b59-495c-475e-ac8b-8760c45a0b67 service nova] [instance: 9ec1b7a6-5ade-49a3-ba47-912bb328adb6] Received event network-vif-deleted-b59e3bf6-60a3-4125-8867-7c2fee078882 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 666.501345] env[62070]: DEBUG nova.network.neutron [req-dc8b453d-05fc-4ecd-b46c-3863292e96ee req-ff94fb6b-6396-4ebe-8e24-795c9cc58ee6 service nova] [instance: 5da19104-b163-44cd-bb1f-68c4eb316ac1] Instance cache missing network info. {{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 666.571658] env[62070]: DEBUG nova.network.neutron [req-dc8b453d-05fc-4ecd-b46c-3863292e96ee req-ff94fb6b-6396-4ebe-8e24-795c9cc58ee6 service nova] [instance: 5da19104-b163-44cd-bb1f-68c4eb316ac1] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 666.742011] env[62070]: DEBUG nova.network.neutron [None req-c926e67b-5387-49dc-b89c-40a189a7f3d7 tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] [instance: 95edf3d1-a987-4768-93be-1e045d7bfa99] Instance cache missing network info. 
{{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 666.817123] env[62070]: DEBUG nova.network.neutron [None req-c926e67b-5387-49dc-b89c-40a189a7f3d7 tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] [instance: 95edf3d1-a987-4768-93be-1e045d7bfa99] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 667.070271] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94d341a4-1b45-4e14-8bb4-bf10863eb510 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.074325] env[62070]: DEBUG oslo_concurrency.lockutils [req-dc8b453d-05fc-4ecd-b46c-3863292e96ee req-ff94fb6b-6396-4ebe-8e24-795c9cc58ee6 service nova] Releasing lock "refresh_cache-5da19104-b163-44cd-bb1f-68c4eb316ac1" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 667.074911] env[62070]: DEBUG oslo_concurrency.lockutils [None req-75e4eab2-d509-4e0b-8199-5c5644d968e3 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Acquired lock "refresh_cache-5da19104-b163-44cd-bb1f-68c4eb316ac1" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 667.075259] env[62070]: DEBUG nova.network.neutron [None req-75e4eab2-d509-4e0b-8199-5c5644d968e3 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 5da19104-b163-44cd-bb1f-68c4eb316ac1] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 667.080803] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2cb92bc-7cd4-4c3c-9886-845b26a0e899 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.110542] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d345ec23-403c-4d43-a419-dfb9b45f2bd3 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.117527] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-675a3ae3-26e8-4f8e-88b8-553584b42929 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.131722] env[62070]: DEBUG nova.compute.provider_tree [None req-8576b7f8-0e27-4444-92a4-eb32008ddebf tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 667.321784] env[62070]: DEBUG oslo_concurrency.lockutils [None req-c926e67b-5387-49dc-b89c-40a189a7f3d7 tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] Releasing lock "refresh_cache-95edf3d1-a987-4768-93be-1e045d7bfa99" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 667.322052] env[62070]: DEBUG nova.compute.manager [None req-c926e67b-5387-49dc-b89c-40a189a7f3d7 tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] 
Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62070) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 667.322238] env[62070]: DEBUG nova.compute.manager [None req-c926e67b-5387-49dc-b89c-40a189a7f3d7 tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] [instance: 95edf3d1-a987-4768-93be-1e045d7bfa99] Deallocating network for instance {{(pid=62070) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 667.322409] env[62070]: DEBUG nova.network.neutron [None req-c926e67b-5387-49dc-b89c-40a189a7f3d7 tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] [instance: 95edf3d1-a987-4768-93be-1e045d7bfa99] deallocate_for_instance() {{(pid=62070) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 667.336602] env[62070]: DEBUG nova.network.neutron [None req-c926e67b-5387-49dc-b89c-40a189a7f3d7 tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] [instance: 95edf3d1-a987-4768-93be-1e045d7bfa99] Instance cache missing network info. {{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 667.597417] env[62070]: DEBUG nova.network.neutron [None req-75e4eab2-d509-4e0b-8199-5c5644d968e3 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 5da19104-b163-44cd-bb1f-68c4eb316ac1] Instance cache missing network info. {{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 667.635112] env[62070]: DEBUG nova.scheduler.client.report [None req-8576b7f8-0e27-4444-92a4-eb32008ddebf tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 667.690870] env[62070]: DEBUG nova.network.neutron [None req-75e4eab2-d509-4e0b-8199-5c5644d968e3 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 5da19104-b163-44cd-bb1f-68c4eb316ac1] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 667.839296] env[62070]: DEBUG nova.network.neutron [None req-c926e67b-5387-49dc-b89c-40a189a7f3d7 tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] [instance: 95edf3d1-a987-4768-93be-1e045d7bfa99] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 668.030839] env[62070]: DEBUG nova.compute.manager [req-4ed7cd37-9845-454b-a3e3-e0d4458ad7b5 req-b807bf64-3530-4e4c-a856-d3df1460caa6 service nova] [instance: 5da19104-b163-44cd-bb1f-68c4eb316ac1] Received event network-vif-deleted-a5142d5c-5bdb-43c6-90cd-7643fb62d918 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 668.140615] env[62070]: DEBUG 
oslo_concurrency.lockutils [None req-8576b7f8-0e27-4444-92a4-eb32008ddebf tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.924s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 668.141260] env[62070]: ERROR nova.compute.manager [None req-8576b7f8-0e27-4444-92a4-eb32008ddebf tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] [instance: b0134b0f-23b4-4d34-b144-71ccdd9fba72] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 18b8d97b-3f4c-445e-8136-989a05f41994, please check neutron logs for more information. [ 668.141260] env[62070]: ERROR nova.compute.manager [instance: b0134b0f-23b4-4d34-b144-71ccdd9fba72] Traceback (most recent call last): [ 668.141260] env[62070]: ERROR nova.compute.manager [instance: b0134b0f-23b4-4d34-b144-71ccdd9fba72] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 668.141260] env[62070]: ERROR nova.compute.manager [instance: b0134b0f-23b4-4d34-b144-71ccdd9fba72] self.driver.spawn(context, instance, image_meta, [ 668.141260] env[62070]: ERROR nova.compute.manager [instance: b0134b0f-23b4-4d34-b144-71ccdd9fba72] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 668.141260] env[62070]: ERROR nova.compute.manager [instance: b0134b0f-23b4-4d34-b144-71ccdd9fba72] self._vmops.spawn(context, instance, image_meta, injected_files, [ 668.141260] env[62070]: ERROR nova.compute.manager [instance: b0134b0f-23b4-4d34-b144-71ccdd9fba72] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 668.141260] env[62070]: ERROR nova.compute.manager [instance: b0134b0f-23b4-4d34-b144-71ccdd9fba72] vm_ref = self.build_virtual_machine(instance, [ 668.141260] env[62070]: ERROR nova.compute.manager [instance: b0134b0f-23b4-4d34-b144-71ccdd9fba72] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 668.141260] env[62070]: ERROR nova.compute.manager [instance: b0134b0f-23b4-4d34-b144-71ccdd9fba72] vif_infos = vmwarevif.get_vif_info(self._session, [ 668.141260] env[62070]: ERROR nova.compute.manager [instance: b0134b0f-23b4-4d34-b144-71ccdd9fba72] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 668.141600] env[62070]: ERROR nova.compute.manager [instance: b0134b0f-23b4-4d34-b144-71ccdd9fba72] for vif in network_info: [ 668.141600] env[62070]: ERROR nova.compute.manager [instance: b0134b0f-23b4-4d34-b144-71ccdd9fba72] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 668.141600] env[62070]: ERROR nova.compute.manager [instance: b0134b0f-23b4-4d34-b144-71ccdd9fba72] return self._sync_wrapper(fn, *args, **kwargs) [ 668.141600] env[62070]: ERROR nova.compute.manager [instance: b0134b0f-23b4-4d34-b144-71ccdd9fba72] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 668.141600] env[62070]: ERROR nova.compute.manager [instance: b0134b0f-23b4-4d34-b144-71ccdd9fba72] self.wait() [ 668.141600] env[62070]: ERROR nova.compute.manager [instance: b0134b0f-23b4-4d34-b144-71ccdd9fba72] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 668.141600] env[62070]: ERROR nova.compute.manager [instance: b0134b0f-23b4-4d34-b144-71ccdd9fba72] self[:] = self._gt.wait() [ 668.141600] env[62070]: 
ERROR nova.compute.manager [instance: b0134b0f-23b4-4d34-b144-71ccdd9fba72] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 668.141600] env[62070]: ERROR nova.compute.manager [instance: b0134b0f-23b4-4d34-b144-71ccdd9fba72] return self._exit_event.wait() [ 668.141600] env[62070]: ERROR nova.compute.manager [instance: b0134b0f-23b4-4d34-b144-71ccdd9fba72] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 668.141600] env[62070]: ERROR nova.compute.manager [instance: b0134b0f-23b4-4d34-b144-71ccdd9fba72] result = hub.switch() [ 668.141600] env[62070]: ERROR nova.compute.manager [instance: b0134b0f-23b4-4d34-b144-71ccdd9fba72] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 668.141600] env[62070]: ERROR nova.compute.manager [instance: b0134b0f-23b4-4d34-b144-71ccdd9fba72] return self.greenlet.switch() [ 668.141953] env[62070]: ERROR nova.compute.manager [instance: b0134b0f-23b4-4d34-b144-71ccdd9fba72] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 668.141953] env[62070]: ERROR nova.compute.manager [instance: b0134b0f-23b4-4d34-b144-71ccdd9fba72] result = function(*args, **kwargs) [ 668.141953] env[62070]: ERROR nova.compute.manager [instance: b0134b0f-23b4-4d34-b144-71ccdd9fba72] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 668.141953] env[62070]: ERROR nova.compute.manager [instance: b0134b0f-23b4-4d34-b144-71ccdd9fba72] return func(*args, **kwargs) [ 668.141953] env[62070]: ERROR nova.compute.manager [instance: b0134b0f-23b4-4d34-b144-71ccdd9fba72] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 668.141953] env[62070]: ERROR nova.compute.manager [instance: b0134b0f-23b4-4d34-b144-71ccdd9fba72] raise e [ 668.141953] env[62070]: ERROR nova.compute.manager [instance: b0134b0f-23b4-4d34-b144-71ccdd9fba72] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 668.141953] env[62070]: ERROR nova.compute.manager [instance: b0134b0f-23b4-4d34-b144-71ccdd9fba72] nwinfo = self.network_api.allocate_for_instance( [ 668.141953] env[62070]: ERROR nova.compute.manager [instance: b0134b0f-23b4-4d34-b144-71ccdd9fba72] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 668.141953] env[62070]: ERROR nova.compute.manager [instance: b0134b0f-23b4-4d34-b144-71ccdd9fba72] created_port_ids = self._update_ports_for_instance( [ 668.141953] env[62070]: ERROR nova.compute.manager [instance: b0134b0f-23b4-4d34-b144-71ccdd9fba72] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 668.141953] env[62070]: ERROR nova.compute.manager [instance: b0134b0f-23b4-4d34-b144-71ccdd9fba72] with excutils.save_and_reraise_exception(): [ 668.141953] env[62070]: ERROR nova.compute.manager [instance: b0134b0f-23b4-4d34-b144-71ccdd9fba72] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 668.142336] env[62070]: ERROR nova.compute.manager [instance: b0134b0f-23b4-4d34-b144-71ccdd9fba72] self.force_reraise() [ 668.142336] env[62070]: ERROR nova.compute.manager [instance: b0134b0f-23b4-4d34-b144-71ccdd9fba72] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 668.142336] env[62070]: ERROR nova.compute.manager [instance: b0134b0f-23b4-4d34-b144-71ccdd9fba72] 
raise self.value [ 668.142336] env[62070]: ERROR nova.compute.manager [instance: b0134b0f-23b4-4d34-b144-71ccdd9fba72] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 668.142336] env[62070]: ERROR nova.compute.manager [instance: b0134b0f-23b4-4d34-b144-71ccdd9fba72] updated_port = self._update_port( [ 668.142336] env[62070]: ERROR nova.compute.manager [instance: b0134b0f-23b4-4d34-b144-71ccdd9fba72] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 668.142336] env[62070]: ERROR nova.compute.manager [instance: b0134b0f-23b4-4d34-b144-71ccdd9fba72] _ensure_no_port_binding_failure(port) [ 668.142336] env[62070]: ERROR nova.compute.manager [instance: b0134b0f-23b4-4d34-b144-71ccdd9fba72] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 668.142336] env[62070]: ERROR nova.compute.manager [instance: b0134b0f-23b4-4d34-b144-71ccdd9fba72] raise exception.PortBindingFailed(port_id=port['id']) [ 668.142336] env[62070]: ERROR nova.compute.manager [instance: b0134b0f-23b4-4d34-b144-71ccdd9fba72] nova.exception.PortBindingFailed: Binding failed for port 18b8d97b-3f4c-445e-8136-989a05f41994, please check neutron logs for more information. [ 668.142336] env[62070]: ERROR nova.compute.manager [instance: b0134b0f-23b4-4d34-b144-71ccdd9fba72] [ 668.142724] env[62070]: DEBUG nova.compute.utils [None req-8576b7f8-0e27-4444-92a4-eb32008ddebf tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] [instance: b0134b0f-23b4-4d34-b144-71ccdd9fba72] Binding failed for port 18b8d97b-3f4c-445e-8136-989a05f41994, please check neutron logs for more information. {{(pid=62070) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 668.143144] env[62070]: DEBUG oslo_concurrency.lockutils [None req-1a6044f5-8e8b-4bc9-a7d0-857f0899f9f6 tempest-DeleteServersAdminTestJSON-428050376 tempest-DeleteServersAdminTestJSON-428050376-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.091s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 668.144740] env[62070]: INFO nova.compute.claims [None req-1a6044f5-8e8b-4bc9-a7d0-857f0899f9f6 tempest-DeleteServersAdminTestJSON-428050376 tempest-DeleteServersAdminTestJSON-428050376-project-member] [instance: a71c58e7-89db-4ad2-92e0-5379b04b751c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 668.147532] env[62070]: DEBUG nova.compute.manager [None req-8576b7f8-0e27-4444-92a4-eb32008ddebf tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] [instance: b0134b0f-23b4-4d34-b144-71ccdd9fba72] Build of instance b0134b0f-23b4-4d34-b144-71ccdd9fba72 was re-scheduled: Binding failed for port 18b8d97b-3f4c-445e-8136-989a05f41994, please check neutron logs for more information. 
{{(pid=62070) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 668.147945] env[62070]: DEBUG nova.compute.manager [None req-8576b7f8-0e27-4444-92a4-eb32008ddebf tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] [instance: b0134b0f-23b4-4d34-b144-71ccdd9fba72] Unplugging VIFs for instance {{(pid=62070) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 668.148185] env[62070]: DEBUG oslo_concurrency.lockutils [None req-8576b7f8-0e27-4444-92a4-eb32008ddebf tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] Acquiring lock "refresh_cache-b0134b0f-23b4-4d34-b144-71ccdd9fba72" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 668.148329] env[62070]: DEBUG oslo_concurrency.lockutils [None req-8576b7f8-0e27-4444-92a4-eb32008ddebf tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] Acquired lock "refresh_cache-b0134b0f-23b4-4d34-b144-71ccdd9fba72" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 668.148488] env[62070]: DEBUG nova.network.neutron [None req-8576b7f8-0e27-4444-92a4-eb32008ddebf tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] [instance: b0134b0f-23b4-4d34-b144-71ccdd9fba72] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 668.193062] env[62070]: DEBUG oslo_concurrency.lockutils [None req-75e4eab2-d509-4e0b-8199-5c5644d968e3 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Releasing lock "refresh_cache-5da19104-b163-44cd-bb1f-68c4eb316ac1" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 668.193587] env[62070]: DEBUG nova.compute.manager [None req-75e4eab2-d509-4e0b-8199-5c5644d968e3 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 5da19104-b163-44cd-bb1f-68c4eb316ac1] Start destroying the instance on the hypervisor. 
{{(pid=62070) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 668.193800] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-75e4eab2-d509-4e0b-8199-5c5644d968e3 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 5da19104-b163-44cd-bb1f-68c4eb316ac1] Destroying instance {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 668.194073] env[62070]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-04508bee-5e64-4950-890c-80dfec1e507f {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.202883] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d76ab6ae-0dfa-4860-adeb-e1f2b91bc443 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.224796] env[62070]: WARNING nova.virt.vmwareapi.vmops [None req-75e4eab2-d509-4e0b-8199-5c5644d968e3 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 5da19104-b163-44cd-bb1f-68c4eb316ac1] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 5da19104-b163-44cd-bb1f-68c4eb316ac1 could not be found. [ 668.225049] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-75e4eab2-d509-4e0b-8199-5c5644d968e3 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 5da19104-b163-44cd-bb1f-68c4eb316ac1] Instance destroyed {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 668.225234] env[62070]: INFO nova.compute.manager [None req-75e4eab2-d509-4e0b-8199-5c5644d968e3 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 5da19104-b163-44cd-bb1f-68c4eb316ac1] Took 0.03 seconds to destroy the instance on the hypervisor. [ 668.225491] env[62070]: DEBUG oslo.service.loopingcall [None req-75e4eab2-d509-4e0b-8199-5c5644d968e3 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 668.225731] env[62070]: DEBUG nova.compute.manager [-] [instance: 5da19104-b163-44cd-bb1f-68c4eb316ac1] Deallocating network for instance {{(pid=62070) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 668.225821] env[62070]: DEBUG nova.network.neutron [-] [instance: 5da19104-b163-44cd-bb1f-68c4eb316ac1] deallocate_for_instance() {{(pid=62070) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 668.245396] env[62070]: DEBUG nova.network.neutron [-] [instance: 5da19104-b163-44cd-bb1f-68c4eb316ac1] Instance cache missing network info. {{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 668.341681] env[62070]: INFO nova.compute.manager [None req-c926e67b-5387-49dc-b89c-40a189a7f3d7 tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] [instance: 95edf3d1-a987-4768-93be-1e045d7bfa99] Took 1.02 seconds to deallocate network for instance. 
[ 668.668955] env[62070]: DEBUG nova.network.neutron [None req-8576b7f8-0e27-4444-92a4-eb32008ddebf tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] [instance: b0134b0f-23b4-4d34-b144-71ccdd9fba72] Instance cache missing network info. {{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 668.749587] env[62070]: DEBUG nova.network.neutron [-] [instance: 5da19104-b163-44cd-bb1f-68c4eb316ac1] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 668.749798] env[62070]: DEBUG nova.network.neutron [None req-8576b7f8-0e27-4444-92a4-eb32008ddebf tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] [instance: b0134b0f-23b4-4d34-b144-71ccdd9fba72] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 669.253460] env[62070]: INFO nova.compute.manager [-] [instance: 5da19104-b163-44cd-bb1f-68c4eb316ac1] Took 1.03 seconds to deallocate network for instance. [ 669.253821] env[62070]: DEBUG oslo_concurrency.lockutils [None req-8576b7f8-0e27-4444-92a4-eb32008ddebf tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] Releasing lock "refresh_cache-b0134b0f-23b4-4d34-b144-71ccdd9fba72" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 669.254038] env[62070]: DEBUG nova.compute.manager [None req-8576b7f8-0e27-4444-92a4-eb32008ddebf tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62070) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 669.254216] env[62070]: DEBUG nova.compute.manager [None req-8576b7f8-0e27-4444-92a4-eb32008ddebf tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] [instance: b0134b0f-23b4-4d34-b144-71ccdd9fba72] Deallocating network for instance {{(pid=62070) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 669.254379] env[62070]: DEBUG nova.network.neutron [None req-8576b7f8-0e27-4444-92a4-eb32008ddebf tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] [instance: b0134b0f-23b4-4d34-b144-71ccdd9fba72] deallocate_for_instance() {{(pid=62070) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 669.258119] env[62070]: DEBUG nova.compute.claims [None req-75e4eab2-d509-4e0b-8199-5c5644d968e3 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 5da19104-b163-44cd-bb1f-68c4eb316ac1] Aborting claim: {{(pid=62070) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 669.258305] env[62070]: DEBUG oslo_concurrency.lockutils [None req-75e4eab2-d509-4e0b-8199-5c5644d968e3 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 669.275896] env[62070]: DEBUG nova.network.neutron [None req-8576b7f8-0e27-4444-92a4-eb32008ddebf tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] [instance: b0134b0f-23b4-4d34-b144-71ccdd9fba72] Instance cache missing network info. 
{{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 669.369035] env[62070]: INFO nova.scheduler.client.report [None req-c926e67b-5387-49dc-b89c-40a189a7f3d7 tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] Deleted allocations for instance 95edf3d1-a987-4768-93be-1e045d7bfa99 [ 669.510051] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-860e73e7-c6de-40ce-ad94-e34022bb82ad {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.518068] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddd2fa3e-44b8-41b5-934f-519403b2dd29 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.547061] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de557413-dd9b-44f6-9d5a-2025046295fc {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.554276] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca9c86d3-c01a-4471-b06a-efc09c776633 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.566966] env[62070]: DEBUG nova.compute.provider_tree [None req-1a6044f5-8e8b-4bc9-a7d0-857f0899f9f6 tempest-DeleteServersAdminTestJSON-428050376 tempest-DeleteServersAdminTestJSON-428050376-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 669.781025] env[62070]: DEBUG nova.network.neutron [None req-8576b7f8-0e27-4444-92a4-eb32008ddebf tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] [instance: b0134b0f-23b4-4d34-b144-71ccdd9fba72] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 669.879970] env[62070]: DEBUG oslo_concurrency.lockutils [None req-c926e67b-5387-49dc-b89c-40a189a7f3d7 tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] Lock "95edf3d1-a987-4768-93be-1e045d7bfa99" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 85.526s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 670.070079] env[62070]: DEBUG nova.scheduler.client.report [None req-1a6044f5-8e8b-4bc9-a7d0-857f0899f9f6 tempest-DeleteServersAdminTestJSON-428050376 tempest-DeleteServersAdminTestJSON-428050376-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 670.283154] env[62070]: INFO nova.compute.manager [None 
req-8576b7f8-0e27-4444-92a4-eb32008ddebf tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] [instance: b0134b0f-23b4-4d34-b144-71ccdd9fba72] Took 1.03 seconds to deallocate network for instance. [ 670.382258] env[62070]: DEBUG nova.compute.manager [None req-1df742cb-33a0-4f39-9606-d3eecf96e864 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] [instance: d148d561-3211-4f1f-965a-f2b14cd60b11] Starting instance... {{(pid=62070) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 670.574828] env[62070]: DEBUG oslo_concurrency.lockutils [None req-1a6044f5-8e8b-4bc9-a7d0-857f0899f9f6 tempest-DeleteServersAdminTestJSON-428050376 tempest-DeleteServersAdminTestJSON-428050376-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.432s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 670.575415] env[62070]: DEBUG nova.compute.manager [None req-1a6044f5-8e8b-4bc9-a7d0-857f0899f9f6 tempest-DeleteServersAdminTestJSON-428050376 tempest-DeleteServersAdminTestJSON-428050376-project-member] [instance: a71c58e7-89db-4ad2-92e0-5379b04b751c] Start building networks asynchronously for instance. {{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 670.577976] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 14.821s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 670.578195] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 670.578348] env[62070]: DEBUG nova.compute.resource_tracker [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62070) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 670.579133] env[62070]: DEBUG oslo_concurrency.lockutils [None req-a4dda7df-1307-4390-b3d0-8b40b9135ae8 tempest-ImagesOneServerTestJSON-651636145 tempest-ImagesOneServerTestJSON-651636145-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 14.607s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 670.581950] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa04a9b3-9fd8-498a-abca-9dd895bb97dd {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.590486] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-889dac4d-22cc-4837-a265-aa25c618f3e2 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.604243] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-c919e74b-5ac3-4e44-8fdc-a7cd4cb35f85 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.611065] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d944892-b2fa-4a99-83a0-ca5a0890a0ba {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.641987] env[62070]: DEBUG nova.compute.resource_tracker [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181579MB free_disk=169GB free_vcpus=48 pci_devices=None {{(pid=62070) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 670.642175] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 670.908430] env[62070]: DEBUG oslo_concurrency.lockutils [None req-1df742cb-33a0-4f39-9606-d3eecf96e864 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 671.084371] env[62070]: DEBUG nova.compute.utils [None req-1a6044f5-8e8b-4bc9-a7d0-857f0899f9f6 tempest-DeleteServersAdminTestJSON-428050376 tempest-DeleteServersAdminTestJSON-428050376-project-member] Using /dev/sd instead of None {{(pid=62070) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 671.085982] env[62070]: DEBUG nova.compute.manager [None req-1a6044f5-8e8b-4bc9-a7d0-857f0899f9f6 tempest-DeleteServersAdminTestJSON-428050376 tempest-DeleteServersAdminTestJSON-428050376-project-member] [instance: a71c58e7-89db-4ad2-92e0-5379b04b751c] Allocating IP information in the background. 
{{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 671.086178] env[62070]: DEBUG nova.network.neutron [None req-1a6044f5-8e8b-4bc9-a7d0-857f0899f9f6 tempest-DeleteServersAdminTestJSON-428050376 tempest-DeleteServersAdminTestJSON-428050376-project-member] [instance: a71c58e7-89db-4ad2-92e0-5379b04b751c] allocate_for_instance() {{(pid=62070) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 671.145809] env[62070]: DEBUG nova.policy [None req-1a6044f5-8e8b-4bc9-a7d0-857f0899f9f6 tempest-DeleteServersAdminTestJSON-428050376 tempest-DeleteServersAdminTestJSON-428050376-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5594d22d582140578bccd3581fd610b6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6915fe8f9a5a407b92aa3f69ce007be1', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62070) authorize /opt/stack/nova/nova/policy.py:201}} [ 671.322239] env[62070]: INFO nova.scheduler.client.report [None req-8576b7f8-0e27-4444-92a4-eb32008ddebf tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] Deleted allocations for instance b0134b0f-23b4-4d34-b144-71ccdd9fba72 [ 671.502951] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-260ca9fc-8230-4092-b05e-bb652cee3287 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.510988] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63fa9494-5aa8-4fcf-a7b3-180aa2b3c72c {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.544633] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad5a0972-cedf-498b-9901-211c3c962cdc {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.552244] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ead3d760-6d6b-40fc-8b11-0adc4cda7d61 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.566760] env[62070]: DEBUG nova.compute.provider_tree [None req-a4dda7df-1307-4390-b3d0-8b40b9135ae8 tempest-ImagesOneServerTestJSON-651636145 tempest-ImagesOneServerTestJSON-651636145-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 671.591298] env[62070]: DEBUG nova.compute.manager [None req-1a6044f5-8e8b-4bc9-a7d0-857f0899f9f6 tempest-DeleteServersAdminTestJSON-428050376 tempest-DeleteServersAdminTestJSON-428050376-project-member] [instance: a71c58e7-89db-4ad2-92e0-5379b04b751c] Start building block device mappings for instance. 
{{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 671.631215] env[62070]: DEBUG nova.network.neutron [None req-1a6044f5-8e8b-4bc9-a7d0-857f0899f9f6 tempest-DeleteServersAdminTestJSON-428050376 tempest-DeleteServersAdminTestJSON-428050376-project-member] [instance: a71c58e7-89db-4ad2-92e0-5379b04b751c] Successfully created port: 8470f264-09c8-4817-a711-e8ac92df552d {{(pid=62070) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 671.835531] env[62070]: DEBUG oslo_concurrency.lockutils [None req-8576b7f8-0e27-4444-92a4-eb32008ddebf tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] Lock "b0134b0f-23b4-4d34-b144-71ccdd9fba72" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 86.408s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 672.073515] env[62070]: DEBUG nova.scheduler.client.report [None req-a4dda7df-1307-4390-b3d0-8b40b9135ae8 tempest-ImagesOneServerTestJSON-651636145 tempest-ImagesOneServerTestJSON-651636145-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 672.338214] env[62070]: DEBUG nova.compute.manager [None req-93feba92-6b0d-4708-bd35-b11f5f9670fd tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] [instance: 0ac963b1-120a-464b-8228-3393135dd182] Starting instance... {{(pid=62070) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 672.580872] env[62070]: DEBUG oslo_concurrency.lockutils [None req-a4dda7df-1307-4390-b3d0-8b40b9135ae8 tempest-ImagesOneServerTestJSON-651636145 tempest-ImagesOneServerTestJSON-651636145-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.002s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 672.581534] env[62070]: ERROR nova.compute.manager [None req-a4dda7df-1307-4390-b3d0-8b40b9135ae8 tempest-ImagesOneServerTestJSON-651636145 tempest-ImagesOneServerTestJSON-651636145-project-member] [instance: d41f73e0-a188-4cc4-8391-938178aad496] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port e9b88278-987c-4e58-b15c-0dc5b6239a91, please check neutron logs for more information. 
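The build failure above, and the traceback that follows, end in nova.exception.PortBindingFailed, which is raised when the port returned by Neutron reports a failed binding. A minimal sketch of that kind of check, with a locally defined exception standing in for Nova's and assuming the port dict carries the usual binding:vif_type field:

class PortBindingFailed(Exception):
    # Local stand-in for nova.exception.PortBindingFailed.
    def __init__(self, port_id):
        super().__init__('Binding failed for port %s, please check neutron '
                         'logs for more information.' % port_id)


def ensure_no_port_binding_failure(port):
    # Neutron marks a failed binding by returning the port with
    # binding:vif_type set to 'binding_failed'.
    if port.get('binding:vif_type') == 'binding_failed':
        raise PortBindingFailed(port_id=port['id'])

The full traceback for this failure follows.
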
[ 672.581534] env[62070]: ERROR nova.compute.manager [instance: d41f73e0-a188-4cc4-8391-938178aad496] Traceback (most recent call last): [ 672.581534] env[62070]: ERROR nova.compute.manager [instance: d41f73e0-a188-4cc4-8391-938178aad496] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 672.581534] env[62070]: ERROR nova.compute.manager [instance: d41f73e0-a188-4cc4-8391-938178aad496] self.driver.spawn(context, instance, image_meta, [ 672.581534] env[62070]: ERROR nova.compute.manager [instance: d41f73e0-a188-4cc4-8391-938178aad496] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 672.581534] env[62070]: ERROR nova.compute.manager [instance: d41f73e0-a188-4cc4-8391-938178aad496] self._vmops.spawn(context, instance, image_meta, injected_files, [ 672.581534] env[62070]: ERROR nova.compute.manager [instance: d41f73e0-a188-4cc4-8391-938178aad496] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 672.581534] env[62070]: ERROR nova.compute.manager [instance: d41f73e0-a188-4cc4-8391-938178aad496] vm_ref = self.build_virtual_machine(instance, [ 672.581534] env[62070]: ERROR nova.compute.manager [instance: d41f73e0-a188-4cc4-8391-938178aad496] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 672.581534] env[62070]: ERROR nova.compute.manager [instance: d41f73e0-a188-4cc4-8391-938178aad496] vif_infos = vmwarevif.get_vif_info(self._session, [ 672.581534] env[62070]: ERROR nova.compute.manager [instance: d41f73e0-a188-4cc4-8391-938178aad496] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 672.581874] env[62070]: ERROR nova.compute.manager [instance: d41f73e0-a188-4cc4-8391-938178aad496] for vif in network_info: [ 672.581874] env[62070]: ERROR nova.compute.manager [instance: d41f73e0-a188-4cc4-8391-938178aad496] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 672.581874] env[62070]: ERROR nova.compute.manager [instance: d41f73e0-a188-4cc4-8391-938178aad496] return self._sync_wrapper(fn, *args, **kwargs) [ 672.581874] env[62070]: ERROR nova.compute.manager [instance: d41f73e0-a188-4cc4-8391-938178aad496] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 672.581874] env[62070]: ERROR nova.compute.manager [instance: d41f73e0-a188-4cc4-8391-938178aad496] self.wait() [ 672.581874] env[62070]: ERROR nova.compute.manager [instance: d41f73e0-a188-4cc4-8391-938178aad496] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 672.581874] env[62070]: ERROR nova.compute.manager [instance: d41f73e0-a188-4cc4-8391-938178aad496] self[:] = self._gt.wait() [ 672.581874] env[62070]: ERROR nova.compute.manager [instance: d41f73e0-a188-4cc4-8391-938178aad496] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 672.581874] env[62070]: ERROR nova.compute.manager [instance: d41f73e0-a188-4cc4-8391-938178aad496] return self._exit_event.wait() [ 672.581874] env[62070]: ERROR nova.compute.manager [instance: d41f73e0-a188-4cc4-8391-938178aad496] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 672.581874] env[62070]: ERROR nova.compute.manager [instance: d41f73e0-a188-4cc4-8391-938178aad496] result = hub.switch() [ 672.581874] env[62070]: ERROR nova.compute.manager [instance: d41f73e0-a188-4cc4-8391-938178aad496] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
672.581874] env[62070]: ERROR nova.compute.manager [instance: d41f73e0-a188-4cc4-8391-938178aad496] return self.greenlet.switch() [ 672.582286] env[62070]: ERROR nova.compute.manager [instance: d41f73e0-a188-4cc4-8391-938178aad496] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 672.582286] env[62070]: ERROR nova.compute.manager [instance: d41f73e0-a188-4cc4-8391-938178aad496] result = function(*args, **kwargs) [ 672.582286] env[62070]: ERROR nova.compute.manager [instance: d41f73e0-a188-4cc4-8391-938178aad496] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 672.582286] env[62070]: ERROR nova.compute.manager [instance: d41f73e0-a188-4cc4-8391-938178aad496] return func(*args, **kwargs) [ 672.582286] env[62070]: ERROR nova.compute.manager [instance: d41f73e0-a188-4cc4-8391-938178aad496] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 672.582286] env[62070]: ERROR nova.compute.manager [instance: d41f73e0-a188-4cc4-8391-938178aad496] raise e [ 672.582286] env[62070]: ERROR nova.compute.manager [instance: d41f73e0-a188-4cc4-8391-938178aad496] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 672.582286] env[62070]: ERROR nova.compute.manager [instance: d41f73e0-a188-4cc4-8391-938178aad496] nwinfo = self.network_api.allocate_for_instance( [ 672.582286] env[62070]: ERROR nova.compute.manager [instance: d41f73e0-a188-4cc4-8391-938178aad496] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 672.582286] env[62070]: ERROR nova.compute.manager [instance: d41f73e0-a188-4cc4-8391-938178aad496] created_port_ids = self._update_ports_for_instance( [ 672.582286] env[62070]: ERROR nova.compute.manager [instance: d41f73e0-a188-4cc4-8391-938178aad496] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 672.582286] env[62070]: ERROR nova.compute.manager [instance: d41f73e0-a188-4cc4-8391-938178aad496] with excutils.save_and_reraise_exception(): [ 672.582286] env[62070]: ERROR nova.compute.manager [instance: d41f73e0-a188-4cc4-8391-938178aad496] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 672.582640] env[62070]: ERROR nova.compute.manager [instance: d41f73e0-a188-4cc4-8391-938178aad496] self.force_reraise() [ 672.582640] env[62070]: ERROR nova.compute.manager [instance: d41f73e0-a188-4cc4-8391-938178aad496] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 672.582640] env[62070]: ERROR nova.compute.manager [instance: d41f73e0-a188-4cc4-8391-938178aad496] raise self.value [ 672.582640] env[62070]: ERROR nova.compute.manager [instance: d41f73e0-a188-4cc4-8391-938178aad496] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 672.582640] env[62070]: ERROR nova.compute.manager [instance: d41f73e0-a188-4cc4-8391-938178aad496] updated_port = self._update_port( [ 672.582640] env[62070]: ERROR nova.compute.manager [instance: d41f73e0-a188-4cc4-8391-938178aad496] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 672.582640] env[62070]: ERROR nova.compute.manager [instance: d41f73e0-a188-4cc4-8391-938178aad496] _ensure_no_port_binding_failure(port) [ 672.582640] env[62070]: ERROR nova.compute.manager [instance: d41f73e0-a188-4cc4-8391-938178aad496] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 672.582640] env[62070]: ERROR nova.compute.manager [instance: d41f73e0-a188-4cc4-8391-938178aad496] raise exception.PortBindingFailed(port_id=port['id']) [ 672.582640] env[62070]: ERROR nova.compute.manager [instance: d41f73e0-a188-4cc4-8391-938178aad496] nova.exception.PortBindingFailed: Binding failed for port e9b88278-987c-4e58-b15c-0dc5b6239a91, please check neutron logs for more information. [ 672.582640] env[62070]: ERROR nova.compute.manager [instance: d41f73e0-a188-4cc4-8391-938178aad496] [ 672.583044] env[62070]: DEBUG nova.compute.utils [None req-a4dda7df-1307-4390-b3d0-8b40b9135ae8 tempest-ImagesOneServerTestJSON-651636145 tempest-ImagesOneServerTestJSON-651636145-project-member] [instance: d41f73e0-a188-4cc4-8391-938178aad496] Binding failed for port e9b88278-987c-4e58-b15c-0dc5b6239a91, please check neutron logs for more information. {{(pid=62070) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 672.583534] env[62070]: DEBUG oslo_concurrency.lockutils [None req-5085d6a6-6f80-4417-bbf7-554feeae07b1 tempest-ServerActionsTestOtherB-843110490 tempest-ServerActionsTestOtherB-843110490-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 15.175s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 672.588041] env[62070]: DEBUG nova.compute.manager [None req-a4dda7df-1307-4390-b3d0-8b40b9135ae8 tempest-ImagesOneServerTestJSON-651636145 tempest-ImagesOneServerTestJSON-651636145-project-member] [instance: d41f73e0-a188-4cc4-8391-938178aad496] Build of instance d41f73e0-a188-4cc4-8391-938178aad496 was re-scheduled: Binding failed for port e9b88278-987c-4e58-b15c-0dc5b6239a91, please check neutron logs for more information. 
{{(pid=62070) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 672.588645] env[62070]: DEBUG nova.compute.manager [None req-a4dda7df-1307-4390-b3d0-8b40b9135ae8 tempest-ImagesOneServerTestJSON-651636145 tempest-ImagesOneServerTestJSON-651636145-project-member] [instance: d41f73e0-a188-4cc4-8391-938178aad496] Unplugging VIFs for instance {{(pid=62070) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 672.588883] env[62070]: DEBUG oslo_concurrency.lockutils [None req-a4dda7df-1307-4390-b3d0-8b40b9135ae8 tempest-ImagesOneServerTestJSON-651636145 tempest-ImagesOneServerTestJSON-651636145-project-member] Acquiring lock "refresh_cache-d41f73e0-a188-4cc4-8391-938178aad496" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 672.589050] env[62070]: DEBUG oslo_concurrency.lockutils [None req-a4dda7df-1307-4390-b3d0-8b40b9135ae8 tempest-ImagesOneServerTestJSON-651636145 tempest-ImagesOneServerTestJSON-651636145-project-member] Acquired lock "refresh_cache-d41f73e0-a188-4cc4-8391-938178aad496" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 672.589218] env[62070]: DEBUG nova.network.neutron [None req-a4dda7df-1307-4390-b3d0-8b40b9135ae8 tempest-ImagesOneServerTestJSON-651636145 tempest-ImagesOneServerTestJSON-651636145-project-member] [instance: d41f73e0-a188-4cc4-8391-938178aad496] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 672.610012] env[62070]: DEBUG nova.compute.manager [None req-1a6044f5-8e8b-4bc9-a7d0-857f0899f9f6 tempest-DeleteServersAdminTestJSON-428050376 tempest-DeleteServersAdminTestJSON-428050376-project-member] [instance: a71c58e7-89db-4ad2-92e0-5379b04b751c] Start spawning the instance on the hypervisor. 
{{(pid=62070) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 672.632712] env[62070]: DEBUG nova.virt.hardware [None req-1a6044f5-8e8b-4bc9-a7d0-857f0899f9f6 tempest-DeleteServersAdminTestJSON-428050376 tempest-DeleteServersAdminTestJSON-428050376-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T09:21:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T09:21:17Z,direct_url=,disk_format='vmdk',id=43ea607c-7ece-4601-9b11-75c6a16aa7dd,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9d42cb2bbadf40d6b35f237f71234611',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T09:21:18Z,virtual_size=,visibility=), allow threads: False {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 672.632973] env[62070]: DEBUG nova.virt.hardware [None req-1a6044f5-8e8b-4bc9-a7d0-857f0899f9f6 tempest-DeleteServersAdminTestJSON-428050376 tempest-DeleteServersAdminTestJSON-428050376-project-member] Flavor limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 672.633357] env[62070]: DEBUG nova.virt.hardware [None req-1a6044f5-8e8b-4bc9-a7d0-857f0899f9f6 tempest-DeleteServersAdminTestJSON-428050376 tempest-DeleteServersAdminTestJSON-428050376-project-member] Image limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 672.633570] env[62070]: DEBUG nova.virt.hardware [None req-1a6044f5-8e8b-4bc9-a7d0-857f0899f9f6 tempest-DeleteServersAdminTestJSON-428050376 tempest-DeleteServersAdminTestJSON-428050376-project-member] Flavor pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 672.633720] env[62070]: DEBUG nova.virt.hardware [None req-1a6044f5-8e8b-4bc9-a7d0-857f0899f9f6 tempest-DeleteServersAdminTestJSON-428050376 tempest-DeleteServersAdminTestJSON-428050376-project-member] Image pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 672.633868] env[62070]: DEBUG nova.virt.hardware [None req-1a6044f5-8e8b-4bc9-a7d0-857f0899f9f6 tempest-DeleteServersAdminTestJSON-428050376 tempest-DeleteServersAdminTestJSON-428050376-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 672.634094] env[62070]: DEBUG nova.virt.hardware [None req-1a6044f5-8e8b-4bc9-a7d0-857f0899f9f6 tempest-DeleteServersAdminTestJSON-428050376 tempest-DeleteServersAdminTestJSON-428050376-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 672.634261] env[62070]: DEBUG nova.virt.hardware [None req-1a6044f5-8e8b-4bc9-a7d0-857f0899f9f6 tempest-DeleteServersAdminTestJSON-428050376 tempest-DeleteServersAdminTestJSON-428050376-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 
672.634522] env[62070]: DEBUG nova.virt.hardware [None req-1a6044f5-8e8b-4bc9-a7d0-857f0899f9f6 tempest-DeleteServersAdminTestJSON-428050376 tempest-DeleteServersAdminTestJSON-428050376-project-member] Got 1 possible topologies {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 672.634697] env[62070]: DEBUG nova.virt.hardware [None req-1a6044f5-8e8b-4bc9-a7d0-857f0899f9f6 tempest-DeleteServersAdminTestJSON-428050376 tempest-DeleteServersAdminTestJSON-428050376-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 672.635292] env[62070]: DEBUG nova.virt.hardware [None req-1a6044f5-8e8b-4bc9-a7d0-857f0899f9f6 tempest-DeleteServersAdminTestJSON-428050376 tempest-DeleteServersAdminTestJSON-428050376-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 672.636467] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18250ffd-8f44-4d2e-b0d3-20b6da6dd17e {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.646016] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6111728e-185d-4531-8ea5-742a471a0148 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.747790] env[62070]: ERROR nova.compute.manager [None req-1a6044f5-8e8b-4bc9-a7d0-857f0899f9f6 tempest-DeleteServersAdminTestJSON-428050376 tempest-DeleteServersAdminTestJSON-428050376-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 8470f264-09c8-4817-a711-e8ac92df552d, please check neutron logs for more information. 
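This second binding failure (port 8470f264-09c8-4817-a711-e8ac92df552d) surfaces through _allocate_network_async, and the traceback that follows shows the port update wrapped in oslo_utils.excutils.save_and_reraise_exception, which lets cleanup run and then re-raises the original error. A minimal sketch of that pattern, with hypothetical update_port() and cleanup_created_ports() helpers rather than Nova's own:

from oslo_utils import excutils


def update_port(port):
    # Hypothetical stand-in for the Neutron port update call.
    if port.get('binding:vif_type') == 'binding_failed':
        raise RuntimeError('port %s failed to bind' % port['id'])


def cleanup_created_ports(created_port_ids):
    # Hypothetical cleanup hook for ports created on behalf of the instance.
    pass


def update_ports_for_instance(ports, created_port_ids):
    for port in ports:
        try:
            update_port(port)
        except Exception:
            # Cleanup runs inside the context manager; the original
            # exception is re-raised automatically when the block exits.
            with excutils.save_and_reraise_exception():
                cleanup_created_ports(created_port_ids)
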
[ 672.747790] env[62070]: ERROR nova.compute.manager Traceback (most recent call last): [ 672.747790] env[62070]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 672.747790] env[62070]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 672.747790] env[62070]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 672.747790] env[62070]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 672.747790] env[62070]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 672.747790] env[62070]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 672.747790] env[62070]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 672.747790] env[62070]: ERROR nova.compute.manager self.force_reraise() [ 672.747790] env[62070]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 672.747790] env[62070]: ERROR nova.compute.manager raise self.value [ 672.747790] env[62070]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 672.747790] env[62070]: ERROR nova.compute.manager updated_port = self._update_port( [ 672.747790] env[62070]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 672.747790] env[62070]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 672.748338] env[62070]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 672.748338] env[62070]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 672.748338] env[62070]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 8470f264-09c8-4817-a711-e8ac92df552d, please check neutron logs for more information. 
[ 672.748338] env[62070]: ERROR nova.compute.manager [ 672.748338] env[62070]: Traceback (most recent call last): [ 672.748338] env[62070]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 672.748338] env[62070]: listener.cb(fileno) [ 672.748338] env[62070]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 672.748338] env[62070]: result = function(*args, **kwargs) [ 672.748338] env[62070]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 672.748338] env[62070]: return func(*args, **kwargs) [ 672.748338] env[62070]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 672.748338] env[62070]: raise e [ 672.748338] env[62070]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 672.748338] env[62070]: nwinfo = self.network_api.allocate_for_instance( [ 672.748338] env[62070]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 672.748338] env[62070]: created_port_ids = self._update_ports_for_instance( [ 672.748338] env[62070]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 672.748338] env[62070]: with excutils.save_and_reraise_exception(): [ 672.748338] env[62070]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 672.748338] env[62070]: self.force_reraise() [ 672.748338] env[62070]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 672.748338] env[62070]: raise self.value [ 672.748338] env[62070]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 672.748338] env[62070]: updated_port = self._update_port( [ 672.748338] env[62070]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 672.748338] env[62070]: _ensure_no_port_binding_failure(port) [ 672.748338] env[62070]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 672.748338] env[62070]: raise exception.PortBindingFailed(port_id=port['id']) [ 672.751213] env[62070]: nova.exception.PortBindingFailed: Binding failed for port 8470f264-09c8-4817-a711-e8ac92df552d, please check neutron logs for more information. [ 672.751213] env[62070]: Removing descriptor: 14 [ 672.751213] env[62070]: ERROR nova.compute.manager [None req-1a6044f5-8e8b-4bc9-a7d0-857f0899f9f6 tempest-DeleteServersAdminTestJSON-428050376 tempest-DeleteServersAdminTestJSON-428050376-project-member] [instance: a71c58e7-89db-4ad2-92e0-5379b04b751c] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 8470f264-09c8-4817-a711-e8ac92df552d, please check neutron logs for more information. 
[ 672.751213] env[62070]: ERROR nova.compute.manager [instance: a71c58e7-89db-4ad2-92e0-5379b04b751c] Traceback (most recent call last): [ 672.751213] env[62070]: ERROR nova.compute.manager [instance: a71c58e7-89db-4ad2-92e0-5379b04b751c] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 672.751213] env[62070]: ERROR nova.compute.manager [instance: a71c58e7-89db-4ad2-92e0-5379b04b751c] yield resources [ 672.751213] env[62070]: ERROR nova.compute.manager [instance: a71c58e7-89db-4ad2-92e0-5379b04b751c] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 672.751213] env[62070]: ERROR nova.compute.manager [instance: a71c58e7-89db-4ad2-92e0-5379b04b751c] self.driver.spawn(context, instance, image_meta, [ 672.751213] env[62070]: ERROR nova.compute.manager [instance: a71c58e7-89db-4ad2-92e0-5379b04b751c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 672.751213] env[62070]: ERROR nova.compute.manager [instance: a71c58e7-89db-4ad2-92e0-5379b04b751c] self._vmops.spawn(context, instance, image_meta, injected_files, [ 672.751213] env[62070]: ERROR nova.compute.manager [instance: a71c58e7-89db-4ad2-92e0-5379b04b751c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 672.751213] env[62070]: ERROR nova.compute.manager [instance: a71c58e7-89db-4ad2-92e0-5379b04b751c] vm_ref = self.build_virtual_machine(instance, [ 672.752823] env[62070]: ERROR nova.compute.manager [instance: a71c58e7-89db-4ad2-92e0-5379b04b751c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 672.752823] env[62070]: ERROR nova.compute.manager [instance: a71c58e7-89db-4ad2-92e0-5379b04b751c] vif_infos = vmwarevif.get_vif_info(self._session, [ 672.752823] env[62070]: ERROR nova.compute.manager [instance: a71c58e7-89db-4ad2-92e0-5379b04b751c] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 672.752823] env[62070]: ERROR nova.compute.manager [instance: a71c58e7-89db-4ad2-92e0-5379b04b751c] for vif in network_info: [ 672.752823] env[62070]: ERROR nova.compute.manager [instance: a71c58e7-89db-4ad2-92e0-5379b04b751c] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 672.752823] env[62070]: ERROR nova.compute.manager [instance: a71c58e7-89db-4ad2-92e0-5379b04b751c] return self._sync_wrapper(fn, *args, **kwargs) [ 672.752823] env[62070]: ERROR nova.compute.manager [instance: a71c58e7-89db-4ad2-92e0-5379b04b751c] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 672.752823] env[62070]: ERROR nova.compute.manager [instance: a71c58e7-89db-4ad2-92e0-5379b04b751c] self.wait() [ 672.752823] env[62070]: ERROR nova.compute.manager [instance: a71c58e7-89db-4ad2-92e0-5379b04b751c] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 672.752823] env[62070]: ERROR nova.compute.manager [instance: a71c58e7-89db-4ad2-92e0-5379b04b751c] self[:] = self._gt.wait() [ 672.752823] env[62070]: ERROR nova.compute.manager [instance: a71c58e7-89db-4ad2-92e0-5379b04b751c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 672.752823] env[62070]: ERROR nova.compute.manager [instance: a71c58e7-89db-4ad2-92e0-5379b04b751c] return self._exit_event.wait() [ 672.752823] env[62070]: ERROR nova.compute.manager [instance: a71c58e7-89db-4ad2-92e0-5379b04b751c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 672.753242] env[62070]: ERROR 
nova.compute.manager [instance: a71c58e7-89db-4ad2-92e0-5379b04b751c] result = hub.switch() [ 672.753242] env[62070]: ERROR nova.compute.manager [instance: a71c58e7-89db-4ad2-92e0-5379b04b751c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 672.753242] env[62070]: ERROR nova.compute.manager [instance: a71c58e7-89db-4ad2-92e0-5379b04b751c] return self.greenlet.switch() [ 672.753242] env[62070]: ERROR nova.compute.manager [instance: a71c58e7-89db-4ad2-92e0-5379b04b751c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 672.753242] env[62070]: ERROR nova.compute.manager [instance: a71c58e7-89db-4ad2-92e0-5379b04b751c] result = function(*args, **kwargs) [ 672.753242] env[62070]: ERROR nova.compute.manager [instance: a71c58e7-89db-4ad2-92e0-5379b04b751c] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 672.753242] env[62070]: ERROR nova.compute.manager [instance: a71c58e7-89db-4ad2-92e0-5379b04b751c] return func(*args, **kwargs) [ 672.753242] env[62070]: ERROR nova.compute.manager [instance: a71c58e7-89db-4ad2-92e0-5379b04b751c] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 672.753242] env[62070]: ERROR nova.compute.manager [instance: a71c58e7-89db-4ad2-92e0-5379b04b751c] raise e [ 672.753242] env[62070]: ERROR nova.compute.manager [instance: a71c58e7-89db-4ad2-92e0-5379b04b751c] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 672.753242] env[62070]: ERROR nova.compute.manager [instance: a71c58e7-89db-4ad2-92e0-5379b04b751c] nwinfo = self.network_api.allocate_for_instance( [ 672.753242] env[62070]: ERROR nova.compute.manager [instance: a71c58e7-89db-4ad2-92e0-5379b04b751c] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 672.753242] env[62070]: ERROR nova.compute.manager [instance: a71c58e7-89db-4ad2-92e0-5379b04b751c] created_port_ids = self._update_ports_for_instance( [ 672.753551] env[62070]: ERROR nova.compute.manager [instance: a71c58e7-89db-4ad2-92e0-5379b04b751c] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 672.753551] env[62070]: ERROR nova.compute.manager [instance: a71c58e7-89db-4ad2-92e0-5379b04b751c] with excutils.save_and_reraise_exception(): [ 672.753551] env[62070]: ERROR nova.compute.manager [instance: a71c58e7-89db-4ad2-92e0-5379b04b751c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 672.753551] env[62070]: ERROR nova.compute.manager [instance: a71c58e7-89db-4ad2-92e0-5379b04b751c] self.force_reraise() [ 672.753551] env[62070]: ERROR nova.compute.manager [instance: a71c58e7-89db-4ad2-92e0-5379b04b751c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 672.753551] env[62070]: ERROR nova.compute.manager [instance: a71c58e7-89db-4ad2-92e0-5379b04b751c] raise self.value [ 672.753551] env[62070]: ERROR nova.compute.manager [instance: a71c58e7-89db-4ad2-92e0-5379b04b751c] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 672.753551] env[62070]: ERROR nova.compute.manager [instance: a71c58e7-89db-4ad2-92e0-5379b04b751c] updated_port = self._update_port( [ 672.753551] env[62070]: ERROR nova.compute.manager [instance: a71c58e7-89db-4ad2-92e0-5379b04b751c] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 672.753551] 
env[62070]: ERROR nova.compute.manager [instance: a71c58e7-89db-4ad2-92e0-5379b04b751c] _ensure_no_port_binding_failure(port) [ 672.753551] env[62070]: ERROR nova.compute.manager [instance: a71c58e7-89db-4ad2-92e0-5379b04b751c] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 672.753551] env[62070]: ERROR nova.compute.manager [instance: a71c58e7-89db-4ad2-92e0-5379b04b751c] raise exception.PortBindingFailed(port_id=port['id']) [ 672.753845] env[62070]: ERROR nova.compute.manager [instance: a71c58e7-89db-4ad2-92e0-5379b04b751c] nova.exception.PortBindingFailed: Binding failed for port 8470f264-09c8-4817-a711-e8ac92df552d, please check neutron logs for more information. [ 672.753845] env[62070]: ERROR nova.compute.manager [instance: a71c58e7-89db-4ad2-92e0-5379b04b751c] [ 672.753845] env[62070]: INFO nova.compute.manager [None req-1a6044f5-8e8b-4bc9-a7d0-857f0899f9f6 tempest-DeleteServersAdminTestJSON-428050376 tempest-DeleteServersAdminTestJSON-428050376-project-member] [instance: a71c58e7-89db-4ad2-92e0-5379b04b751c] Terminating instance [ 672.753845] env[62070]: DEBUG oslo_concurrency.lockutils [None req-1a6044f5-8e8b-4bc9-a7d0-857f0899f9f6 tempest-DeleteServersAdminTestJSON-428050376 tempest-DeleteServersAdminTestJSON-428050376-project-member] Acquiring lock "refresh_cache-a71c58e7-89db-4ad2-92e0-5379b04b751c" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 672.753845] env[62070]: DEBUG oslo_concurrency.lockutils [None req-1a6044f5-8e8b-4bc9-a7d0-857f0899f9f6 tempest-DeleteServersAdminTestJSON-428050376 tempest-DeleteServersAdminTestJSON-428050376-project-member] Acquired lock "refresh_cache-a71c58e7-89db-4ad2-92e0-5379b04b751c" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 672.753845] env[62070]: DEBUG nova.network.neutron [None req-1a6044f5-8e8b-4bc9-a7d0-857f0899f9f6 tempest-DeleteServersAdminTestJSON-428050376 tempest-DeleteServersAdminTestJSON-428050376-project-member] [instance: a71c58e7-89db-4ad2-92e0-5379b04b751c] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 672.827929] env[62070]: DEBUG nova.compute.manager [req-6f60cd79-9483-474b-a8ee-7657a9cd1171 req-7241090c-7e49-43a9-bd08-085d2365e9b9 service nova] [instance: a71c58e7-89db-4ad2-92e0-5379b04b751c] Received event network-changed-8470f264-09c8-4817-a711-e8ac92df552d {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 672.828207] env[62070]: DEBUG nova.compute.manager [req-6f60cd79-9483-474b-a8ee-7657a9cd1171 req-7241090c-7e49-43a9-bd08-085d2365e9b9 service nova] [instance: a71c58e7-89db-4ad2-92e0-5379b04b751c] Refreshing instance network info cache due to event network-changed-8470f264-09c8-4817-a711-e8ac92df552d. 
{{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 672.828458] env[62070]: DEBUG oslo_concurrency.lockutils [req-6f60cd79-9483-474b-a8ee-7657a9cd1171 req-7241090c-7e49-43a9-bd08-085d2365e9b9 service nova] Acquiring lock "refresh_cache-a71c58e7-89db-4ad2-92e0-5379b04b751c" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 672.870465] env[62070]: DEBUG oslo_concurrency.lockutils [None req-93feba92-6b0d-4708-bd35-b11f5f9670fd tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 673.120739] env[62070]: DEBUG nova.network.neutron [None req-a4dda7df-1307-4390-b3d0-8b40b9135ae8 tempest-ImagesOneServerTestJSON-651636145 tempest-ImagesOneServerTestJSON-651636145-project-member] [instance: d41f73e0-a188-4cc4-8391-938178aad496] Instance cache missing network info. {{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 673.240908] env[62070]: DEBUG nova.network.neutron [None req-a4dda7df-1307-4390-b3d0-8b40b9135ae8 tempest-ImagesOneServerTestJSON-651636145 tempest-ImagesOneServerTestJSON-651636145-project-member] [instance: d41f73e0-a188-4cc4-8391-938178aad496] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 673.279942] env[62070]: DEBUG nova.network.neutron [None req-1a6044f5-8e8b-4bc9-a7d0-857f0899f9f6 tempest-DeleteServersAdminTestJSON-428050376 tempest-DeleteServersAdminTestJSON-428050376-project-member] [instance: a71c58e7-89db-4ad2-92e0-5379b04b751c] Instance cache missing network info. 
{{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 673.286287] env[62070]: DEBUG oslo_concurrency.lockutils [None req-174e44be-5f42-4d72-b785-d7e1a5fd4e8e tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] Acquiring lock "1ce155c8-9a10-4eff-b428-31889aa8f638" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 673.286527] env[62070]: DEBUG oslo_concurrency.lockutils [None req-174e44be-5f42-4d72-b785-d7e1a5fd4e8e tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] Lock "1ce155c8-9a10-4eff-b428-31889aa8f638" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 673.339926] env[62070]: DEBUG nova.network.neutron [None req-1a6044f5-8e8b-4bc9-a7d0-857f0899f9f6 tempest-DeleteServersAdminTestJSON-428050376 tempest-DeleteServersAdminTestJSON-428050376-project-member] [instance: a71c58e7-89db-4ad2-92e0-5379b04b751c] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 673.436165] env[62070]: DEBUG oslo_concurrency.lockutils [None req-90df06cb-f655-463e-98fa-a0efa725c8cc tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] Acquiring lock "b7fdf23e-1e39-4745-ae84-38b7fa89aa5d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 673.436394] env[62070]: DEBUG oslo_concurrency.lockutils [None req-90df06cb-f655-463e-98fa-a0efa725c8cc tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] Lock "b7fdf23e-1e39-4745-ae84-38b7fa89aa5d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 673.518980] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c423e19-f35f-40a5-b8fe-9deb9e3cf49f {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.527068] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-767f9d79-f2f3-452d-9a8e-522b6ced456e {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.556546] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-110e4553-c47e-417e-b888-1c226d4a3545 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.564093] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca899f47-296d-4b2b-918c-adad9697b381 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.577557] env[62070]: DEBUG nova.compute.provider_tree [None 
req-5085d6a6-6f80-4417-bbf7-554feeae07b1 tempest-ServerActionsTestOtherB-843110490 tempest-ServerActionsTestOtherB-843110490-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 673.744780] env[62070]: DEBUG oslo_concurrency.lockutils [None req-a4dda7df-1307-4390-b3d0-8b40b9135ae8 tempest-ImagesOneServerTestJSON-651636145 tempest-ImagesOneServerTestJSON-651636145-project-member] Releasing lock "refresh_cache-d41f73e0-a188-4cc4-8391-938178aad496" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 673.745115] env[62070]: DEBUG nova.compute.manager [None req-a4dda7df-1307-4390-b3d0-8b40b9135ae8 tempest-ImagesOneServerTestJSON-651636145 tempest-ImagesOneServerTestJSON-651636145-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62070) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 673.745317] env[62070]: DEBUG nova.compute.manager [None req-a4dda7df-1307-4390-b3d0-8b40b9135ae8 tempest-ImagesOneServerTestJSON-651636145 tempest-ImagesOneServerTestJSON-651636145-project-member] [instance: d41f73e0-a188-4cc4-8391-938178aad496] Deallocating network for instance {{(pid=62070) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 673.745483] env[62070]: DEBUG nova.network.neutron [None req-a4dda7df-1307-4390-b3d0-8b40b9135ae8 tempest-ImagesOneServerTestJSON-651636145 tempest-ImagesOneServerTestJSON-651636145-project-member] [instance: d41f73e0-a188-4cc4-8391-938178aad496] deallocate_for_instance() {{(pid=62070) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 673.760113] env[62070]: DEBUG nova.network.neutron [None req-a4dda7df-1307-4390-b3d0-8b40b9135ae8 tempest-ImagesOneServerTestJSON-651636145 tempest-ImagesOneServerTestJSON-651636145-project-member] [instance: d41f73e0-a188-4cc4-8391-938178aad496] Instance cache missing network info. {{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 673.842482] env[62070]: DEBUG oslo_concurrency.lockutils [None req-1a6044f5-8e8b-4bc9-a7d0-857f0899f9f6 tempest-DeleteServersAdminTestJSON-428050376 tempest-DeleteServersAdminTestJSON-428050376-project-member] Releasing lock "refresh_cache-a71c58e7-89db-4ad2-92e0-5379b04b751c" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 673.842997] env[62070]: DEBUG nova.compute.manager [None req-1a6044f5-8e8b-4bc9-a7d0-857f0899f9f6 tempest-DeleteServersAdminTestJSON-428050376 tempest-DeleteServersAdminTestJSON-428050376-project-member] [instance: a71c58e7-89db-4ad2-92e0-5379b04b751c] Start destroying the instance on the hypervisor. 
{{(pid=62070) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 673.843262] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-1a6044f5-8e8b-4bc9-a7d0-857f0899f9f6 tempest-DeleteServersAdminTestJSON-428050376 tempest-DeleteServersAdminTestJSON-428050376-project-member] [instance: a71c58e7-89db-4ad2-92e0-5379b04b751c] Destroying instance {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 673.843633] env[62070]: DEBUG oslo_concurrency.lockutils [req-6f60cd79-9483-474b-a8ee-7657a9cd1171 req-7241090c-7e49-43a9-bd08-085d2365e9b9 service nova] Acquired lock "refresh_cache-a71c58e7-89db-4ad2-92e0-5379b04b751c" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 673.843859] env[62070]: DEBUG nova.network.neutron [req-6f60cd79-9483-474b-a8ee-7657a9cd1171 req-7241090c-7e49-43a9-bd08-085d2365e9b9 service nova] [instance: a71c58e7-89db-4ad2-92e0-5379b04b751c] Refreshing network info cache for port 8470f264-09c8-4817-a711-e8ac92df552d {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 673.845218] env[62070]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-aad6c5d1-dd24-4b35-a016-9cf39632f079 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.854676] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad51537d-8347-4a1c-b808-dc255505dec8 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.876932] env[62070]: WARNING nova.virt.vmwareapi.vmops [None req-1a6044f5-8e8b-4bc9-a7d0-857f0899f9f6 tempest-DeleteServersAdminTestJSON-428050376 tempest-DeleteServersAdminTestJSON-428050376-project-member] [instance: a71c58e7-89db-4ad2-92e0-5379b04b751c] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance a71c58e7-89db-4ad2-92e0-5379b04b751c could not be found. [ 673.877116] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-1a6044f5-8e8b-4bc9-a7d0-857f0899f9f6 tempest-DeleteServersAdminTestJSON-428050376 tempest-DeleteServersAdminTestJSON-428050376-project-member] [instance: a71c58e7-89db-4ad2-92e0-5379b04b751c] Instance destroyed {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 673.877304] env[62070]: INFO nova.compute.manager [None req-1a6044f5-8e8b-4bc9-a7d0-857f0899f9f6 tempest-DeleteServersAdminTestJSON-428050376 tempest-DeleteServersAdminTestJSON-428050376-project-member] [instance: a71c58e7-89db-4ad2-92e0-5379b04b751c] Took 0.03 seconds to destroy the instance on the hypervisor. [ 673.877530] env[62070]: DEBUG oslo.service.loopingcall [None req-1a6044f5-8e8b-4bc9-a7d0-857f0899f9f6 tempest-DeleteServersAdminTestJSON-428050376 tempest-DeleteServersAdminTestJSON-428050376-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return.
{{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 673.877718] env[62070]: DEBUG nova.compute.manager [-] [instance: a71c58e7-89db-4ad2-92e0-5379b04b751c] Deallocating network for instance {{(pid=62070) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 673.877808] env[62070]: DEBUG nova.network.neutron [-] [instance: a71c58e7-89db-4ad2-92e0-5379b04b751c] deallocate_for_instance() {{(pid=62070) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 673.891746] env[62070]: DEBUG nova.network.neutron [-] [instance: a71c58e7-89db-4ad2-92e0-5379b04b751c] Instance cache missing network info. {{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 674.080628] env[62070]: DEBUG nova.scheduler.client.report [None req-5085d6a6-6f80-4417-bbf7-554feeae07b1 tempest-ServerActionsTestOtherB-843110490 tempest-ServerActionsTestOtherB-843110490-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 674.264410] env[62070]: DEBUG nova.network.neutron [None req-a4dda7df-1307-4390-b3d0-8b40b9135ae8 tempest-ImagesOneServerTestJSON-651636145 tempest-ImagesOneServerTestJSON-651636145-project-member] [instance: d41f73e0-a188-4cc4-8391-938178aad496] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 674.362328] env[62070]: DEBUG nova.network.neutron [req-6f60cd79-9483-474b-a8ee-7657a9cd1171 req-7241090c-7e49-43a9-bd08-085d2365e9b9 service nova] [instance: a71c58e7-89db-4ad2-92e0-5379b04b751c] Instance cache missing network info. 
{{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 674.393924] env[62070]: DEBUG nova.network.neutron [-] [instance: a71c58e7-89db-4ad2-92e0-5379b04b751c] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 674.422549] env[62070]: DEBUG nova.network.neutron [req-6f60cd79-9483-474b-a8ee-7657a9cd1171 req-7241090c-7e49-43a9-bd08-085d2365e9b9 service nova] [instance: a71c58e7-89db-4ad2-92e0-5379b04b751c] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 674.585545] env[62070]: DEBUG oslo_concurrency.lockutils [None req-5085d6a6-6f80-4417-bbf7-554feeae07b1 tempest-ServerActionsTestOtherB-843110490 tempest-ServerActionsTestOtherB-843110490-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.002s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 674.586551] env[62070]: ERROR nova.compute.manager [None req-5085d6a6-6f80-4417-bbf7-554feeae07b1 tempest-ServerActionsTestOtherB-843110490 tempest-ServerActionsTestOtherB-843110490-project-member] [instance: a5b98f92-d287-4d40-8a21-d2de64026970] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port bfdbfcd7-c0eb-419c-a533-8f9444d52664, please check neutron logs for more information. [ 674.586551] env[62070]: ERROR nova.compute.manager [instance: a5b98f92-d287-4d40-8a21-d2de64026970] Traceback (most recent call last): [ 674.586551] env[62070]: ERROR nova.compute.manager [instance: a5b98f92-d287-4d40-8a21-d2de64026970] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 674.586551] env[62070]: ERROR nova.compute.manager [instance: a5b98f92-d287-4d40-8a21-d2de64026970] self.driver.spawn(context, instance, image_meta, [ 674.586551] env[62070]: ERROR nova.compute.manager [instance: a5b98f92-d287-4d40-8a21-d2de64026970] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 674.586551] env[62070]: ERROR nova.compute.manager [instance: a5b98f92-d287-4d40-8a21-d2de64026970] self._vmops.spawn(context, instance, image_meta, injected_files, [ 674.586551] env[62070]: ERROR nova.compute.manager [instance: a5b98f92-d287-4d40-8a21-d2de64026970] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 674.586551] env[62070]: ERROR nova.compute.manager [instance: a5b98f92-d287-4d40-8a21-d2de64026970] vm_ref = self.build_virtual_machine(instance, [ 674.586551] env[62070]: ERROR nova.compute.manager [instance: a5b98f92-d287-4d40-8a21-d2de64026970] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 674.586551] env[62070]: ERROR nova.compute.manager [instance: a5b98f92-d287-4d40-8a21-d2de64026970] vif_infos = vmwarevif.get_vif_info(self._session, [ 674.586551] env[62070]: ERROR nova.compute.manager [instance: a5b98f92-d287-4d40-8a21-d2de64026970] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 674.586920] env[62070]: ERROR nova.compute.manager [instance: a5b98f92-d287-4d40-8a21-d2de64026970] for vif in network_info: [ 674.586920] env[62070]: ERROR nova.compute.manager [instance: a5b98f92-d287-4d40-8a21-d2de64026970] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 674.586920] env[62070]: ERROR 
nova.compute.manager [instance: a5b98f92-d287-4d40-8a21-d2de64026970] return self._sync_wrapper(fn, *args, **kwargs) [ 674.586920] env[62070]: ERROR nova.compute.manager [instance: a5b98f92-d287-4d40-8a21-d2de64026970] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 674.586920] env[62070]: ERROR nova.compute.manager [instance: a5b98f92-d287-4d40-8a21-d2de64026970] self.wait() [ 674.586920] env[62070]: ERROR nova.compute.manager [instance: a5b98f92-d287-4d40-8a21-d2de64026970] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 674.586920] env[62070]: ERROR nova.compute.manager [instance: a5b98f92-d287-4d40-8a21-d2de64026970] self[:] = self._gt.wait() [ 674.586920] env[62070]: ERROR nova.compute.manager [instance: a5b98f92-d287-4d40-8a21-d2de64026970] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 674.586920] env[62070]: ERROR nova.compute.manager [instance: a5b98f92-d287-4d40-8a21-d2de64026970] return self._exit_event.wait() [ 674.586920] env[62070]: ERROR nova.compute.manager [instance: a5b98f92-d287-4d40-8a21-d2de64026970] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 674.586920] env[62070]: ERROR nova.compute.manager [instance: a5b98f92-d287-4d40-8a21-d2de64026970] result = hub.switch() [ 674.586920] env[62070]: ERROR nova.compute.manager [instance: a5b98f92-d287-4d40-8a21-d2de64026970] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 674.586920] env[62070]: ERROR nova.compute.manager [instance: a5b98f92-d287-4d40-8a21-d2de64026970] return self.greenlet.switch() [ 674.587322] env[62070]: ERROR nova.compute.manager [instance: a5b98f92-d287-4d40-8a21-d2de64026970] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 674.587322] env[62070]: ERROR nova.compute.manager [instance: a5b98f92-d287-4d40-8a21-d2de64026970] result = function(*args, **kwargs) [ 674.587322] env[62070]: ERROR nova.compute.manager [instance: a5b98f92-d287-4d40-8a21-d2de64026970] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 674.587322] env[62070]: ERROR nova.compute.manager [instance: a5b98f92-d287-4d40-8a21-d2de64026970] return func(*args, **kwargs) [ 674.587322] env[62070]: ERROR nova.compute.manager [instance: a5b98f92-d287-4d40-8a21-d2de64026970] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 674.587322] env[62070]: ERROR nova.compute.manager [instance: a5b98f92-d287-4d40-8a21-d2de64026970] raise e [ 674.587322] env[62070]: ERROR nova.compute.manager [instance: a5b98f92-d287-4d40-8a21-d2de64026970] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 674.587322] env[62070]: ERROR nova.compute.manager [instance: a5b98f92-d287-4d40-8a21-d2de64026970] nwinfo = self.network_api.allocate_for_instance( [ 674.587322] env[62070]: ERROR nova.compute.manager [instance: a5b98f92-d287-4d40-8a21-d2de64026970] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 674.587322] env[62070]: ERROR nova.compute.manager [instance: a5b98f92-d287-4d40-8a21-d2de64026970] created_port_ids = self._update_ports_for_instance( [ 674.587322] env[62070]: ERROR nova.compute.manager [instance: a5b98f92-d287-4d40-8a21-d2de64026970] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 674.587322] env[62070]: ERROR nova.compute.manager 
[instance: a5b98f92-d287-4d40-8a21-d2de64026970] with excutils.save_and_reraise_exception(): [ 674.587322] env[62070]: ERROR nova.compute.manager [instance: a5b98f92-d287-4d40-8a21-d2de64026970] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 674.587693] env[62070]: ERROR nova.compute.manager [instance: a5b98f92-d287-4d40-8a21-d2de64026970] self.force_reraise() [ 674.587693] env[62070]: ERROR nova.compute.manager [instance: a5b98f92-d287-4d40-8a21-d2de64026970] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 674.587693] env[62070]: ERROR nova.compute.manager [instance: a5b98f92-d287-4d40-8a21-d2de64026970] raise self.value [ 674.587693] env[62070]: ERROR nova.compute.manager [instance: a5b98f92-d287-4d40-8a21-d2de64026970] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 674.587693] env[62070]: ERROR nova.compute.manager [instance: a5b98f92-d287-4d40-8a21-d2de64026970] updated_port = self._update_port( [ 674.587693] env[62070]: ERROR nova.compute.manager [instance: a5b98f92-d287-4d40-8a21-d2de64026970] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 674.587693] env[62070]: ERROR nova.compute.manager [instance: a5b98f92-d287-4d40-8a21-d2de64026970] _ensure_no_port_binding_failure(port) [ 674.587693] env[62070]: ERROR nova.compute.manager [instance: a5b98f92-d287-4d40-8a21-d2de64026970] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 674.587693] env[62070]: ERROR nova.compute.manager [instance: a5b98f92-d287-4d40-8a21-d2de64026970] raise exception.PortBindingFailed(port_id=port['id']) [ 674.587693] env[62070]: ERROR nova.compute.manager [instance: a5b98f92-d287-4d40-8a21-d2de64026970] nova.exception.PortBindingFailed: Binding failed for port bfdbfcd7-c0eb-419c-a533-8f9444d52664, please check neutron logs for more information. [ 674.587693] env[62070]: ERROR nova.compute.manager [instance: a5b98f92-d287-4d40-8a21-d2de64026970] [ 674.588018] env[62070]: DEBUG nova.compute.utils [None req-5085d6a6-6f80-4417-bbf7-554feeae07b1 tempest-ServerActionsTestOtherB-843110490 tempest-ServerActionsTestOtherB-843110490-project-member] [instance: a5b98f92-d287-4d40-8a21-d2de64026970] Binding failed for port bfdbfcd7-c0eb-419c-a533-8f9444d52664, please check neutron logs for more information. 
{{(pid=62070) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 674.588408] env[62070]: DEBUG oslo_concurrency.lockutils [None req-b596e62b-91d4-4a7e-a714-611f34958df6 tempest-FloatingIPsAssociationTestJSON-86130693 tempest-FloatingIPsAssociationTestJSON-86130693-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.661s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 674.590131] env[62070]: INFO nova.compute.claims [None req-b596e62b-91d4-4a7e-a714-611f34958df6 tempest-FloatingIPsAssociationTestJSON-86130693 tempest-FloatingIPsAssociationTestJSON-86130693-project-member] [instance: 2c1dfa78-d300-4505-9f87-8e11a4973af3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 674.592344] env[62070]: DEBUG nova.compute.manager [None req-5085d6a6-6f80-4417-bbf7-554feeae07b1 tempest-ServerActionsTestOtherB-843110490 tempest-ServerActionsTestOtherB-843110490-project-member] [instance: a5b98f92-d287-4d40-8a21-d2de64026970] Build of instance a5b98f92-d287-4d40-8a21-d2de64026970 was re-scheduled: Binding failed for port bfdbfcd7-c0eb-419c-a533-8f9444d52664, please check neutron logs for more information. {{(pid=62070) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 674.592753] env[62070]: DEBUG nova.compute.manager [None req-5085d6a6-6f80-4417-bbf7-554feeae07b1 tempest-ServerActionsTestOtherB-843110490 tempest-ServerActionsTestOtherB-843110490-project-member] [instance: a5b98f92-d287-4d40-8a21-d2de64026970] Unplugging VIFs for instance {{(pid=62070) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 674.592980] env[62070]: DEBUG oslo_concurrency.lockutils [None req-5085d6a6-6f80-4417-bbf7-554feeae07b1 tempest-ServerActionsTestOtherB-843110490 tempest-ServerActionsTestOtherB-843110490-project-member] Acquiring lock "refresh_cache-a5b98f92-d287-4d40-8a21-d2de64026970" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 674.593137] env[62070]: DEBUG oslo_concurrency.lockutils [None req-5085d6a6-6f80-4417-bbf7-554feeae07b1 tempest-ServerActionsTestOtherB-843110490 tempest-ServerActionsTestOtherB-843110490-project-member] Acquired lock "refresh_cache-a5b98f92-d287-4d40-8a21-d2de64026970" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 674.593294] env[62070]: DEBUG nova.network.neutron [None req-5085d6a6-6f80-4417-bbf7-554feeae07b1 tempest-ServerActionsTestOtherB-843110490 tempest-ServerActionsTestOtherB-843110490-project-member] [instance: a5b98f92-d287-4d40-8a21-d2de64026970] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 674.773054] env[62070]: INFO nova.compute.manager [None req-a4dda7df-1307-4390-b3d0-8b40b9135ae8 tempest-ImagesOneServerTestJSON-651636145 tempest-ImagesOneServerTestJSON-651636145-project-member] [instance: d41f73e0-a188-4cc4-8391-938178aad496] Took 1.03 seconds to deallocate network for instance. 
[ 674.848645] env[62070]: DEBUG nova.compute.manager [req-27fdf8ee-a3bc-47c5-b02f-cbda7e3ac4d9 req-3176e926-90bf-49cf-800f-ad684a6f7bf0 service nova] [instance: a71c58e7-89db-4ad2-92e0-5379b04b751c] Received event network-vif-deleted-8470f264-09c8-4817-a711-e8ac92df552d {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 674.897060] env[62070]: INFO nova.compute.manager [-] [instance: a71c58e7-89db-4ad2-92e0-5379b04b751c] Took 1.02 seconds to deallocate network for instance. [ 674.899378] env[62070]: DEBUG nova.compute.claims [None req-1a6044f5-8e8b-4bc9-a7d0-857f0899f9f6 tempest-DeleteServersAdminTestJSON-428050376 tempest-DeleteServersAdminTestJSON-428050376-project-member] [instance: a71c58e7-89db-4ad2-92e0-5379b04b751c] Aborting claim: {{(pid=62070) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 674.899378] env[62070]: DEBUG oslo_concurrency.lockutils [None req-1a6044f5-8e8b-4bc9-a7d0-857f0899f9f6 tempest-DeleteServersAdminTestJSON-428050376 tempest-DeleteServersAdminTestJSON-428050376-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 674.925437] env[62070]: DEBUG oslo_concurrency.lockutils [req-6f60cd79-9483-474b-a8ee-7657a9cd1171 req-7241090c-7e49-43a9-bd08-085d2365e9b9 service nova] Releasing lock "refresh_cache-a71c58e7-89db-4ad2-92e0-5379b04b751c" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 675.113074] env[62070]: DEBUG nova.network.neutron [None req-5085d6a6-6f80-4417-bbf7-554feeae07b1 tempest-ServerActionsTestOtherB-843110490 tempest-ServerActionsTestOtherB-843110490-project-member] [instance: a5b98f92-d287-4d40-8a21-d2de64026970] Instance cache missing network info. {{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 675.218945] env[62070]: DEBUG nova.network.neutron [None req-5085d6a6-6f80-4417-bbf7-554feeae07b1 tempest-ServerActionsTestOtherB-843110490 tempest-ServerActionsTestOtherB-843110490-project-member] [instance: a5b98f92-d287-4d40-8a21-d2de64026970] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 675.721952] env[62070]: DEBUG oslo_concurrency.lockutils [None req-5085d6a6-6f80-4417-bbf7-554feeae07b1 tempest-ServerActionsTestOtherB-843110490 tempest-ServerActionsTestOtherB-843110490-project-member] Releasing lock "refresh_cache-a5b98f92-d287-4d40-8a21-d2de64026970" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 675.722223] env[62070]: DEBUG nova.compute.manager [None req-5085d6a6-6f80-4417-bbf7-554feeae07b1 tempest-ServerActionsTestOtherB-843110490 tempest-ServerActionsTestOtherB-843110490-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62070) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 675.722386] env[62070]: DEBUG nova.compute.manager [None req-5085d6a6-6f80-4417-bbf7-554feeae07b1 tempest-ServerActionsTestOtherB-843110490 tempest-ServerActionsTestOtherB-843110490-project-member] [instance: a5b98f92-d287-4d40-8a21-d2de64026970] Deallocating network for instance {{(pid=62070) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 675.722549] env[62070]: DEBUG nova.network.neutron [None req-5085d6a6-6f80-4417-bbf7-554feeae07b1 tempest-ServerActionsTestOtherB-843110490 tempest-ServerActionsTestOtherB-843110490-project-member] [instance: a5b98f92-d287-4d40-8a21-d2de64026970] deallocate_for_instance() {{(pid=62070) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 675.737594] env[62070]: DEBUG nova.network.neutron [None req-5085d6a6-6f80-4417-bbf7-554feeae07b1 tempest-ServerActionsTestOtherB-843110490 tempest-ServerActionsTestOtherB-843110490-project-member] [instance: a5b98f92-d287-4d40-8a21-d2de64026970] Instance cache missing network info. {{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 675.799938] env[62070]: INFO nova.scheduler.client.report [None req-a4dda7df-1307-4390-b3d0-8b40b9135ae8 tempest-ImagesOneServerTestJSON-651636145 tempest-ImagesOneServerTestJSON-651636145-project-member] Deleted allocations for instance d41f73e0-a188-4cc4-8391-938178aad496 [ 676.060204] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53120ed1-d630-47a6-abc8-12c0c53a570d {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.074980] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8c3506b-34d8-4453-bf2b-b45ad42cca93 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.120801] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ca27d58-280b-43e5-bbea-44d6484ac4fb {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.127680] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-701cf75d-01ee-49f1-ac37-f91727f4fc3c {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.140844] env[62070]: DEBUG nova.compute.provider_tree [None req-b596e62b-91d4-4a7e-a714-611f34958df6 tempest-FloatingIPsAssociationTestJSON-86130693 tempest-FloatingIPsAssociationTestJSON-86130693-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 676.240783] env[62070]: DEBUG nova.network.neutron [None req-5085d6a6-6f80-4417-bbf7-554feeae07b1 tempest-ServerActionsTestOtherB-843110490 tempest-ServerActionsTestOtherB-843110490-project-member] [instance: a5b98f92-d287-4d40-8a21-d2de64026970] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 676.308732] env[62070]: DEBUG oslo_concurrency.lockutils [None req-a4dda7df-1307-4390-b3d0-8b40b9135ae8 tempest-ImagesOneServerTestJSON-651636145 tempest-ImagesOneServerTestJSON-651636145-project-member] Lock 
"d41f73e0-a188-4cc4-8391-938178aad496" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 89.481s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 676.644056] env[62070]: DEBUG nova.scheduler.client.report [None req-b596e62b-91d4-4a7e-a714-611f34958df6 tempest-FloatingIPsAssociationTestJSON-86130693 tempest-FloatingIPsAssociationTestJSON-86130693-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 676.743702] env[62070]: INFO nova.compute.manager [None req-5085d6a6-6f80-4417-bbf7-554feeae07b1 tempest-ServerActionsTestOtherB-843110490 tempest-ServerActionsTestOtherB-843110490-project-member] [instance: a5b98f92-d287-4d40-8a21-d2de64026970] Took 1.02 seconds to deallocate network for instance. [ 676.811538] env[62070]: DEBUG nova.compute.manager [None req-89ead6ea-31c2-4fec-ad52-a9d968c7ad21 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 71aead12-a182-40a7-b5a9-91c01271b800] Starting instance... {{(pid=62070) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 677.149413] env[62070]: DEBUG oslo_concurrency.lockutils [None req-b596e62b-91d4-4a7e-a714-611f34958df6 tempest-FloatingIPsAssociationTestJSON-86130693 tempest-FloatingIPsAssociationTestJSON-86130693-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.561s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 677.149937] env[62070]: DEBUG nova.compute.manager [None req-b596e62b-91d4-4a7e-a714-611f34958df6 tempest-FloatingIPsAssociationTestJSON-86130693 tempest-FloatingIPsAssociationTestJSON-86130693-project-member] [instance: 2c1dfa78-d300-4505-9f87-8e11a4973af3] Start building networks asynchronously for instance. 
{{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 677.152810] env[62070]: DEBUG oslo_concurrency.lockutils [None req-97492129-d62b-43cc-b22d-f53a63d9b75b tempest-InstanceActionsTestJSON-809963800 tempest-InstanceActionsTestJSON-809963800-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 15.226s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 677.342705] env[62070]: DEBUG oslo_concurrency.lockutils [None req-89ead6ea-31c2-4fec-ad52-a9d968c7ad21 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 677.657161] env[62070]: DEBUG nova.compute.utils [None req-b596e62b-91d4-4a7e-a714-611f34958df6 tempest-FloatingIPsAssociationTestJSON-86130693 tempest-FloatingIPsAssociationTestJSON-86130693-project-member] Using /dev/sd instead of None {{(pid=62070) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 677.665252] env[62070]: DEBUG nova.compute.manager [None req-b596e62b-91d4-4a7e-a714-611f34958df6 tempest-FloatingIPsAssociationTestJSON-86130693 tempest-FloatingIPsAssociationTestJSON-86130693-project-member] [instance: 2c1dfa78-d300-4505-9f87-8e11a4973af3] Allocating IP information in the background. {{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 677.665434] env[62070]: DEBUG nova.network.neutron [None req-b596e62b-91d4-4a7e-a714-611f34958df6 tempest-FloatingIPsAssociationTestJSON-86130693 tempest-FloatingIPsAssociationTestJSON-86130693-project-member] [instance: 2c1dfa78-d300-4505-9f87-8e11a4973af3] allocate_for_instance() {{(pid=62070) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 677.738339] env[62070]: DEBUG nova.policy [None req-b596e62b-91d4-4a7e-a714-611f34958df6 tempest-FloatingIPsAssociationTestJSON-86130693 tempest-FloatingIPsAssociationTestJSON-86130693-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8f97e86904ce4d88ada76c6690434c57', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2816ec86c038454d9afb25a33165cc94', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62070) authorize /opt/stack/nova/nova/policy.py:201}} [ 677.772979] env[62070]: INFO nova.scheduler.client.report [None req-5085d6a6-6f80-4417-bbf7-554feeae07b1 tempest-ServerActionsTestOtherB-843110490 tempest-ServerActionsTestOtherB-843110490-project-member] Deleted allocations for instance a5b98f92-d287-4d40-8a21-d2de64026970 [ 678.069092] env[62070]: DEBUG nova.network.neutron [None req-b596e62b-91d4-4a7e-a714-611f34958df6 tempest-FloatingIPsAssociationTestJSON-86130693 tempest-FloatingIPsAssociationTestJSON-86130693-project-member] [instance: 2c1dfa78-d300-4505-9f87-8e11a4973af3] Successfully created port: e2dc7d43-c5bb-4bda-a4db-bcb59bb3342d {{(pid=62070) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 678.091374] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-4fc562df-5d7f-4ce5-945b-e0a35b84a17e {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.101585] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97d663b8-959d-40b3-a6b7-ccbb1c940d8f {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.137639] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2db80d19-e90c-4928-acb8-ead28db5a406 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.145384] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0b1564f-d84f-44f7-aad0-1f98b57dc6aa {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.159426] env[62070]: DEBUG nova.compute.provider_tree [None req-97492129-d62b-43cc-b22d-f53a63d9b75b tempest-InstanceActionsTestJSON-809963800 tempest-InstanceActionsTestJSON-809963800-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 678.166233] env[62070]: DEBUG nova.compute.manager [None req-b596e62b-91d4-4a7e-a714-611f34958df6 tempest-FloatingIPsAssociationTestJSON-86130693 tempest-FloatingIPsAssociationTestJSON-86130693-project-member] [instance: 2c1dfa78-d300-4505-9f87-8e11a4973af3] Start building block device mappings for instance. {{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 678.281205] env[62070]: DEBUG oslo_concurrency.lockutils [None req-5085d6a6-6f80-4417-bbf7-554feeae07b1 tempest-ServerActionsTestOtherB-843110490 tempest-ServerActionsTestOtherB-843110490-project-member] Lock "a5b98f92-d287-4d40-8a21-d2de64026970" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 90.197s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 678.669105] env[62070]: DEBUG nova.scheduler.client.report [None req-97492129-d62b-43cc-b22d-f53a63d9b75b tempest-InstanceActionsTestJSON-809963800 tempest-InstanceActionsTestJSON-809963800-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 678.784848] env[62070]: DEBUG nova.compute.manager [None req-78392d05-754f-4818-8f15-eae4311c941b tempest-ServersTestManualDisk-1129474164 tempest-ServersTestManualDisk-1129474164-project-member] [instance: d0914f90-200c-4715-aaab-54beacf339b9] Starting instance...
{{(pid=62070) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 679.162175] env[62070]: DEBUG nova.compute.manager [req-52117424-22ce-4f84-ac7c-30ae8cbead9a req-daf6fe46-0224-423c-902a-ef7f5b0fd77d service nova] [instance: 2c1dfa78-d300-4505-9f87-8e11a4973af3] Received event network-changed-e2dc7d43-c5bb-4bda-a4db-bcb59bb3342d {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 679.162365] env[62070]: DEBUG nova.compute.manager [req-52117424-22ce-4f84-ac7c-30ae8cbead9a req-daf6fe46-0224-423c-902a-ef7f5b0fd77d service nova] [instance: 2c1dfa78-d300-4505-9f87-8e11a4973af3] Refreshing instance network info cache due to event network-changed-e2dc7d43-c5bb-4bda-a4db-bcb59bb3342d. {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 679.162608] env[62070]: DEBUG oslo_concurrency.lockutils [req-52117424-22ce-4f84-ac7c-30ae8cbead9a req-daf6fe46-0224-423c-902a-ef7f5b0fd77d service nova] Acquiring lock "refresh_cache-2c1dfa78-d300-4505-9f87-8e11a4973af3" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 679.162753] env[62070]: DEBUG oslo_concurrency.lockutils [req-52117424-22ce-4f84-ac7c-30ae8cbead9a req-daf6fe46-0224-423c-902a-ef7f5b0fd77d service nova] Acquired lock "refresh_cache-2c1dfa78-d300-4505-9f87-8e11a4973af3" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 679.162914] env[62070]: DEBUG nova.network.neutron [req-52117424-22ce-4f84-ac7c-30ae8cbead9a req-daf6fe46-0224-423c-902a-ef7f5b0fd77d service nova] [instance: 2c1dfa78-d300-4505-9f87-8e11a4973af3] Refreshing network info cache for port e2dc7d43-c5bb-4bda-a4db-bcb59bb3342d {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 679.177725] env[62070]: DEBUG nova.compute.manager [None req-b596e62b-91d4-4a7e-a714-611f34958df6 tempest-FloatingIPsAssociationTestJSON-86130693 tempest-FloatingIPsAssociationTestJSON-86130693-project-member] [instance: 2c1dfa78-d300-4505-9f87-8e11a4973af3] Start spawning the instance on the hypervisor. {{(pid=62070) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 679.180461] env[62070]: DEBUG oslo_concurrency.lockutils [None req-97492129-d62b-43cc-b22d-f53a63d9b75b tempest-InstanceActionsTestJSON-809963800 tempest-InstanceActionsTestJSON-809963800-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.028s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 679.181207] env[62070]: ERROR nova.compute.manager [None req-97492129-d62b-43cc-b22d-f53a63d9b75b tempest-InstanceActionsTestJSON-809963800 tempest-InstanceActionsTestJSON-809963800-project-member] [instance: adccca24-ed77-410b-8b69-19137cadafbd] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port ef6f1c75-e8f0-4dd1-bd35-95e061959be1, please check neutron logs for more information. 
[ 679.181207] env[62070]: ERROR nova.compute.manager [instance: adccca24-ed77-410b-8b69-19137cadafbd] Traceback (most recent call last): [ 679.181207] env[62070]: ERROR nova.compute.manager [instance: adccca24-ed77-410b-8b69-19137cadafbd] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 679.181207] env[62070]: ERROR nova.compute.manager [instance: adccca24-ed77-410b-8b69-19137cadafbd] self.driver.spawn(context, instance, image_meta, [ 679.181207] env[62070]: ERROR nova.compute.manager [instance: adccca24-ed77-410b-8b69-19137cadafbd] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 679.181207] env[62070]: ERROR nova.compute.manager [instance: adccca24-ed77-410b-8b69-19137cadafbd] self._vmops.spawn(context, instance, image_meta, injected_files, [ 679.181207] env[62070]: ERROR nova.compute.manager [instance: adccca24-ed77-410b-8b69-19137cadafbd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 679.181207] env[62070]: ERROR nova.compute.manager [instance: adccca24-ed77-410b-8b69-19137cadafbd] vm_ref = self.build_virtual_machine(instance, [ 679.181207] env[62070]: ERROR nova.compute.manager [instance: adccca24-ed77-410b-8b69-19137cadafbd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 679.181207] env[62070]: ERROR nova.compute.manager [instance: adccca24-ed77-410b-8b69-19137cadafbd] vif_infos = vmwarevif.get_vif_info(self._session, [ 679.181207] env[62070]: ERROR nova.compute.manager [instance: adccca24-ed77-410b-8b69-19137cadafbd] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 679.181539] env[62070]: ERROR nova.compute.manager [instance: adccca24-ed77-410b-8b69-19137cadafbd] for vif in network_info: [ 679.181539] env[62070]: ERROR nova.compute.manager [instance: adccca24-ed77-410b-8b69-19137cadafbd] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 679.181539] env[62070]: ERROR nova.compute.manager [instance: adccca24-ed77-410b-8b69-19137cadafbd] return self._sync_wrapper(fn, *args, **kwargs) [ 679.181539] env[62070]: ERROR nova.compute.manager [instance: adccca24-ed77-410b-8b69-19137cadafbd] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 679.181539] env[62070]: ERROR nova.compute.manager [instance: adccca24-ed77-410b-8b69-19137cadafbd] self.wait() [ 679.181539] env[62070]: ERROR nova.compute.manager [instance: adccca24-ed77-410b-8b69-19137cadafbd] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 679.181539] env[62070]: ERROR nova.compute.manager [instance: adccca24-ed77-410b-8b69-19137cadafbd] self[:] = self._gt.wait() [ 679.181539] env[62070]: ERROR nova.compute.manager [instance: adccca24-ed77-410b-8b69-19137cadafbd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 679.181539] env[62070]: ERROR nova.compute.manager [instance: adccca24-ed77-410b-8b69-19137cadafbd] return self._exit_event.wait() [ 679.181539] env[62070]: ERROR nova.compute.manager [instance: adccca24-ed77-410b-8b69-19137cadafbd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 679.181539] env[62070]: ERROR nova.compute.manager [instance: adccca24-ed77-410b-8b69-19137cadafbd] result = hub.switch() [ 679.181539] env[62070]: ERROR nova.compute.manager [instance: adccca24-ed77-410b-8b69-19137cadafbd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
679.181539] env[62070]: ERROR nova.compute.manager [instance: adccca24-ed77-410b-8b69-19137cadafbd] return self.greenlet.switch() [ 679.181907] env[62070]: ERROR nova.compute.manager [instance: adccca24-ed77-410b-8b69-19137cadafbd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 679.181907] env[62070]: ERROR nova.compute.manager [instance: adccca24-ed77-410b-8b69-19137cadafbd] result = function(*args, **kwargs) [ 679.181907] env[62070]: ERROR nova.compute.manager [instance: adccca24-ed77-410b-8b69-19137cadafbd] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 679.181907] env[62070]: ERROR nova.compute.manager [instance: adccca24-ed77-410b-8b69-19137cadafbd] return func(*args, **kwargs) [ 679.181907] env[62070]: ERROR nova.compute.manager [instance: adccca24-ed77-410b-8b69-19137cadafbd] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 679.181907] env[62070]: ERROR nova.compute.manager [instance: adccca24-ed77-410b-8b69-19137cadafbd] raise e [ 679.181907] env[62070]: ERROR nova.compute.manager [instance: adccca24-ed77-410b-8b69-19137cadafbd] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 679.181907] env[62070]: ERROR nova.compute.manager [instance: adccca24-ed77-410b-8b69-19137cadafbd] nwinfo = self.network_api.allocate_for_instance( [ 679.181907] env[62070]: ERROR nova.compute.manager [instance: adccca24-ed77-410b-8b69-19137cadafbd] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 679.181907] env[62070]: ERROR nova.compute.manager [instance: adccca24-ed77-410b-8b69-19137cadafbd] created_port_ids = self._update_ports_for_instance( [ 679.181907] env[62070]: ERROR nova.compute.manager [instance: adccca24-ed77-410b-8b69-19137cadafbd] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 679.181907] env[62070]: ERROR nova.compute.manager [instance: adccca24-ed77-410b-8b69-19137cadafbd] with excutils.save_and_reraise_exception(): [ 679.181907] env[62070]: ERROR nova.compute.manager [instance: adccca24-ed77-410b-8b69-19137cadafbd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 679.182294] env[62070]: ERROR nova.compute.manager [instance: adccca24-ed77-410b-8b69-19137cadafbd] self.force_reraise() [ 679.182294] env[62070]: ERROR nova.compute.manager [instance: adccca24-ed77-410b-8b69-19137cadafbd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 679.182294] env[62070]: ERROR nova.compute.manager [instance: adccca24-ed77-410b-8b69-19137cadafbd] raise self.value [ 679.182294] env[62070]: ERROR nova.compute.manager [instance: adccca24-ed77-410b-8b69-19137cadafbd] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 679.182294] env[62070]: ERROR nova.compute.manager [instance: adccca24-ed77-410b-8b69-19137cadafbd] updated_port = self._update_port( [ 679.182294] env[62070]: ERROR nova.compute.manager [instance: adccca24-ed77-410b-8b69-19137cadafbd] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 679.182294] env[62070]: ERROR nova.compute.manager [instance: adccca24-ed77-410b-8b69-19137cadafbd] _ensure_no_port_binding_failure(port) [ 679.182294] env[62070]: ERROR nova.compute.manager [instance: adccca24-ed77-410b-8b69-19137cadafbd] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 679.182294] env[62070]: ERROR nova.compute.manager [instance: adccca24-ed77-410b-8b69-19137cadafbd] raise exception.PortBindingFailed(port_id=port['id']) [ 679.182294] env[62070]: ERROR nova.compute.manager [instance: adccca24-ed77-410b-8b69-19137cadafbd] nova.exception.PortBindingFailed: Binding failed for port ef6f1c75-e8f0-4dd1-bd35-95e061959be1, please check neutron logs for more information. [ 679.182294] env[62070]: ERROR nova.compute.manager [instance: adccca24-ed77-410b-8b69-19137cadafbd] [ 679.182604] env[62070]: DEBUG nova.compute.utils [None req-97492129-d62b-43cc-b22d-f53a63d9b75b tempest-InstanceActionsTestJSON-809963800 tempest-InstanceActionsTestJSON-809963800-project-member] [instance: adccca24-ed77-410b-8b69-19137cadafbd] Binding failed for port ef6f1c75-e8f0-4dd1-bd35-95e061959be1, please check neutron logs for more information. {{(pid=62070) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 679.184222] env[62070]: DEBUG oslo_concurrency.lockutils [None req-ca5c3ba7-16ea-4ea4-a91b-9bf5923a33ab tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 15.494s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 679.190440] env[62070]: DEBUG nova.compute.manager [None req-97492129-d62b-43cc-b22d-f53a63d9b75b tempest-InstanceActionsTestJSON-809963800 tempest-InstanceActionsTestJSON-809963800-project-member] [instance: adccca24-ed77-410b-8b69-19137cadafbd] Build of instance adccca24-ed77-410b-8b69-19137cadafbd was re-scheduled: Binding failed for port ef6f1c75-e8f0-4dd1-bd35-95e061959be1, please check neutron logs for more information. 
{{(pid=62070) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 679.191242] env[62070]: DEBUG nova.compute.manager [None req-97492129-d62b-43cc-b22d-f53a63d9b75b tempest-InstanceActionsTestJSON-809963800 tempest-InstanceActionsTestJSON-809963800-project-member] [instance: adccca24-ed77-410b-8b69-19137cadafbd] Unplugging VIFs for instance {{(pid=62070) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 679.191475] env[62070]: DEBUG oslo_concurrency.lockutils [None req-97492129-d62b-43cc-b22d-f53a63d9b75b tempest-InstanceActionsTestJSON-809963800 tempest-InstanceActionsTestJSON-809963800-project-member] Acquiring lock "refresh_cache-adccca24-ed77-410b-8b69-19137cadafbd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 679.191755] env[62070]: DEBUG oslo_concurrency.lockutils [None req-97492129-d62b-43cc-b22d-f53a63d9b75b tempest-InstanceActionsTestJSON-809963800 tempest-InstanceActionsTestJSON-809963800-project-member] Acquired lock "refresh_cache-adccca24-ed77-410b-8b69-19137cadafbd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 679.191822] env[62070]: DEBUG nova.network.neutron [None req-97492129-d62b-43cc-b22d-f53a63d9b75b tempest-InstanceActionsTestJSON-809963800 tempest-InstanceActionsTestJSON-809963800-project-member] [instance: adccca24-ed77-410b-8b69-19137cadafbd] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 679.209058] env[62070]: DEBUG nova.virt.hardware [None req-b596e62b-91d4-4a7e-a714-611f34958df6 tempest-FloatingIPsAssociationTestJSON-86130693 tempest-FloatingIPsAssociationTestJSON-86130693-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T09:21:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T09:21:17Z,direct_url=,disk_format='vmdk',id=43ea607c-7ece-4601-9b11-75c6a16aa7dd,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9d42cb2bbadf40d6b35f237f71234611',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T09:21:18Z,virtual_size=,visibility=), allow threads: False {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 679.212095] env[62070]: DEBUG nova.virt.hardware [None req-b596e62b-91d4-4a7e-a714-611f34958df6 tempest-FloatingIPsAssociationTestJSON-86130693 tempest-FloatingIPsAssociationTestJSON-86130693-project-member] Flavor limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 679.212095] env[62070]: DEBUG nova.virt.hardware [None req-b596e62b-91d4-4a7e-a714-611f34958df6 tempest-FloatingIPsAssociationTestJSON-86130693 tempest-FloatingIPsAssociationTestJSON-86130693-project-member] Image limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 679.212095] env[62070]: DEBUG nova.virt.hardware [None req-b596e62b-91d4-4a7e-a714-611f34958df6 tempest-FloatingIPsAssociationTestJSON-86130693 tempest-FloatingIPsAssociationTestJSON-86130693-project-member] Flavor pref 0:0:0 
{{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 679.212095] env[62070]: DEBUG nova.virt.hardware [None req-b596e62b-91d4-4a7e-a714-611f34958df6 tempest-FloatingIPsAssociationTestJSON-86130693 tempest-FloatingIPsAssociationTestJSON-86130693-project-member] Image pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 679.212095] env[62070]: DEBUG nova.virt.hardware [None req-b596e62b-91d4-4a7e-a714-611f34958df6 tempest-FloatingIPsAssociationTestJSON-86130693 tempest-FloatingIPsAssociationTestJSON-86130693-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 679.212430] env[62070]: DEBUG nova.virt.hardware [None req-b596e62b-91d4-4a7e-a714-611f34958df6 tempest-FloatingIPsAssociationTestJSON-86130693 tempest-FloatingIPsAssociationTestJSON-86130693-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 679.212430] env[62070]: DEBUG nova.virt.hardware [None req-b596e62b-91d4-4a7e-a714-611f34958df6 tempest-FloatingIPsAssociationTestJSON-86130693 tempest-FloatingIPsAssociationTestJSON-86130693-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 679.212430] env[62070]: DEBUG nova.virt.hardware [None req-b596e62b-91d4-4a7e-a714-611f34958df6 tempest-FloatingIPsAssociationTestJSON-86130693 tempest-FloatingIPsAssociationTestJSON-86130693-project-member] Got 1 possible topologies {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 679.212430] env[62070]: DEBUG nova.virt.hardware [None req-b596e62b-91d4-4a7e-a714-611f34958df6 tempest-FloatingIPsAssociationTestJSON-86130693 tempest-FloatingIPsAssociationTestJSON-86130693-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 679.212430] env[62070]: DEBUG nova.virt.hardware [None req-b596e62b-91d4-4a7e-a714-611f34958df6 tempest-FloatingIPsAssociationTestJSON-86130693 tempest-FloatingIPsAssociationTestJSON-86130693-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 679.213673] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99f5c724-b430-4a9c-84da-9f3d4d00d24a {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.222642] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbac5cf5-e6da-43f4-8295-a127a42cf11f {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.317595] env[62070]: DEBUG oslo_concurrency.lockutils [None req-78392d05-754f-4818-8f15-eae4311c941b tempest-ServersTestManualDisk-1129474164 tempest-ServersTestManualDisk-1129474164-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62070) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 679.334088] env[62070]: ERROR nova.compute.manager [None req-b596e62b-91d4-4a7e-a714-611f34958df6 tempest-FloatingIPsAssociationTestJSON-86130693 tempest-FloatingIPsAssociationTestJSON-86130693-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port e2dc7d43-c5bb-4bda-a4db-bcb59bb3342d, please check neutron logs for more information. [ 679.334088] env[62070]: ERROR nova.compute.manager Traceback (most recent call last): [ 679.334088] env[62070]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 679.334088] env[62070]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 679.334088] env[62070]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 679.334088] env[62070]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 679.334088] env[62070]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 679.334088] env[62070]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 679.334088] env[62070]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 679.334088] env[62070]: ERROR nova.compute.manager self.force_reraise() [ 679.334088] env[62070]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 679.334088] env[62070]: ERROR nova.compute.manager raise self.value [ 679.334088] env[62070]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 679.334088] env[62070]: ERROR nova.compute.manager updated_port = self._update_port( [ 679.334088] env[62070]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 679.334088] env[62070]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 679.334708] env[62070]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 679.334708] env[62070]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 679.334708] env[62070]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port e2dc7d43-c5bb-4bda-a4db-bcb59bb3342d, please check neutron logs for more information. 
[ 679.334708] env[62070]: ERROR nova.compute.manager [ 679.334708] env[62070]: Traceback (most recent call last): [ 679.334708] env[62070]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 679.334708] env[62070]: listener.cb(fileno) [ 679.334708] env[62070]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 679.334708] env[62070]: result = function(*args, **kwargs) [ 679.334708] env[62070]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 679.334708] env[62070]: return func(*args, **kwargs) [ 679.334708] env[62070]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 679.334708] env[62070]: raise e [ 679.334708] env[62070]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 679.334708] env[62070]: nwinfo = self.network_api.allocate_for_instance( [ 679.334708] env[62070]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 679.334708] env[62070]: created_port_ids = self._update_ports_for_instance( [ 679.334708] env[62070]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 679.334708] env[62070]: with excutils.save_and_reraise_exception(): [ 679.334708] env[62070]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 679.334708] env[62070]: self.force_reraise() [ 679.334708] env[62070]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 679.334708] env[62070]: raise self.value [ 679.334708] env[62070]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 679.334708] env[62070]: updated_port = self._update_port( [ 679.334708] env[62070]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 679.334708] env[62070]: _ensure_no_port_binding_failure(port) [ 679.334708] env[62070]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 679.334708] env[62070]: raise exception.PortBindingFailed(port_id=port['id']) [ 679.336875] env[62070]: nova.exception.PortBindingFailed: Binding failed for port e2dc7d43-c5bb-4bda-a4db-bcb59bb3342d, please check neutron logs for more information. [ 679.336875] env[62070]: Removing descriptor: 14 [ 679.336875] env[62070]: ERROR nova.compute.manager [None req-b596e62b-91d4-4a7e-a714-611f34958df6 tempest-FloatingIPsAssociationTestJSON-86130693 tempest-FloatingIPsAssociationTestJSON-86130693-project-member] [instance: 2c1dfa78-d300-4505-9f87-8e11a4973af3] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port e2dc7d43-c5bb-4bda-a4db-bcb59bb3342d, please check neutron logs for more information. 
[ 679.336875] env[62070]: ERROR nova.compute.manager [instance: 2c1dfa78-d300-4505-9f87-8e11a4973af3] Traceback (most recent call last): [ 679.336875] env[62070]: ERROR nova.compute.manager [instance: 2c1dfa78-d300-4505-9f87-8e11a4973af3] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 679.336875] env[62070]: ERROR nova.compute.manager [instance: 2c1dfa78-d300-4505-9f87-8e11a4973af3] yield resources [ 679.336875] env[62070]: ERROR nova.compute.manager [instance: 2c1dfa78-d300-4505-9f87-8e11a4973af3] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 679.336875] env[62070]: ERROR nova.compute.manager [instance: 2c1dfa78-d300-4505-9f87-8e11a4973af3] self.driver.spawn(context, instance, image_meta, [ 679.336875] env[62070]: ERROR nova.compute.manager [instance: 2c1dfa78-d300-4505-9f87-8e11a4973af3] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 679.336875] env[62070]: ERROR nova.compute.manager [instance: 2c1dfa78-d300-4505-9f87-8e11a4973af3] self._vmops.spawn(context, instance, image_meta, injected_files, [ 679.336875] env[62070]: ERROR nova.compute.manager [instance: 2c1dfa78-d300-4505-9f87-8e11a4973af3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 679.336875] env[62070]: ERROR nova.compute.manager [instance: 2c1dfa78-d300-4505-9f87-8e11a4973af3] vm_ref = self.build_virtual_machine(instance, [ 679.337395] env[62070]: ERROR nova.compute.manager [instance: 2c1dfa78-d300-4505-9f87-8e11a4973af3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 679.337395] env[62070]: ERROR nova.compute.manager [instance: 2c1dfa78-d300-4505-9f87-8e11a4973af3] vif_infos = vmwarevif.get_vif_info(self._session, [ 679.337395] env[62070]: ERROR nova.compute.manager [instance: 2c1dfa78-d300-4505-9f87-8e11a4973af3] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 679.337395] env[62070]: ERROR nova.compute.manager [instance: 2c1dfa78-d300-4505-9f87-8e11a4973af3] for vif in network_info: [ 679.337395] env[62070]: ERROR nova.compute.manager [instance: 2c1dfa78-d300-4505-9f87-8e11a4973af3] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 679.337395] env[62070]: ERROR nova.compute.manager [instance: 2c1dfa78-d300-4505-9f87-8e11a4973af3] return self._sync_wrapper(fn, *args, **kwargs) [ 679.337395] env[62070]: ERROR nova.compute.manager [instance: 2c1dfa78-d300-4505-9f87-8e11a4973af3] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 679.337395] env[62070]: ERROR nova.compute.manager [instance: 2c1dfa78-d300-4505-9f87-8e11a4973af3] self.wait() [ 679.337395] env[62070]: ERROR nova.compute.manager [instance: 2c1dfa78-d300-4505-9f87-8e11a4973af3] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 679.337395] env[62070]: ERROR nova.compute.manager [instance: 2c1dfa78-d300-4505-9f87-8e11a4973af3] self[:] = self._gt.wait() [ 679.337395] env[62070]: ERROR nova.compute.manager [instance: 2c1dfa78-d300-4505-9f87-8e11a4973af3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 679.337395] env[62070]: ERROR nova.compute.manager [instance: 2c1dfa78-d300-4505-9f87-8e11a4973af3] return self._exit_event.wait() [ 679.337395] env[62070]: ERROR nova.compute.manager [instance: 2c1dfa78-d300-4505-9f87-8e11a4973af3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 679.338302] env[62070]: ERROR 
nova.compute.manager [instance: 2c1dfa78-d300-4505-9f87-8e11a4973af3] result = hub.switch() [ 679.338302] env[62070]: ERROR nova.compute.manager [instance: 2c1dfa78-d300-4505-9f87-8e11a4973af3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 679.338302] env[62070]: ERROR nova.compute.manager [instance: 2c1dfa78-d300-4505-9f87-8e11a4973af3] return self.greenlet.switch() [ 679.338302] env[62070]: ERROR nova.compute.manager [instance: 2c1dfa78-d300-4505-9f87-8e11a4973af3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 679.338302] env[62070]: ERROR nova.compute.manager [instance: 2c1dfa78-d300-4505-9f87-8e11a4973af3] result = function(*args, **kwargs) [ 679.338302] env[62070]: ERROR nova.compute.manager [instance: 2c1dfa78-d300-4505-9f87-8e11a4973af3] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 679.338302] env[62070]: ERROR nova.compute.manager [instance: 2c1dfa78-d300-4505-9f87-8e11a4973af3] return func(*args, **kwargs) [ 679.338302] env[62070]: ERROR nova.compute.manager [instance: 2c1dfa78-d300-4505-9f87-8e11a4973af3] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 679.338302] env[62070]: ERROR nova.compute.manager [instance: 2c1dfa78-d300-4505-9f87-8e11a4973af3] raise e [ 679.338302] env[62070]: ERROR nova.compute.manager [instance: 2c1dfa78-d300-4505-9f87-8e11a4973af3] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 679.338302] env[62070]: ERROR nova.compute.manager [instance: 2c1dfa78-d300-4505-9f87-8e11a4973af3] nwinfo = self.network_api.allocate_for_instance( [ 679.338302] env[62070]: ERROR nova.compute.manager [instance: 2c1dfa78-d300-4505-9f87-8e11a4973af3] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 679.338302] env[62070]: ERROR nova.compute.manager [instance: 2c1dfa78-d300-4505-9f87-8e11a4973af3] created_port_ids = self._update_ports_for_instance( [ 679.339403] env[62070]: ERROR nova.compute.manager [instance: 2c1dfa78-d300-4505-9f87-8e11a4973af3] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 679.339403] env[62070]: ERROR nova.compute.manager [instance: 2c1dfa78-d300-4505-9f87-8e11a4973af3] with excutils.save_and_reraise_exception(): [ 679.339403] env[62070]: ERROR nova.compute.manager [instance: 2c1dfa78-d300-4505-9f87-8e11a4973af3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 679.339403] env[62070]: ERROR nova.compute.manager [instance: 2c1dfa78-d300-4505-9f87-8e11a4973af3] self.force_reraise() [ 679.339403] env[62070]: ERROR nova.compute.manager [instance: 2c1dfa78-d300-4505-9f87-8e11a4973af3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 679.339403] env[62070]: ERROR nova.compute.manager [instance: 2c1dfa78-d300-4505-9f87-8e11a4973af3] raise self.value [ 679.339403] env[62070]: ERROR nova.compute.manager [instance: 2c1dfa78-d300-4505-9f87-8e11a4973af3] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 679.339403] env[62070]: ERROR nova.compute.manager [instance: 2c1dfa78-d300-4505-9f87-8e11a4973af3] updated_port = self._update_port( [ 679.339403] env[62070]: ERROR nova.compute.manager [instance: 2c1dfa78-d300-4505-9f87-8e11a4973af3] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 679.339403] 
env[62070]: ERROR nova.compute.manager [instance: 2c1dfa78-d300-4505-9f87-8e11a4973af3] _ensure_no_port_binding_failure(port) [ 679.339403] env[62070]: ERROR nova.compute.manager [instance: 2c1dfa78-d300-4505-9f87-8e11a4973af3] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 679.339403] env[62070]: ERROR nova.compute.manager [instance: 2c1dfa78-d300-4505-9f87-8e11a4973af3] raise exception.PortBindingFailed(port_id=port['id']) [ 679.343117] env[62070]: ERROR nova.compute.manager [instance: 2c1dfa78-d300-4505-9f87-8e11a4973af3] nova.exception.PortBindingFailed: Binding failed for port e2dc7d43-c5bb-4bda-a4db-bcb59bb3342d, please check neutron logs for more information. [ 679.343117] env[62070]: ERROR nova.compute.manager [instance: 2c1dfa78-d300-4505-9f87-8e11a4973af3] [ 679.343117] env[62070]: INFO nova.compute.manager [None req-b596e62b-91d4-4a7e-a714-611f34958df6 tempest-FloatingIPsAssociationTestJSON-86130693 tempest-FloatingIPsAssociationTestJSON-86130693-project-member] [instance: 2c1dfa78-d300-4505-9f87-8e11a4973af3] Terminating instance [ 679.343117] env[62070]: DEBUG oslo_concurrency.lockutils [None req-b596e62b-91d4-4a7e-a714-611f34958df6 tempest-FloatingIPsAssociationTestJSON-86130693 tempest-FloatingIPsAssociationTestJSON-86130693-project-member] Acquiring lock "refresh_cache-2c1dfa78-d300-4505-9f87-8e11a4973af3" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 679.681655] env[62070]: DEBUG nova.network.neutron [req-52117424-22ce-4f84-ac7c-30ae8cbead9a req-daf6fe46-0224-423c-902a-ef7f5b0fd77d service nova] [instance: 2c1dfa78-d300-4505-9f87-8e11a4973af3] Instance cache missing network info. {{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 679.723140] env[62070]: DEBUG nova.network.neutron [None req-97492129-d62b-43cc-b22d-f53a63d9b75b tempest-InstanceActionsTestJSON-809963800 tempest-InstanceActionsTestJSON-809963800-project-member] [instance: adccca24-ed77-410b-8b69-19137cadafbd] Instance cache missing network info. 
{{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 679.793108] env[62070]: DEBUG nova.network.neutron [req-52117424-22ce-4f84-ac7c-30ae8cbead9a req-daf6fe46-0224-423c-902a-ef7f5b0fd77d service nova] [instance: 2c1dfa78-d300-4505-9f87-8e11a4973af3] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 679.888339] env[62070]: DEBUG nova.network.neutron [None req-97492129-d62b-43cc-b22d-f53a63d9b75b tempest-InstanceActionsTestJSON-809963800 tempest-InstanceActionsTestJSON-809963800-project-member] [instance: adccca24-ed77-410b-8b69-19137cadafbd] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 680.143496] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13f1f217-f1f0-4ce9-be3c-bcd3992eb831 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.154411] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ff4943e-64d3-4c19-9f90-b7ab79b67044 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.191150] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3da23f4e-9001-4d38-92e2-1f52e45c145d {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.199366] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c467a4d2-4a0b-4659-80af-997f336b69b0 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.215353] env[62070]: DEBUG nova.compute.provider_tree [None req-ca5c3ba7-16ea-4ea4-a91b-9bf5923a33ab tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 680.299379] env[62070]: DEBUG oslo_concurrency.lockutils [req-52117424-22ce-4f84-ac7c-30ae8cbead9a req-daf6fe46-0224-423c-902a-ef7f5b0fd77d service nova] Releasing lock "refresh_cache-2c1dfa78-d300-4505-9f87-8e11a4973af3" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 680.299850] env[62070]: DEBUG oslo_concurrency.lockutils [None req-b596e62b-91d4-4a7e-a714-611f34958df6 tempest-FloatingIPsAssociationTestJSON-86130693 tempest-FloatingIPsAssociationTestJSON-86130693-project-member] Acquired lock "refresh_cache-2c1dfa78-d300-4505-9f87-8e11a4973af3" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 680.300974] env[62070]: DEBUG nova.network.neutron [None req-b596e62b-91d4-4a7e-a714-611f34958df6 tempest-FloatingIPsAssociationTestJSON-86130693 tempest-FloatingIPsAssociationTestJSON-86130693-project-member] [instance: 2c1dfa78-d300-4505-9f87-8e11a4973af3] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 680.391359] env[62070]: DEBUG oslo_concurrency.lockutils [None req-97492129-d62b-43cc-b22d-f53a63d9b75b tempest-InstanceActionsTestJSON-809963800 
tempest-InstanceActionsTestJSON-809963800-project-member] Releasing lock "refresh_cache-adccca24-ed77-410b-8b69-19137cadafbd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 680.391359] env[62070]: DEBUG nova.compute.manager [None req-97492129-d62b-43cc-b22d-f53a63d9b75b tempest-InstanceActionsTestJSON-809963800 tempest-InstanceActionsTestJSON-809963800-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62070) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 680.391562] env[62070]: DEBUG nova.compute.manager [None req-97492129-d62b-43cc-b22d-f53a63d9b75b tempest-InstanceActionsTestJSON-809963800 tempest-InstanceActionsTestJSON-809963800-project-member] [instance: adccca24-ed77-410b-8b69-19137cadafbd] Deallocating network for instance {{(pid=62070) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 680.391687] env[62070]: DEBUG nova.network.neutron [None req-97492129-d62b-43cc-b22d-f53a63d9b75b tempest-InstanceActionsTestJSON-809963800 tempest-InstanceActionsTestJSON-809963800-project-member] [instance: adccca24-ed77-410b-8b69-19137cadafbd] deallocate_for_instance() {{(pid=62070) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 680.412593] env[62070]: DEBUG nova.network.neutron [None req-97492129-d62b-43cc-b22d-f53a63d9b75b tempest-InstanceActionsTestJSON-809963800 tempest-InstanceActionsTestJSON-809963800-project-member] [instance: adccca24-ed77-410b-8b69-19137cadafbd] Instance cache missing network info. {{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 680.719025] env[62070]: DEBUG nova.scheduler.client.report [None req-ca5c3ba7-16ea-4ea4-a91b-9bf5923a33ab tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 680.829051] env[62070]: DEBUG nova.network.neutron [None req-b596e62b-91d4-4a7e-a714-611f34958df6 tempest-FloatingIPsAssociationTestJSON-86130693 tempest-FloatingIPsAssociationTestJSON-86130693-project-member] [instance: 2c1dfa78-d300-4505-9f87-8e11a4973af3] Instance cache missing network info. 
{{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 680.841430] env[62070]: DEBUG oslo_concurrency.lockutils [None req-9f976259-6d6a-4631-9026-5216a519ae6c tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Acquiring lock "10672096-00ba-4481-8ab3-085a185076db" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 680.841872] env[62070]: DEBUG oslo_concurrency.lockutils [None req-9f976259-6d6a-4631-9026-5216a519ae6c tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Lock "10672096-00ba-4481-8ab3-085a185076db" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 680.916548] env[62070]: DEBUG nova.network.neutron [None req-97492129-d62b-43cc-b22d-f53a63d9b75b tempest-InstanceActionsTestJSON-809963800 tempest-InstanceActionsTestJSON-809963800-project-member] [instance: adccca24-ed77-410b-8b69-19137cadafbd] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 680.943022] env[62070]: DEBUG nova.network.neutron [None req-b596e62b-91d4-4a7e-a714-611f34958df6 tempest-FloatingIPsAssociationTestJSON-86130693 tempest-FloatingIPsAssociationTestJSON-86130693-project-member] [instance: 2c1dfa78-d300-4505-9f87-8e11a4973af3] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 681.140566] env[62070]: DEBUG oslo_concurrency.lockutils [None req-2de264e4-258a-4ba0-b64b-88da6d460fb5 tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Acquiring lock "2368b649-f931-454c-92cc-971df4155d90" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 681.140918] env[62070]: DEBUG oslo_concurrency.lockutils [None req-2de264e4-258a-4ba0-b64b-88da6d460fb5 tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Lock "2368b649-f931-454c-92cc-971df4155d90" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 681.197503] env[62070]: DEBUG nova.compute.manager [req-2bfdb2c9-d1fe-40c7-8056-1f4ce48c714f req-1e29ec2f-6d9e-4754-a8c1-cd1b79aec713 service nova] [instance: 2c1dfa78-d300-4505-9f87-8e11a4973af3] Received event network-vif-deleted-e2dc7d43-c5bb-4bda-a4db-bcb59bb3342d {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 681.223887] env[62070]: DEBUG oslo_concurrency.lockutils [None req-ca5c3ba7-16ea-4ea4-a91b-9bf5923a33ab tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.040s {{(pid=62070) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 681.224678] env[62070]: ERROR nova.compute.manager [None req-ca5c3ba7-16ea-4ea4-a91b-9bf5923a33ab tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: efa18997-b502-4e2e-933a-a185ab9074d5] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 7af3fb3f-ac6d-4b68-b071-de50eab1a1f3, please check neutron logs for more information. [ 681.224678] env[62070]: ERROR nova.compute.manager [instance: efa18997-b502-4e2e-933a-a185ab9074d5] Traceback (most recent call last): [ 681.224678] env[62070]: ERROR nova.compute.manager [instance: efa18997-b502-4e2e-933a-a185ab9074d5] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 681.224678] env[62070]: ERROR nova.compute.manager [instance: efa18997-b502-4e2e-933a-a185ab9074d5] self.driver.spawn(context, instance, image_meta, [ 681.224678] env[62070]: ERROR nova.compute.manager [instance: efa18997-b502-4e2e-933a-a185ab9074d5] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 681.224678] env[62070]: ERROR nova.compute.manager [instance: efa18997-b502-4e2e-933a-a185ab9074d5] self._vmops.spawn(context, instance, image_meta, injected_files, [ 681.224678] env[62070]: ERROR nova.compute.manager [instance: efa18997-b502-4e2e-933a-a185ab9074d5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 681.224678] env[62070]: ERROR nova.compute.manager [instance: efa18997-b502-4e2e-933a-a185ab9074d5] vm_ref = self.build_virtual_machine(instance, [ 681.224678] env[62070]: ERROR nova.compute.manager [instance: efa18997-b502-4e2e-933a-a185ab9074d5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 681.224678] env[62070]: ERROR nova.compute.manager [instance: efa18997-b502-4e2e-933a-a185ab9074d5] vif_infos = vmwarevif.get_vif_info(self._session, [ 681.224678] env[62070]: ERROR nova.compute.manager [instance: efa18997-b502-4e2e-933a-a185ab9074d5] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 681.225027] env[62070]: ERROR nova.compute.manager [instance: efa18997-b502-4e2e-933a-a185ab9074d5] for vif in network_info: [ 681.225027] env[62070]: ERROR nova.compute.manager [instance: efa18997-b502-4e2e-933a-a185ab9074d5] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 681.225027] env[62070]: ERROR nova.compute.manager [instance: efa18997-b502-4e2e-933a-a185ab9074d5] return self._sync_wrapper(fn, *args, **kwargs) [ 681.225027] env[62070]: ERROR nova.compute.manager [instance: efa18997-b502-4e2e-933a-a185ab9074d5] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 681.225027] env[62070]: ERROR nova.compute.manager [instance: efa18997-b502-4e2e-933a-a185ab9074d5] self.wait() [ 681.225027] env[62070]: ERROR nova.compute.manager [instance: efa18997-b502-4e2e-933a-a185ab9074d5] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 681.225027] env[62070]: ERROR nova.compute.manager [instance: efa18997-b502-4e2e-933a-a185ab9074d5] self[:] = self._gt.wait() [ 681.225027] env[62070]: ERROR nova.compute.manager [instance: efa18997-b502-4e2e-933a-a185ab9074d5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 681.225027] env[62070]: ERROR nova.compute.manager [instance: efa18997-b502-4e2e-933a-a185ab9074d5] return self._exit_event.wait() [ 
681.225027] env[62070]: ERROR nova.compute.manager [instance: efa18997-b502-4e2e-933a-a185ab9074d5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 681.225027] env[62070]: ERROR nova.compute.manager [instance: efa18997-b502-4e2e-933a-a185ab9074d5] result = hub.switch() [ 681.225027] env[62070]: ERROR nova.compute.manager [instance: efa18997-b502-4e2e-933a-a185ab9074d5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 681.225027] env[62070]: ERROR nova.compute.manager [instance: efa18997-b502-4e2e-933a-a185ab9074d5] return self.greenlet.switch() [ 681.225341] env[62070]: ERROR nova.compute.manager [instance: efa18997-b502-4e2e-933a-a185ab9074d5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 681.225341] env[62070]: ERROR nova.compute.manager [instance: efa18997-b502-4e2e-933a-a185ab9074d5] result = function(*args, **kwargs) [ 681.225341] env[62070]: ERROR nova.compute.manager [instance: efa18997-b502-4e2e-933a-a185ab9074d5] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 681.225341] env[62070]: ERROR nova.compute.manager [instance: efa18997-b502-4e2e-933a-a185ab9074d5] return func(*args, **kwargs) [ 681.225341] env[62070]: ERROR nova.compute.manager [instance: efa18997-b502-4e2e-933a-a185ab9074d5] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 681.225341] env[62070]: ERROR nova.compute.manager [instance: efa18997-b502-4e2e-933a-a185ab9074d5] raise e [ 681.225341] env[62070]: ERROR nova.compute.manager [instance: efa18997-b502-4e2e-933a-a185ab9074d5] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 681.225341] env[62070]: ERROR nova.compute.manager [instance: efa18997-b502-4e2e-933a-a185ab9074d5] nwinfo = self.network_api.allocate_for_instance( [ 681.225341] env[62070]: ERROR nova.compute.manager [instance: efa18997-b502-4e2e-933a-a185ab9074d5] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 681.225341] env[62070]: ERROR nova.compute.manager [instance: efa18997-b502-4e2e-933a-a185ab9074d5] created_port_ids = self._update_ports_for_instance( [ 681.225341] env[62070]: ERROR nova.compute.manager [instance: efa18997-b502-4e2e-933a-a185ab9074d5] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 681.225341] env[62070]: ERROR nova.compute.manager [instance: efa18997-b502-4e2e-933a-a185ab9074d5] with excutils.save_and_reraise_exception(): [ 681.225341] env[62070]: ERROR nova.compute.manager [instance: efa18997-b502-4e2e-933a-a185ab9074d5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 681.225656] env[62070]: ERROR nova.compute.manager [instance: efa18997-b502-4e2e-933a-a185ab9074d5] self.force_reraise() [ 681.225656] env[62070]: ERROR nova.compute.manager [instance: efa18997-b502-4e2e-933a-a185ab9074d5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 681.225656] env[62070]: ERROR nova.compute.manager [instance: efa18997-b502-4e2e-933a-a185ab9074d5] raise self.value [ 681.225656] env[62070]: ERROR nova.compute.manager [instance: efa18997-b502-4e2e-933a-a185ab9074d5] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 681.225656] env[62070]: ERROR nova.compute.manager [instance: efa18997-b502-4e2e-933a-a185ab9074d5] 
updated_port = self._update_port( [ 681.225656] env[62070]: ERROR nova.compute.manager [instance: efa18997-b502-4e2e-933a-a185ab9074d5] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 681.225656] env[62070]: ERROR nova.compute.manager [instance: efa18997-b502-4e2e-933a-a185ab9074d5] _ensure_no_port_binding_failure(port) [ 681.225656] env[62070]: ERROR nova.compute.manager [instance: efa18997-b502-4e2e-933a-a185ab9074d5] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 681.225656] env[62070]: ERROR nova.compute.manager [instance: efa18997-b502-4e2e-933a-a185ab9074d5] raise exception.PortBindingFailed(port_id=port['id']) [ 681.225656] env[62070]: ERROR nova.compute.manager [instance: efa18997-b502-4e2e-933a-a185ab9074d5] nova.exception.PortBindingFailed: Binding failed for port 7af3fb3f-ac6d-4b68-b071-de50eab1a1f3, please check neutron logs for more information. [ 681.225656] env[62070]: ERROR nova.compute.manager [instance: efa18997-b502-4e2e-933a-a185ab9074d5] [ 681.225938] env[62070]: DEBUG nova.compute.utils [None req-ca5c3ba7-16ea-4ea4-a91b-9bf5923a33ab tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: efa18997-b502-4e2e-933a-a185ab9074d5] Binding failed for port 7af3fb3f-ac6d-4b68-b071-de50eab1a1f3, please check neutron logs for more information. {{(pid=62070) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 681.227206] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6c8ffe8c-5f36-434d-8036-6e71a11d52b4 tempest-InstanceActionsV221TestJSON-1098319233 tempest-InstanceActionsV221TestJSON-1098319233-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 14.857s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 681.230376] env[62070]: DEBUG nova.compute.manager [None req-ca5c3ba7-16ea-4ea4-a91b-9bf5923a33ab tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: efa18997-b502-4e2e-933a-a185ab9074d5] Build of instance efa18997-b502-4e2e-933a-a185ab9074d5 was re-scheduled: Binding failed for port 7af3fb3f-ac6d-4b68-b071-de50eab1a1f3, please check neutron logs for more information. 
{{(pid=62070) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 681.230664] env[62070]: DEBUG nova.compute.manager [None req-ca5c3ba7-16ea-4ea4-a91b-9bf5923a33ab tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: efa18997-b502-4e2e-933a-a185ab9074d5] Unplugging VIFs for instance {{(pid=62070) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 681.230890] env[62070]: DEBUG oslo_concurrency.lockutils [None req-ca5c3ba7-16ea-4ea4-a91b-9bf5923a33ab tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Acquiring lock "refresh_cache-efa18997-b502-4e2e-933a-a185ab9074d5" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 681.234059] env[62070]: DEBUG oslo_concurrency.lockutils [None req-ca5c3ba7-16ea-4ea4-a91b-9bf5923a33ab tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Acquired lock "refresh_cache-efa18997-b502-4e2e-933a-a185ab9074d5" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 681.234059] env[62070]: DEBUG nova.network.neutron [None req-ca5c3ba7-16ea-4ea4-a91b-9bf5923a33ab tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: efa18997-b502-4e2e-933a-a185ab9074d5] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 681.420604] env[62070]: INFO nova.compute.manager [None req-97492129-d62b-43cc-b22d-f53a63d9b75b tempest-InstanceActionsTestJSON-809963800 tempest-InstanceActionsTestJSON-809963800-project-member] [instance: adccca24-ed77-410b-8b69-19137cadafbd] Took 1.03 seconds to deallocate network for instance. [ 681.449489] env[62070]: DEBUG oslo_concurrency.lockutils [None req-b596e62b-91d4-4a7e-a714-611f34958df6 tempest-FloatingIPsAssociationTestJSON-86130693 tempest-FloatingIPsAssociationTestJSON-86130693-project-member] Releasing lock "refresh_cache-2c1dfa78-d300-4505-9f87-8e11a4973af3" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 681.449641] env[62070]: DEBUG nova.compute.manager [None req-b596e62b-91d4-4a7e-a714-611f34958df6 tempest-FloatingIPsAssociationTestJSON-86130693 tempest-FloatingIPsAssociationTestJSON-86130693-project-member] [instance: 2c1dfa78-d300-4505-9f87-8e11a4973af3] Start destroying the instance on the hypervisor. 
{{(pid=62070) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 681.449826] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-b596e62b-91d4-4a7e-a714-611f34958df6 tempest-FloatingIPsAssociationTestJSON-86130693 tempest-FloatingIPsAssociationTestJSON-86130693-project-member] [instance: 2c1dfa78-d300-4505-9f87-8e11a4973af3] Destroying instance {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 681.450925] env[62070]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6adc9ed4-de64-4853-a487-77ef544fbbd3 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.458982] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2bc4f0c-d5e0-443a-ab48-e4785784c962 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.483570] env[62070]: WARNING nova.virt.vmwareapi.vmops [None req-b596e62b-91d4-4a7e-a714-611f34958df6 tempest-FloatingIPsAssociationTestJSON-86130693 tempest-FloatingIPsAssociationTestJSON-86130693-project-member] [instance: 2c1dfa78-d300-4505-9f87-8e11a4973af3] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 2c1dfa78-d300-4505-9f87-8e11a4973af3 could not be found. [ 681.483765] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-b596e62b-91d4-4a7e-a714-611f34958df6 tempest-FloatingIPsAssociationTestJSON-86130693 tempest-FloatingIPsAssociationTestJSON-86130693-project-member] [instance: 2c1dfa78-d300-4505-9f87-8e11a4973af3] Instance destroyed {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 681.483946] env[62070]: INFO nova.compute.manager [None req-b596e62b-91d4-4a7e-a714-611f34958df6 tempest-FloatingIPsAssociationTestJSON-86130693 tempest-FloatingIPsAssociationTestJSON-86130693-project-member] [instance: 2c1dfa78-d300-4505-9f87-8e11a4973af3] Took 0.03 seconds to destroy the instance on the hypervisor. [ 681.484213] env[62070]: DEBUG oslo.service.loopingcall [None req-b596e62b-91d4-4a7e-a714-611f34958df6 tempest-FloatingIPsAssociationTestJSON-86130693 tempest-FloatingIPsAssociationTestJSON-86130693-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 681.484444] env[62070]: DEBUG nova.compute.manager [-] [instance: 2c1dfa78-d300-4505-9f87-8e11a4973af3] Deallocating network for instance {{(pid=62070) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 681.484540] env[62070]: DEBUG nova.network.neutron [-] [instance: 2c1dfa78-d300-4505-9f87-8e11a4973af3] deallocate_for_instance() {{(pid=62070) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 681.499562] env[62070]: DEBUG nova.network.neutron [-] [instance: 2c1dfa78-d300-4505-9f87-8e11a4973af3] Instance cache missing network info. 
{{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 681.567089] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6578fefe-a6f2-433c-911f-0627322a57aa tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Acquiring lock "f75ed36e-16c8-4a6b-bd39-eb4057ef0691" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 681.567336] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6578fefe-a6f2-433c-911f-0627322a57aa tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Lock "f75ed36e-16c8-4a6b-bd39-eb4057ef0691" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 681.754161] env[62070]: DEBUG nova.network.neutron [None req-ca5c3ba7-16ea-4ea4-a91b-9bf5923a33ab tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: efa18997-b502-4e2e-933a-a185ab9074d5] Instance cache missing network info. {{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 681.877840] env[62070]: DEBUG nova.network.neutron [None req-ca5c3ba7-16ea-4ea4-a91b-9bf5923a33ab tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: efa18997-b502-4e2e-933a-a185ab9074d5] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 682.005234] env[62070]: DEBUG nova.network.neutron [-] [instance: 2c1dfa78-d300-4505-9f87-8e11a4973af3] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 682.229707] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b81474ba-25fc-41c5-a52a-118b88830d20 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.238511] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fc598fc-300b-41ee-8852-7c2e66c9e5b7 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.270600] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd6b9f7f-b9bc-427b-b4ba-316a2d1ef7cb {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.278012] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8bd79f6-31be-4415-9123-eb30a8d8c13c {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.291352] env[62070]: DEBUG nova.compute.provider_tree [None req-6c8ffe8c-5f36-434d-8036-6e71a11d52b4 tempest-InstanceActionsV221TestJSON-1098319233 tempest-InstanceActionsV221TestJSON-1098319233-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 
682.384473] env[62070]: DEBUG oslo_concurrency.lockutils [None req-ca5c3ba7-16ea-4ea4-a91b-9bf5923a33ab tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Releasing lock "refresh_cache-efa18997-b502-4e2e-933a-a185ab9074d5" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 682.384730] env[62070]: DEBUG nova.compute.manager [None req-ca5c3ba7-16ea-4ea4-a91b-9bf5923a33ab tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62070) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 682.384913] env[62070]: DEBUG nova.compute.manager [None req-ca5c3ba7-16ea-4ea4-a91b-9bf5923a33ab tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: efa18997-b502-4e2e-933a-a185ab9074d5] Deallocating network for instance {{(pid=62070) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 682.385235] env[62070]: DEBUG nova.network.neutron [None req-ca5c3ba7-16ea-4ea4-a91b-9bf5923a33ab tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: efa18997-b502-4e2e-933a-a185ab9074d5] deallocate_for_instance() {{(pid=62070) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 682.398930] env[62070]: DEBUG nova.network.neutron [None req-ca5c3ba7-16ea-4ea4-a91b-9bf5923a33ab tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: efa18997-b502-4e2e-933a-a185ab9074d5] Instance cache missing network info. {{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 682.452757] env[62070]: INFO nova.scheduler.client.report [None req-97492129-d62b-43cc-b22d-f53a63d9b75b tempest-InstanceActionsTestJSON-809963800 tempest-InstanceActionsTestJSON-809963800-project-member] Deleted allocations for instance adccca24-ed77-410b-8b69-19137cadafbd [ 682.507423] env[62070]: INFO nova.compute.manager [-] [instance: 2c1dfa78-d300-4505-9f87-8e11a4973af3] Took 1.02 seconds to deallocate network for instance. 
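The oslo_concurrency.lockutils records in this section report how long each caller waited for and then held a named lock. A rough, dependency-free illustration of that waited/held bookkeeping, not oslo.concurrency's implementation:

# Hedged sketch only: mimics the "waited N s" / "held N s" records, using plain threading.
import threading
import time
from contextlib import contextmanager

_locks = {}


@contextmanager
def timed_lock(name, owner):
    lock = _locks.setdefault(name, threading.Lock())
    t0 = time.monotonic()
    lock.acquire()
    waited = time.monotonic() - t0
    print(f'Lock "{name}" acquired by "{owner}" :: waited {waited:.3f}s')
    t1 = time.monotonic()
    try:
        yield
    finally:
        lock.release()
        held = time.monotonic() - t1
        print(f'Lock "{name}" "released" by "{owner}" :: held {held:.3f}s')


# Usage sketch:
# with timed_lock("compute_resources", "ResourceTracker.abort_instance_claim"):
#     ...  # critical section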
[ 682.509740] env[62070]: DEBUG nova.compute.claims [None req-b596e62b-91d4-4a7e-a714-611f34958df6 tempest-FloatingIPsAssociationTestJSON-86130693 tempest-FloatingIPsAssociationTestJSON-86130693-project-member] [instance: 2c1dfa78-d300-4505-9f87-8e11a4973af3] Aborting claim: {{(pid=62070) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 682.509923] env[62070]: DEBUG oslo_concurrency.lockutils [None req-b596e62b-91d4-4a7e-a714-611f34958df6 tempest-FloatingIPsAssociationTestJSON-86130693 tempest-FloatingIPsAssociationTestJSON-86130693-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 682.796560] env[62070]: DEBUG nova.scheduler.client.report [None req-6c8ffe8c-5f36-434d-8036-6e71a11d52b4 tempest-InstanceActionsV221TestJSON-1098319233 tempest-InstanceActionsV221TestJSON-1098319233-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 682.901983] env[62070]: DEBUG nova.network.neutron [None req-ca5c3ba7-16ea-4ea4-a91b-9bf5923a33ab tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: efa18997-b502-4e2e-933a-a185ab9074d5] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 682.960353] env[62070]: DEBUG oslo_concurrency.lockutils [None req-97492129-d62b-43cc-b22d-f53a63d9b75b tempest-InstanceActionsTestJSON-809963800 tempest-InstanceActionsTestJSON-809963800-project-member] Lock "adccca24-ed77-410b-8b69-19137cadafbd" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 93.682s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 683.306772] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6c8ffe8c-5f36-434d-8036-6e71a11d52b4 tempest-InstanceActionsV221TestJSON-1098319233 tempest-InstanceActionsV221TestJSON-1098319233-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.078s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 683.306772] env[62070]: ERROR nova.compute.manager [None req-6c8ffe8c-5f36-434d-8036-6e71a11d52b4 tempest-InstanceActionsV221TestJSON-1098319233 tempest-InstanceActionsV221TestJSON-1098319233-project-member] [instance: 9ec1b7a6-5ade-49a3-ba47-912bb328adb6] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port b59e3bf6-60a3-4125-8867-7c2fee078882, please check neutron logs for more information. 
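The inventory dict reported above for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 can be read as schedulable capacity using the usual Placement interpretation, (total - reserved) * allocation_ratio; a short worked example over the logged values:

# Inventory data reproduced from the log record above; the capacity formula is the
# standard Placement reading of these fields.
inventory = {
    'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16,
             'step_size': 1, 'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1,
                  'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0},
    'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169,
                'step_size': 1, 'allocation_ratio': 1.0},
}

for rc, inv in inventory.items():
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(f"{rc}: schedulable capacity {capacity:g}, max per allocation {inv['max_unit']}")
# Prints: VCPU 192, MEMORY_MB 196078, DISK_GB 400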
[ 683.306772] env[62070]: ERROR nova.compute.manager [instance: 9ec1b7a6-5ade-49a3-ba47-912bb328adb6] Traceback (most recent call last): [ 683.306772] env[62070]: ERROR nova.compute.manager [instance: 9ec1b7a6-5ade-49a3-ba47-912bb328adb6] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 683.306772] env[62070]: ERROR nova.compute.manager [instance: 9ec1b7a6-5ade-49a3-ba47-912bb328adb6] self.driver.spawn(context, instance, image_meta, [ 683.306772] env[62070]: ERROR nova.compute.manager [instance: 9ec1b7a6-5ade-49a3-ba47-912bb328adb6] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 683.306772] env[62070]: ERROR nova.compute.manager [instance: 9ec1b7a6-5ade-49a3-ba47-912bb328adb6] self._vmops.spawn(context, instance, image_meta, injected_files, [ 683.306772] env[62070]: ERROR nova.compute.manager [instance: 9ec1b7a6-5ade-49a3-ba47-912bb328adb6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 683.306772] env[62070]: ERROR nova.compute.manager [instance: 9ec1b7a6-5ade-49a3-ba47-912bb328adb6] vm_ref = self.build_virtual_machine(instance, [ 683.307402] env[62070]: ERROR nova.compute.manager [instance: 9ec1b7a6-5ade-49a3-ba47-912bb328adb6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 683.307402] env[62070]: ERROR nova.compute.manager [instance: 9ec1b7a6-5ade-49a3-ba47-912bb328adb6] vif_infos = vmwarevif.get_vif_info(self._session, [ 683.307402] env[62070]: ERROR nova.compute.manager [instance: 9ec1b7a6-5ade-49a3-ba47-912bb328adb6] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 683.307402] env[62070]: ERROR nova.compute.manager [instance: 9ec1b7a6-5ade-49a3-ba47-912bb328adb6] for vif in network_info: [ 683.307402] env[62070]: ERROR nova.compute.manager [instance: 9ec1b7a6-5ade-49a3-ba47-912bb328adb6] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 683.307402] env[62070]: ERROR nova.compute.manager [instance: 9ec1b7a6-5ade-49a3-ba47-912bb328adb6] return self._sync_wrapper(fn, *args, **kwargs) [ 683.307402] env[62070]: ERROR nova.compute.manager [instance: 9ec1b7a6-5ade-49a3-ba47-912bb328adb6] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 683.307402] env[62070]: ERROR nova.compute.manager [instance: 9ec1b7a6-5ade-49a3-ba47-912bb328adb6] self.wait() [ 683.307402] env[62070]: ERROR nova.compute.manager [instance: 9ec1b7a6-5ade-49a3-ba47-912bb328adb6] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 683.307402] env[62070]: ERROR nova.compute.manager [instance: 9ec1b7a6-5ade-49a3-ba47-912bb328adb6] self[:] = self._gt.wait() [ 683.307402] env[62070]: ERROR nova.compute.manager [instance: 9ec1b7a6-5ade-49a3-ba47-912bb328adb6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 683.307402] env[62070]: ERROR nova.compute.manager [instance: 9ec1b7a6-5ade-49a3-ba47-912bb328adb6] return self._exit_event.wait() [ 683.307402] env[62070]: ERROR nova.compute.manager [instance: 9ec1b7a6-5ade-49a3-ba47-912bb328adb6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 683.307758] env[62070]: ERROR nova.compute.manager [instance: 9ec1b7a6-5ade-49a3-ba47-912bb328adb6] result = hub.switch() [ 683.307758] env[62070]: ERROR nova.compute.manager [instance: 9ec1b7a6-5ade-49a3-ba47-912bb328adb6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
683.307758] env[62070]: ERROR nova.compute.manager [instance: 9ec1b7a6-5ade-49a3-ba47-912bb328adb6] return self.greenlet.switch() [ 683.307758] env[62070]: ERROR nova.compute.manager [instance: 9ec1b7a6-5ade-49a3-ba47-912bb328adb6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 683.307758] env[62070]: ERROR nova.compute.manager [instance: 9ec1b7a6-5ade-49a3-ba47-912bb328adb6] result = function(*args, **kwargs) [ 683.307758] env[62070]: ERROR nova.compute.manager [instance: 9ec1b7a6-5ade-49a3-ba47-912bb328adb6] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 683.307758] env[62070]: ERROR nova.compute.manager [instance: 9ec1b7a6-5ade-49a3-ba47-912bb328adb6] return func(*args, **kwargs) [ 683.307758] env[62070]: ERROR nova.compute.manager [instance: 9ec1b7a6-5ade-49a3-ba47-912bb328adb6] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 683.307758] env[62070]: ERROR nova.compute.manager [instance: 9ec1b7a6-5ade-49a3-ba47-912bb328adb6] raise e [ 683.307758] env[62070]: ERROR nova.compute.manager [instance: 9ec1b7a6-5ade-49a3-ba47-912bb328adb6] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 683.307758] env[62070]: ERROR nova.compute.manager [instance: 9ec1b7a6-5ade-49a3-ba47-912bb328adb6] nwinfo = self.network_api.allocate_for_instance( [ 683.307758] env[62070]: ERROR nova.compute.manager [instance: 9ec1b7a6-5ade-49a3-ba47-912bb328adb6] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 683.307758] env[62070]: ERROR nova.compute.manager [instance: 9ec1b7a6-5ade-49a3-ba47-912bb328adb6] created_port_ids = self._update_ports_for_instance( [ 683.308088] env[62070]: ERROR nova.compute.manager [instance: 9ec1b7a6-5ade-49a3-ba47-912bb328adb6] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 683.308088] env[62070]: ERROR nova.compute.manager [instance: 9ec1b7a6-5ade-49a3-ba47-912bb328adb6] with excutils.save_and_reraise_exception(): [ 683.308088] env[62070]: ERROR nova.compute.manager [instance: 9ec1b7a6-5ade-49a3-ba47-912bb328adb6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 683.308088] env[62070]: ERROR nova.compute.manager [instance: 9ec1b7a6-5ade-49a3-ba47-912bb328adb6] self.force_reraise() [ 683.308088] env[62070]: ERROR nova.compute.manager [instance: 9ec1b7a6-5ade-49a3-ba47-912bb328adb6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 683.308088] env[62070]: ERROR nova.compute.manager [instance: 9ec1b7a6-5ade-49a3-ba47-912bb328adb6] raise self.value [ 683.308088] env[62070]: ERROR nova.compute.manager [instance: 9ec1b7a6-5ade-49a3-ba47-912bb328adb6] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 683.308088] env[62070]: ERROR nova.compute.manager [instance: 9ec1b7a6-5ade-49a3-ba47-912bb328adb6] updated_port = self._update_port( [ 683.308088] env[62070]: ERROR nova.compute.manager [instance: 9ec1b7a6-5ade-49a3-ba47-912bb328adb6] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 683.308088] env[62070]: ERROR nova.compute.manager [instance: 9ec1b7a6-5ade-49a3-ba47-912bb328adb6] _ensure_no_port_binding_failure(port) [ 683.308088] env[62070]: ERROR nova.compute.manager [instance: 9ec1b7a6-5ade-49a3-ba47-912bb328adb6] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 683.308088] env[62070]: ERROR nova.compute.manager [instance: 9ec1b7a6-5ade-49a3-ba47-912bb328adb6] raise exception.PortBindingFailed(port_id=port['id']) [ 683.308428] env[62070]: ERROR nova.compute.manager [instance: 9ec1b7a6-5ade-49a3-ba47-912bb328adb6] nova.exception.PortBindingFailed: Binding failed for port b59e3bf6-60a3-4125-8867-7c2fee078882, please check neutron logs for more information. [ 683.308428] env[62070]: ERROR nova.compute.manager [instance: 9ec1b7a6-5ade-49a3-ba47-912bb328adb6] [ 683.308428] env[62070]: DEBUG nova.compute.utils [None req-6c8ffe8c-5f36-434d-8036-6e71a11d52b4 tempest-InstanceActionsV221TestJSON-1098319233 tempest-InstanceActionsV221TestJSON-1098319233-project-member] [instance: 9ec1b7a6-5ade-49a3-ba47-912bb328adb6] Binding failed for port b59e3bf6-60a3-4125-8867-7c2fee078882, please check neutron logs for more information. {{(pid=62070) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 683.308428] env[62070]: DEBUG oslo_concurrency.lockutils [None req-75e4eab2-d509-4e0b-8199-5c5644d968e3 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 14.050s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 683.311326] env[62070]: DEBUG nova.compute.manager [None req-6c8ffe8c-5f36-434d-8036-6e71a11d52b4 tempest-InstanceActionsV221TestJSON-1098319233 tempest-InstanceActionsV221TestJSON-1098319233-project-member] [instance: 9ec1b7a6-5ade-49a3-ba47-912bb328adb6] Build of instance 9ec1b7a6-5ade-49a3-ba47-912bb328adb6 was re-scheduled: Binding failed for port b59e3bf6-60a3-4125-8867-7c2fee078882, please check neutron logs for more information. 
{{(pid=62070) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 683.311724] env[62070]: DEBUG nova.compute.manager [None req-6c8ffe8c-5f36-434d-8036-6e71a11d52b4 tempest-InstanceActionsV221TestJSON-1098319233 tempest-InstanceActionsV221TestJSON-1098319233-project-member] [instance: 9ec1b7a6-5ade-49a3-ba47-912bb328adb6] Unplugging VIFs for instance {{(pid=62070) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 683.312172] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6c8ffe8c-5f36-434d-8036-6e71a11d52b4 tempest-InstanceActionsV221TestJSON-1098319233 tempest-InstanceActionsV221TestJSON-1098319233-project-member] Acquiring lock "refresh_cache-9ec1b7a6-5ade-49a3-ba47-912bb328adb6" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 683.312172] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6c8ffe8c-5f36-434d-8036-6e71a11d52b4 tempest-InstanceActionsV221TestJSON-1098319233 tempest-InstanceActionsV221TestJSON-1098319233-project-member] Acquired lock "refresh_cache-9ec1b7a6-5ade-49a3-ba47-912bb328adb6" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 683.312283] env[62070]: DEBUG nova.network.neutron [None req-6c8ffe8c-5f36-434d-8036-6e71a11d52b4 tempest-InstanceActionsV221TestJSON-1098319233 tempest-InstanceActionsV221TestJSON-1098319233-project-member] [instance: 9ec1b7a6-5ade-49a3-ba47-912bb328adb6] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 683.404450] env[62070]: INFO nova.compute.manager [None req-ca5c3ba7-16ea-4ea4-a91b-9bf5923a33ab tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: efa18997-b502-4e2e-933a-a185ab9074d5] Took 1.02 seconds to deallocate network for instance. [ 683.462518] env[62070]: DEBUG nova.compute.manager [None req-22f6b4d0-98d5-439d-a2a8-8f5bb53172a4 tempest-ServerTagsTestJSON-107248279 tempest-ServerTagsTestJSON-107248279-project-member] [instance: 30d782e4-30c7-41f6-b30d-95a9a59cf83c] Starting instance... {{(pid=62070) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 683.852454] env[62070]: DEBUG nova.network.neutron [None req-6c8ffe8c-5f36-434d-8036-6e71a11d52b4 tempest-InstanceActionsV221TestJSON-1098319233 tempest-InstanceActionsV221TestJSON-1098319233-project-member] [instance: 9ec1b7a6-5ade-49a3-ba47-912bb328adb6] Instance cache missing network info. 
{{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 683.984387] env[62070]: DEBUG oslo_concurrency.lockutils [None req-22f6b4d0-98d5-439d-a2a8-8f5bb53172a4 tempest-ServerTagsTestJSON-107248279 tempest-ServerTagsTestJSON-107248279-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 684.011565] env[62070]: DEBUG nova.network.neutron [None req-6c8ffe8c-5f36-434d-8036-6e71a11d52b4 tempest-InstanceActionsV221TestJSON-1098319233 tempest-InstanceActionsV221TestJSON-1098319233-project-member] [instance: 9ec1b7a6-5ade-49a3-ba47-912bb328adb6] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 684.231724] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c0ce383-32ba-444f-825b-84ff7f8e81d2 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.239471] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d8a7a69-acd3-4254-ab81-77d076c3cb48 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.270903] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9429061-9efc-479f-8be8-d4cd6070f962 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.278616] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-535cf587-9086-42ff-973f-d1975f8d660a {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.292982] env[62070]: DEBUG nova.compute.provider_tree [None req-75e4eab2-d509-4e0b-8199-5c5644d968e3 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 684.443953] env[62070]: INFO nova.scheduler.client.report [None req-ca5c3ba7-16ea-4ea4-a91b-9bf5923a33ab tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Deleted allocations for instance efa18997-b502-4e2e-933a-a185ab9074d5 [ 684.513236] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6c8ffe8c-5f36-434d-8036-6e71a11d52b4 tempest-InstanceActionsV221TestJSON-1098319233 tempest-InstanceActionsV221TestJSON-1098319233-project-member] Releasing lock "refresh_cache-9ec1b7a6-5ade-49a3-ba47-912bb328adb6" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 684.513481] env[62070]: DEBUG nova.compute.manager [None req-6c8ffe8c-5f36-434d-8036-6e71a11d52b4 tempest-InstanceActionsV221TestJSON-1098319233 tempest-InstanceActionsV221TestJSON-1098319233-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62070) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 684.513666] env[62070]: DEBUG nova.compute.manager [None req-6c8ffe8c-5f36-434d-8036-6e71a11d52b4 tempest-InstanceActionsV221TestJSON-1098319233 tempest-InstanceActionsV221TestJSON-1098319233-project-member] [instance: 9ec1b7a6-5ade-49a3-ba47-912bb328adb6] Deallocating network for instance {{(pid=62070) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 684.513850] env[62070]: DEBUG nova.network.neutron [None req-6c8ffe8c-5f36-434d-8036-6e71a11d52b4 tempest-InstanceActionsV221TestJSON-1098319233 tempest-InstanceActionsV221TestJSON-1098319233-project-member] [instance: 9ec1b7a6-5ade-49a3-ba47-912bb328adb6] deallocate_for_instance() {{(pid=62070) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 684.535844] env[62070]: DEBUG nova.network.neutron [None req-6c8ffe8c-5f36-434d-8036-6e71a11d52b4 tempest-InstanceActionsV221TestJSON-1098319233 tempest-InstanceActionsV221TestJSON-1098319233-project-member] [instance: 9ec1b7a6-5ade-49a3-ba47-912bb328adb6] Instance cache missing network info. {{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 684.796308] env[62070]: DEBUG nova.scheduler.client.report [None req-75e4eab2-d509-4e0b-8199-5c5644d968e3 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 684.958835] env[62070]: DEBUG oslo_concurrency.lockutils [None req-ca5c3ba7-16ea-4ea4-a91b-9bf5923a33ab tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Lock "efa18997-b502-4e2e-933a-a185ab9074d5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 94.071s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 685.038798] env[62070]: DEBUG nova.network.neutron [None req-6c8ffe8c-5f36-434d-8036-6e71a11d52b4 tempest-InstanceActionsV221TestJSON-1098319233 tempest-InstanceActionsV221TestJSON-1098319233-project-member] [instance: 9ec1b7a6-5ade-49a3-ba47-912bb328adb6] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 685.302948] env[62070]: DEBUG oslo_concurrency.lockutils [None req-75e4eab2-d509-4e0b-8199-5c5644d968e3 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.994s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 685.302948] env[62070]: ERROR nova.compute.manager [None req-75e4eab2-d509-4e0b-8199-5c5644d968e3 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 5da19104-b163-44cd-bb1f-68c4eb316ac1] Failed to build and run instance: 
nova.exception.PortBindingFailed: Binding failed for port a5142d5c-5bdb-43c6-90cd-7643fb62d918, please check neutron logs for more information. [ 685.302948] env[62070]: ERROR nova.compute.manager [instance: 5da19104-b163-44cd-bb1f-68c4eb316ac1] Traceback (most recent call last): [ 685.302948] env[62070]: ERROR nova.compute.manager [instance: 5da19104-b163-44cd-bb1f-68c4eb316ac1] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 685.302948] env[62070]: ERROR nova.compute.manager [instance: 5da19104-b163-44cd-bb1f-68c4eb316ac1] self.driver.spawn(context, instance, image_meta, [ 685.302948] env[62070]: ERROR nova.compute.manager [instance: 5da19104-b163-44cd-bb1f-68c4eb316ac1] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 685.302948] env[62070]: ERROR nova.compute.manager [instance: 5da19104-b163-44cd-bb1f-68c4eb316ac1] self._vmops.spawn(context, instance, image_meta, injected_files, [ 685.302948] env[62070]: ERROR nova.compute.manager [instance: 5da19104-b163-44cd-bb1f-68c4eb316ac1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 685.302948] env[62070]: ERROR nova.compute.manager [instance: 5da19104-b163-44cd-bb1f-68c4eb316ac1] vm_ref = self.build_virtual_machine(instance, [ 685.303458] env[62070]: ERROR nova.compute.manager [instance: 5da19104-b163-44cd-bb1f-68c4eb316ac1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 685.303458] env[62070]: ERROR nova.compute.manager [instance: 5da19104-b163-44cd-bb1f-68c4eb316ac1] vif_infos = vmwarevif.get_vif_info(self._session, [ 685.303458] env[62070]: ERROR nova.compute.manager [instance: 5da19104-b163-44cd-bb1f-68c4eb316ac1] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 685.303458] env[62070]: ERROR nova.compute.manager [instance: 5da19104-b163-44cd-bb1f-68c4eb316ac1] for vif in network_info: [ 685.303458] env[62070]: ERROR nova.compute.manager [instance: 5da19104-b163-44cd-bb1f-68c4eb316ac1] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 685.303458] env[62070]: ERROR nova.compute.manager [instance: 5da19104-b163-44cd-bb1f-68c4eb316ac1] return self._sync_wrapper(fn, *args, **kwargs) [ 685.303458] env[62070]: ERROR nova.compute.manager [instance: 5da19104-b163-44cd-bb1f-68c4eb316ac1] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 685.303458] env[62070]: ERROR nova.compute.manager [instance: 5da19104-b163-44cd-bb1f-68c4eb316ac1] self.wait() [ 685.303458] env[62070]: ERROR nova.compute.manager [instance: 5da19104-b163-44cd-bb1f-68c4eb316ac1] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 685.303458] env[62070]: ERROR nova.compute.manager [instance: 5da19104-b163-44cd-bb1f-68c4eb316ac1] self[:] = self._gt.wait() [ 685.303458] env[62070]: ERROR nova.compute.manager [instance: 5da19104-b163-44cd-bb1f-68c4eb316ac1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 685.303458] env[62070]: ERROR nova.compute.manager [instance: 5da19104-b163-44cd-bb1f-68c4eb316ac1] return self._exit_event.wait() [ 685.303458] env[62070]: ERROR nova.compute.manager [instance: 5da19104-b163-44cd-bb1f-68c4eb316ac1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 685.303881] env[62070]: ERROR nova.compute.manager [instance: 5da19104-b163-44cd-bb1f-68c4eb316ac1] current.throw(*self._exc) [ 685.303881] env[62070]: ERROR nova.compute.manager 
[instance: 5da19104-b163-44cd-bb1f-68c4eb316ac1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 685.303881] env[62070]: ERROR nova.compute.manager [instance: 5da19104-b163-44cd-bb1f-68c4eb316ac1] result = function(*args, **kwargs) [ 685.303881] env[62070]: ERROR nova.compute.manager [instance: 5da19104-b163-44cd-bb1f-68c4eb316ac1] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 685.303881] env[62070]: ERROR nova.compute.manager [instance: 5da19104-b163-44cd-bb1f-68c4eb316ac1] return func(*args, **kwargs) [ 685.303881] env[62070]: ERROR nova.compute.manager [instance: 5da19104-b163-44cd-bb1f-68c4eb316ac1] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 685.303881] env[62070]: ERROR nova.compute.manager [instance: 5da19104-b163-44cd-bb1f-68c4eb316ac1] raise e [ 685.303881] env[62070]: ERROR nova.compute.manager [instance: 5da19104-b163-44cd-bb1f-68c4eb316ac1] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 685.303881] env[62070]: ERROR nova.compute.manager [instance: 5da19104-b163-44cd-bb1f-68c4eb316ac1] nwinfo = self.network_api.allocate_for_instance( [ 685.303881] env[62070]: ERROR nova.compute.manager [instance: 5da19104-b163-44cd-bb1f-68c4eb316ac1] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 685.303881] env[62070]: ERROR nova.compute.manager [instance: 5da19104-b163-44cd-bb1f-68c4eb316ac1] created_port_ids = self._update_ports_for_instance( [ 685.303881] env[62070]: ERROR nova.compute.manager [instance: 5da19104-b163-44cd-bb1f-68c4eb316ac1] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 685.303881] env[62070]: ERROR nova.compute.manager [instance: 5da19104-b163-44cd-bb1f-68c4eb316ac1] with excutils.save_and_reraise_exception(): [ 685.304289] env[62070]: ERROR nova.compute.manager [instance: 5da19104-b163-44cd-bb1f-68c4eb316ac1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 685.304289] env[62070]: ERROR nova.compute.manager [instance: 5da19104-b163-44cd-bb1f-68c4eb316ac1] self.force_reraise() [ 685.304289] env[62070]: ERROR nova.compute.manager [instance: 5da19104-b163-44cd-bb1f-68c4eb316ac1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 685.304289] env[62070]: ERROR nova.compute.manager [instance: 5da19104-b163-44cd-bb1f-68c4eb316ac1] raise self.value [ 685.304289] env[62070]: ERROR nova.compute.manager [instance: 5da19104-b163-44cd-bb1f-68c4eb316ac1] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 685.304289] env[62070]: ERROR nova.compute.manager [instance: 5da19104-b163-44cd-bb1f-68c4eb316ac1] updated_port = self._update_port( [ 685.304289] env[62070]: ERROR nova.compute.manager [instance: 5da19104-b163-44cd-bb1f-68c4eb316ac1] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 685.304289] env[62070]: ERROR nova.compute.manager [instance: 5da19104-b163-44cd-bb1f-68c4eb316ac1] _ensure_no_port_binding_failure(port) [ 685.304289] env[62070]: ERROR nova.compute.manager [instance: 5da19104-b163-44cd-bb1f-68c4eb316ac1] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 685.304289] env[62070]: ERROR nova.compute.manager [instance: 5da19104-b163-44cd-bb1f-68c4eb316ac1] raise exception.PortBindingFailed(port_id=port['id']) [ 
685.304289] env[62070]: ERROR nova.compute.manager [instance: 5da19104-b163-44cd-bb1f-68c4eb316ac1] nova.exception.PortBindingFailed: Binding failed for port a5142d5c-5bdb-43c6-90cd-7643fb62d918, please check neutron logs for more information. [ 685.304289] env[62070]: ERROR nova.compute.manager [instance: 5da19104-b163-44cd-bb1f-68c4eb316ac1] [ 685.304789] env[62070]: DEBUG nova.compute.utils [None req-75e4eab2-d509-4e0b-8199-5c5644d968e3 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 5da19104-b163-44cd-bb1f-68c4eb316ac1] Binding failed for port a5142d5c-5bdb-43c6-90cd-7643fb62d918, please check neutron logs for more information. {{(pid=62070) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 685.306054] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 14.663s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 685.307986] env[62070]: DEBUG nova.compute.manager [None req-75e4eab2-d509-4e0b-8199-5c5644d968e3 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 5da19104-b163-44cd-bb1f-68c4eb316ac1] Build of instance 5da19104-b163-44cd-bb1f-68c4eb316ac1 was re-scheduled: Binding failed for port a5142d5c-5bdb-43c6-90cd-7643fb62d918, please check neutron logs for more information. {{(pid=62070) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 685.309066] env[62070]: DEBUG nova.compute.manager [None req-75e4eab2-d509-4e0b-8199-5c5644d968e3 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 5da19104-b163-44cd-bb1f-68c4eb316ac1] Unplugging VIFs for instance {{(pid=62070) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 685.309822] env[62070]: DEBUG oslo_concurrency.lockutils [None req-75e4eab2-d509-4e0b-8199-5c5644d968e3 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Acquiring lock "refresh_cache-5da19104-b163-44cd-bb1f-68c4eb316ac1" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 685.309822] env[62070]: DEBUG oslo_concurrency.lockutils [None req-75e4eab2-d509-4e0b-8199-5c5644d968e3 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Acquired lock "refresh_cache-5da19104-b163-44cd-bb1f-68c4eb316ac1" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 685.309985] env[62070]: DEBUG nova.network.neutron [None req-75e4eab2-d509-4e0b-8199-5c5644d968e3 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 5da19104-b163-44cd-bb1f-68c4eb316ac1] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 685.461811] env[62070]: DEBUG nova.compute.manager [None req-9903219f-0926-4eb2-b4c0-4bfda725a93d tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 67e99ada-a8e6-4034-b19b-5b2cb883b735] Starting instance... 
{{(pid=62070) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 685.542090] env[62070]: INFO nova.compute.manager [None req-6c8ffe8c-5f36-434d-8036-6e71a11d52b4 tempest-InstanceActionsV221TestJSON-1098319233 tempest-InstanceActionsV221TestJSON-1098319233-project-member] [instance: 9ec1b7a6-5ade-49a3-ba47-912bb328adb6] Took 1.03 seconds to deallocate network for instance. [ 685.843266] env[62070]: DEBUG nova.network.neutron [None req-75e4eab2-d509-4e0b-8199-5c5644d968e3 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 5da19104-b163-44cd-bb1f-68c4eb316ac1] Instance cache missing network info. {{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 685.952312] env[62070]: DEBUG nova.network.neutron [None req-75e4eab2-d509-4e0b-8199-5c5644d968e3 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 5da19104-b163-44cd-bb1f-68c4eb316ac1] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 685.988162] env[62070]: DEBUG oslo_concurrency.lockutils [None req-9903219f-0926-4eb2-b4c0-4bfda725a93d tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 686.454498] env[62070]: DEBUG oslo_concurrency.lockutils [None req-75e4eab2-d509-4e0b-8199-5c5644d968e3 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Releasing lock "refresh_cache-5da19104-b163-44cd-bb1f-68c4eb316ac1" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 686.454789] env[62070]: DEBUG nova.compute.manager [None req-75e4eab2-d509-4e0b-8199-5c5644d968e3 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62070) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 686.454920] env[62070]: DEBUG nova.compute.manager [None req-75e4eab2-d509-4e0b-8199-5c5644d968e3 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 5da19104-b163-44cd-bb1f-68c4eb316ac1] Deallocating network for instance {{(pid=62070) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 686.455137] env[62070]: DEBUG nova.network.neutron [None req-75e4eab2-d509-4e0b-8199-5c5644d968e3 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 5da19104-b163-44cd-bb1f-68c4eb316ac1] deallocate_for_instance() {{(pid=62070) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 686.474254] env[62070]: DEBUG nova.network.neutron [None req-75e4eab2-d509-4e0b-8199-5c5644d968e3 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 5da19104-b163-44cd-bb1f-68c4eb316ac1] Instance cache missing network info. 
{{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 686.577641] env[62070]: INFO nova.scheduler.client.report [None req-6c8ffe8c-5f36-434d-8036-6e71a11d52b4 tempest-InstanceActionsV221TestJSON-1098319233 tempest-InstanceActionsV221TestJSON-1098319233-project-member] Deleted allocations for instance 9ec1b7a6-5ade-49a3-ba47-912bb328adb6 [ 686.839851] env[62070]: DEBUG nova.compute.resource_tracker [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Instance 9ec1b7a6-5ade-49a3-ba47-912bb328adb6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62070) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 686.976965] env[62070]: DEBUG nova.network.neutron [None req-75e4eab2-d509-4e0b-8199-5c5644d968e3 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 5da19104-b163-44cd-bb1f-68c4eb316ac1] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 687.024134] env[62070]: DEBUG oslo_concurrency.lockutils [None req-79dfc138-9989-48ce-b920-501f82a9be5b tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Acquiring lock "c3c6e93c-80be-4e71-87fb-2ff8db8d30fe" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 687.024134] env[62070]: DEBUG oslo_concurrency.lockutils [None req-79dfc138-9989-48ce-b920-501f82a9be5b tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Lock "c3c6e93c-80be-4e71-87fb-2ff8db8d30fe" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 687.092986] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6c8ffe8c-5f36-434d-8036-6e71a11d52b4 tempest-InstanceActionsV221TestJSON-1098319233 tempest-InstanceActionsV221TestJSON-1098319233-project-member] Lock "9ec1b7a6-5ade-49a3-ba47-912bb328adb6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 96.200s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 687.345198] env[62070]: DEBUG nova.compute.resource_tracker [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Instance 5da19104-b163-44cd-bb1f-68c4eb316ac1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62070) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 687.345198] env[62070]: DEBUG nova.compute.resource_tracker [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Instance a71c58e7-89db-4ad2-92e0-5379b04b751c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62070) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 687.345198] env[62070]: DEBUG nova.compute.resource_tracker [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Instance 2c1dfa78-d300-4505-9f87-8e11a4973af3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62070) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 687.479910] env[62070]: INFO nova.compute.manager [None req-75e4eab2-d509-4e0b-8199-5c5644d968e3 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 5da19104-b163-44cd-bb1f-68c4eb316ac1] Took 1.02 seconds to deallocate network for instance. [ 687.598304] env[62070]: DEBUG nova.compute.manager [None req-87135f2c-55d6-41ab-9b21-58289e78f833 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] [instance: 963feecc-ff58-4cbb-8d6f-3f9035337087] Starting instance... {{(pid=62070) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 687.854358] env[62070]: DEBUG nova.compute.resource_tracker [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Instance d148d561-3211-4f1f-965a-f2b14cd60b11 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62070) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 688.124990] env[62070]: DEBUG oslo_concurrency.lockutils [None req-87135f2c-55d6-41ab-9b21-58289e78f833 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 688.361465] env[62070]: DEBUG nova.compute.resource_tracker [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Instance 0ac963b1-120a-464b-8228-3393135dd182 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62070) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 688.529892] env[62070]: INFO nova.scheduler.client.report [None req-75e4eab2-d509-4e0b-8199-5c5644d968e3 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Deleted allocations for instance 5da19104-b163-44cd-bb1f-68c4eb316ac1 [ 688.865863] env[62070]: DEBUG nova.compute.resource_tracker [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Instance 71aead12-a182-40a7-b5a9-91c01271b800 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62070) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 689.042811] env[62070]: DEBUG oslo_concurrency.lockutils [None req-75e4eab2-d509-4e0b-8199-5c5644d968e3 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Lock "5da19104-b163-44cd-bb1f-68c4eb316ac1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 96.546s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 689.371045] env[62070]: DEBUG nova.compute.resource_tracker [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Instance d0914f90-200c-4715-aaab-54beacf339b9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62070) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 689.407295] env[62070]: DEBUG oslo_concurrency.lockutils [None req-7d89b3fa-4dcc-4992-8983-de4ad7c29798 tempest-ServerMetadataTestJSON-571714377 tempest-ServerMetadataTestJSON-571714377-project-member] Acquiring lock "efef4aac-5b74-4a41-9f74-3d4cb4f80cdb" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 689.407869] env[62070]: DEBUG oslo_concurrency.lockutils [None req-7d89b3fa-4dcc-4992-8983-de4ad7c29798 tempest-ServerMetadataTestJSON-571714377 tempest-ServerMetadataTestJSON-571714377-project-member] Lock "efef4aac-5b74-4a41-9f74-3d4cb4f80cdb" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 689.545236] env[62070]: DEBUG nova.compute.manager [None req-0046a5be-b671-4771-8172-fdb6e585fc1d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] [instance: 1c1730e5-88af-4c7f-8bcc-d494db2cd723] Starting instance... {{(pid=62070) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 689.874338] env[62070]: DEBUG nova.compute.resource_tracker [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Instance 30d782e4-30c7-41f6-b30d-95a9a59cf83c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62070) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 690.082789] env[62070]: DEBUG oslo_concurrency.lockutils [None req-0046a5be-b671-4771-8172-fdb6e585fc1d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 690.211314] env[62070]: DEBUG oslo_concurrency.lockutils [None req-24dc43a0-9208-4ff7-9026-97bbf9aa150b tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Acquiring lock "328fbc92-8162-4e12-a02d-6e9cafe0c365" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 690.211596] env[62070]: DEBUG oslo_concurrency.lockutils [None req-24dc43a0-9208-4ff7-9026-97bbf9aa150b tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Lock "328fbc92-8162-4e12-a02d-6e9cafe0c365" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 690.378168] env[62070]: DEBUG nova.compute.resource_tracker [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Instance 67e99ada-a8e6-4034-b19b-5b2cb883b735 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62070) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 690.880945] env[62070]: DEBUG nova.compute.resource_tracker [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Instance 963feecc-ff58-4cbb-8d6f-3f9035337087 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62070) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 691.383667] env[62070]: DEBUG nova.compute.resource_tracker [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Instance 1c1730e5-88af-4c7f-8bcc-d494db2cd723 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62070) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 691.886435] env[62070]: DEBUG nova.compute.resource_tracker [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Instance 076aed5b-4b08-4f3b-a940-d9cd95c32e57 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62070) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 692.390161] env[62070]: DEBUG nova.compute.resource_tracker [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Instance fe378560-40b8-42c9-840d-b7d60de87c4d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62070) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 692.891438] env[62070]: DEBUG oslo_concurrency.lockutils [None req-9eb5f919-38d3-41ef-8aa0-3ff1772842b2 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Acquiring lock "3d22f50a-e1b7-48f9-a044-df64d01dfeb4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 692.891438] env[62070]: DEBUG oslo_concurrency.lockutils [None req-9eb5f919-38d3-41ef-8aa0-3ff1772842b2 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Lock "3d22f50a-e1b7-48f9-a044-df64d01dfeb4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 692.894252] env[62070]: DEBUG nova.compute.resource_tracker [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Instance bcafa04d-904b-4eab-aba1-35180c2d4b22 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62070) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 693.366293] env[62070]: DEBUG oslo_concurrency.lockutils [None req-93ddb69e-2ee1-4bdc-9fb9-c0932c81d871 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Acquiring lock "5ec9074b-1237-4404-b13c-a7ca0dbe1d43" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 693.366551] env[62070]: DEBUG oslo_concurrency.lockutils [None req-93ddb69e-2ee1-4bdc-9fb9-c0932c81d871 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Lock "5ec9074b-1237-4404-b13c-a7ca0dbe1d43" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 693.397352] env[62070]: DEBUG nova.compute.resource_tracker [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Instance 5a146d8f-6921-4b3e-8696-d2804fb855ba has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62070) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 693.901655] env[62070]: DEBUG nova.compute.resource_tracker [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Instance 42a5c5d8-5c3a-4568-b212-d87f2951a334 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62070) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 694.405339] env[62070]: DEBUG nova.compute.resource_tracker [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Instance a3fcb849-b015-43aa-8f95-0d4a87e2cecc has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62070) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 694.909123] env[62070]: DEBUG nova.compute.resource_tracker [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Instance a3c42653-9a4b-42d3-bc38-8d46d95c8f64 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62070) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 695.411058] env[62070]: DEBUG nova.compute.resource_tracker [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Instance dd5d90e8-964a-4e1c-a98a-bcba37a1d79e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62070) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 695.915922] env[62070]: DEBUG nova.compute.resource_tracker [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Instance 13e3576e-4f4c-4541-a637-daa124cbf8dd has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62070) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 696.419476] env[62070]: DEBUG nova.compute.resource_tracker [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Instance 748c94c7-1233-44f4-a71a-176b26518399 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62070) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 696.922962] env[62070]: DEBUG nova.compute.resource_tracker [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Instance 359ae9f2-a907-459e-99b9-3e043d5d015f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62070) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 697.425965] env[62070]: DEBUG nova.compute.resource_tracker [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Instance 242cf24f-f9e0-49ca-9b3e-a2b88b3cdcdc has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62070) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 697.928642] env[62070]: DEBUG nova.compute.resource_tracker [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Instance 20e7a993-b1fb-4359-ab35-8b0f06ca0121 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62070) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 698.432411] env[62070]: DEBUG nova.compute.resource_tracker [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Instance 1ce155c8-9a10-4eff-b428-31889aa8f638 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62070) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 698.935148] env[62070]: DEBUG nova.compute.resource_tracker [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Instance b7fdf23e-1e39-4745-ae84-38b7fa89aa5d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62070) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 699.439065] env[62070]: DEBUG nova.compute.resource_tracker [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Instance 10672096-00ba-4481-8ab3-085a185076db has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62070) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 699.941784] env[62070]: DEBUG nova.compute.resource_tracker [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Instance 2368b649-f931-454c-92cc-971df4155d90 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62070) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 700.444775] env[62070]: DEBUG nova.compute.resource_tracker [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Instance f75ed36e-16c8-4a6b-bd39-eb4057ef0691 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. 
{{(pid=62070) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 700.445055] env[62070]: DEBUG nova.compute.resource_tracker [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Total usable vcpus: 48, total allocated vcpus: 2 {{(pid=62070) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 700.445341] env[62070]: DEBUG nova.compute.resource_tracker [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=896MB phys_disk=200GB used_disk=2GB total_vcpus=48 used_vcpus=2 pci_stats=[] {{(pid=62070) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 700.792688] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b8255ba-bb13-4193-b9e0-054f5d2d25d2 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.800467] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed27ed6d-8050-486e-a239-ef92e0e0ee26 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.833830] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db0e5d88-84c1-40cd-8156-b94c0dee6a14 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.841801] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3aa3a0e8-4051-4f0a-be3d-8fa7ab3b8bbe {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.854945] env[62070]: DEBUG nova.compute.provider_tree [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 701.357992] env[62070]: DEBUG nova.scheduler.client.report [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 701.862554] env[62070]: DEBUG nova.compute.resource_tracker [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62070) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 701.862780] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 16.557s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 701.863076] env[62070]: DEBUG oslo_concurrency.lockutils [None 
req-1df742cb-33a0-4f39-9606-d3eecf96e864 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.955s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 701.864626] env[62070]: INFO nova.compute.claims [None req-1df742cb-33a0-4f39-9606-d3eecf96e864 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] [instance: d148d561-3211-4f1f-965a-f2b14cd60b11] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 703.221235] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a7eae46-7bf8-491e-b837-c5e10fc63586 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.228667] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-498b8a87-62bf-4ee6-bdd2-3a0374ec8d6b {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.257099] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1270bd53-e53c-4888-b870-cb6814f167d2 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.263486] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40c7264c-5eef-4a60-8c3c-16562291eda9 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.276917] env[62070]: DEBUG nova.compute.provider_tree [None req-1df742cb-33a0-4f39-9606-d3eecf96e864 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 703.779885] env[62070]: DEBUG nova.scheduler.client.report [None req-1df742cb-33a0-4f39-9606-d3eecf96e864 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 704.284944] env[62070]: DEBUG oslo_concurrency.lockutils [None req-1df742cb-33a0-4f39-9606-d3eecf96e864 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.422s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 704.285600] env[62070]: DEBUG nova.compute.manager [None req-1df742cb-33a0-4f39-9606-d3eecf96e864 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] [instance: d148d561-3211-4f1f-965a-f2b14cd60b11] 
Start building networks asynchronously for instance. {{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 704.288813] env[62070]: DEBUG oslo_concurrency.lockutils [None req-93feba92-6b0d-4708-bd35-b11f5f9670fd tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 31.419s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 704.290345] env[62070]: INFO nova.compute.claims [None req-93feba92-6b0d-4708-bd35-b11f5f9670fd tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] [instance: 0ac963b1-120a-464b-8228-3393135dd182] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 704.792217] env[62070]: DEBUG nova.compute.utils [None req-1df742cb-33a0-4f39-9606-d3eecf96e864 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Using /dev/sd instead of None {{(pid=62070) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 704.793056] env[62070]: DEBUG nova.compute.manager [None req-1df742cb-33a0-4f39-9606-d3eecf96e864 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] [instance: d148d561-3211-4f1f-965a-f2b14cd60b11] Allocating IP information in the background. {{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 704.793666] env[62070]: DEBUG nova.network.neutron [None req-1df742cb-33a0-4f39-9606-d3eecf96e864 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] [instance: d148d561-3211-4f1f-965a-f2b14cd60b11] allocate_for_instance() {{(pid=62070) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 704.853019] env[62070]: DEBUG nova.policy [None req-1df742cb-33a0-4f39-9606-d3eecf96e864 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0e3ee2dd49154a44bcfb94832273cd52', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '94c6fc73d5a74adb8384fd156daf3f58', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62070) authorize /opt/stack/nova/nova/policy.py:201}} [ 705.179518] env[62070]: DEBUG nova.network.neutron [None req-1df742cb-33a0-4f39-9606-d3eecf96e864 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] [instance: d148d561-3211-4f1f-965a-f2b14cd60b11] Successfully created port: c5e6098a-ebbb-4eee-ba72-4ddaad679830 {{(pid=62070) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 705.298347] env[62070]: DEBUG nova.compute.manager [None req-1df742cb-33a0-4f39-9606-d3eecf96e864 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] [instance: d148d561-3211-4f1f-965a-f2b14cd60b11] Start building block device mappings for instance. 
{{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 705.697050] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1850a35-2b2c-42fa-a863-359f27e03467 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.704396] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13a61497-dcd8-4b62-9a00-07017e04bdea {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.734174] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c4a8ce8-32ed-49bf-9c19-8151614d3304 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.740762] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a36bbf22-5224-4496-87da-ed9c72b7506b {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.753493] env[62070]: DEBUG nova.compute.provider_tree [None req-93feba92-6b0d-4708-bd35-b11f5f9670fd tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 706.256445] env[62070]: DEBUG nova.scheduler.client.report [None req-93feba92-6b0d-4708-bd35-b11f5f9670fd tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 706.310116] env[62070]: DEBUG nova.compute.manager [None req-1df742cb-33a0-4f39-9606-d3eecf96e864 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] [instance: d148d561-3211-4f1f-965a-f2b14cd60b11] Start spawning the instance on the hypervisor. 
{{(pid=62070) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 706.334498] env[62070]: DEBUG nova.virt.hardware [None req-1df742cb-33a0-4f39-9606-d3eecf96e864 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T09:23:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='56c7fbac-8f4f-47f8-9a34-b39636f74e40',id=36,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-884828689',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T09:21:17Z,direct_url=,disk_format='vmdk',id=43ea607c-7ece-4601-9b11-75c6a16aa7dd,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9d42cb2bbadf40d6b35f237f71234611',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T09:21:18Z,virtual_size=,visibility=), allow threads: False {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 706.334738] env[62070]: DEBUG nova.virt.hardware [None req-1df742cb-33a0-4f39-9606-d3eecf96e864 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Flavor limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 706.334892] env[62070]: DEBUG nova.virt.hardware [None req-1df742cb-33a0-4f39-9606-d3eecf96e864 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Image limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 706.335084] env[62070]: DEBUG nova.virt.hardware [None req-1df742cb-33a0-4f39-9606-d3eecf96e864 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Flavor pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 706.335247] env[62070]: DEBUG nova.virt.hardware [None req-1df742cb-33a0-4f39-9606-d3eecf96e864 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Image pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 706.335401] env[62070]: DEBUG nova.virt.hardware [None req-1df742cb-33a0-4f39-9606-d3eecf96e864 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 706.335600] env[62070]: DEBUG nova.virt.hardware [None req-1df742cb-33a0-4f39-9606-d3eecf96e864 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 706.335756] env[62070]: DEBUG nova.virt.hardware [None req-1df742cb-33a0-4f39-9606-d3eecf96e864 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 706.335996] env[62070]: DEBUG 
nova.virt.hardware [None req-1df742cb-33a0-4f39-9606-d3eecf96e864 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Got 1 possible topologies {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 706.336079] env[62070]: DEBUG nova.virt.hardware [None req-1df742cb-33a0-4f39-9606-d3eecf96e864 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 706.336266] env[62070]: DEBUG nova.virt.hardware [None req-1df742cb-33a0-4f39-9606-d3eecf96e864 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 706.337499] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e2fcf08-f66f-4ebc-9587-bd54fe392a6f {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.345946] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c10b6138-835c-4d9b-b6de-5d30037930bc {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.663197] env[62070]: DEBUG nova.compute.manager [req-3d983902-762f-497d-bfec-c3e1f0794d05 req-f91bba33-790f-445f-b90c-acee2b8d0985 service nova] [instance: d148d561-3211-4f1f-965a-f2b14cd60b11] Received event network-vif-plugged-c5e6098a-ebbb-4eee-ba72-4ddaad679830 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 706.663415] env[62070]: DEBUG oslo_concurrency.lockutils [req-3d983902-762f-497d-bfec-c3e1f0794d05 req-f91bba33-790f-445f-b90c-acee2b8d0985 service nova] Acquiring lock "d148d561-3211-4f1f-965a-f2b14cd60b11-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 706.663722] env[62070]: DEBUG oslo_concurrency.lockutils [req-3d983902-762f-497d-bfec-c3e1f0794d05 req-f91bba33-790f-445f-b90c-acee2b8d0985 service nova] Lock "d148d561-3211-4f1f-965a-f2b14cd60b11-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 706.663842] env[62070]: DEBUG oslo_concurrency.lockutils [req-3d983902-762f-497d-bfec-c3e1f0794d05 req-f91bba33-790f-445f-b90c-acee2b8d0985 service nova] Lock "d148d561-3211-4f1f-965a-f2b14cd60b11-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 706.664015] env[62070]: DEBUG nova.compute.manager [req-3d983902-762f-497d-bfec-c3e1f0794d05 req-f91bba33-790f-445f-b90c-acee2b8d0985 service nova] [instance: d148d561-3211-4f1f-965a-f2b14cd60b11] No waiting events found dispatching network-vif-plugged-c5e6098a-ebbb-4eee-ba72-4ddaad679830 {{(pid=62070) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 706.664427] env[62070]: WARNING nova.compute.manager 
[req-3d983902-762f-497d-bfec-c3e1f0794d05 req-f91bba33-790f-445f-b90c-acee2b8d0985 service nova] [instance: d148d561-3211-4f1f-965a-f2b14cd60b11] Received unexpected event network-vif-plugged-c5e6098a-ebbb-4eee-ba72-4ddaad679830 for instance with vm_state building and task_state spawning. [ 706.760923] env[62070]: DEBUG oslo_concurrency.lockutils [None req-93feba92-6b0d-4708-bd35-b11f5f9670fd tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.472s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 706.761459] env[62070]: DEBUG nova.compute.manager [None req-93feba92-6b0d-4708-bd35-b11f5f9670fd tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] [instance: 0ac963b1-120a-464b-8228-3393135dd182] Start building networks asynchronously for instance. {{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 706.764595] env[62070]: DEBUG oslo_concurrency.lockutils [None req-1a6044f5-8e8b-4bc9-a7d0-857f0899f9f6 tempest-DeleteServersAdminTestJSON-428050376 tempest-DeleteServersAdminTestJSON-428050376-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 31.865s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 706.942525] env[62070]: DEBUG nova.network.neutron [None req-1df742cb-33a0-4f39-9606-d3eecf96e864 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] [instance: d148d561-3211-4f1f-965a-f2b14cd60b11] Successfully updated port: c5e6098a-ebbb-4eee-ba72-4ddaad679830 {{(pid=62070) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 707.269886] env[62070]: DEBUG nova.compute.utils [None req-93feba92-6b0d-4708-bd35-b11f5f9670fd tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Using /dev/sd instead of None {{(pid=62070) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 707.274962] env[62070]: DEBUG nova.compute.manager [None req-93feba92-6b0d-4708-bd35-b11f5f9670fd tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] [instance: 0ac963b1-120a-464b-8228-3393135dd182] Allocating IP information in the background. 
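Annotation: the repeated "Acquiring lock ... / acquired ... / released" DEBUG lines in this section (the per-instance "-events" lock, "compute_resources", the "refresh_cache-<uuid>" locks) are emitted by oslo.concurrency's lockutils helpers. Below is a minimal, hedged sketch of that pattern, not Nova's actual code: the function name claim_resources is made up for illustration, and the lock names are simply copied from the log.

    from oslo_concurrency import lockutils

    # In-process lock via decorator: lockutils logs the "Acquiring lock ...",
    # "... acquired ..." and "... released ..." DEBUG lines (with waited/held
    # durations) around the decorated call, much like the lines in this log.
    @lockutils.synchronized('compute_resources')
    def claim_resources(instance_uuid):
        # Placeholder body; the real resource tracker work would happen here.
        return instance_uuid

    # The same mechanism as a context manager, using a lock name from the log.
    with lockutils.lock('refresh_cache-d148d561-3211-4f1f-965a-f2b14cd60b11'):
        claim_resources('d148d561-3211-4f1f-965a-f2b14cd60b11')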
{{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 707.274962] env[62070]: DEBUG nova.network.neutron [None req-93feba92-6b0d-4708-bd35-b11f5f9670fd tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] [instance: 0ac963b1-120a-464b-8228-3393135dd182] allocate_for_instance() {{(pid=62070) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 707.313033] env[62070]: DEBUG nova.policy [None req-93feba92-6b0d-4708-bd35-b11f5f9670fd tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7534320dee8f486e90f5174aa94d00bd', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '925dff51764c4b56ae7ea05fbde2ecdd', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62070) authorize /opt/stack/nova/nova/policy.py:201}} [ 707.445501] env[62070]: DEBUG oslo_concurrency.lockutils [None req-1df742cb-33a0-4f39-9606-d3eecf96e864 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Acquiring lock "refresh_cache-d148d561-3211-4f1f-965a-f2b14cd60b11" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 707.445652] env[62070]: DEBUG oslo_concurrency.lockutils [None req-1df742cb-33a0-4f39-9606-d3eecf96e864 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Acquired lock "refresh_cache-d148d561-3211-4f1f-965a-f2b14cd60b11" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 707.445798] env[62070]: DEBUG nova.network.neutron [None req-1df742cb-33a0-4f39-9606-d3eecf96e864 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] [instance: d148d561-3211-4f1f-965a-f2b14cd60b11] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 707.633405] env[62070]: DEBUG nova.network.neutron [None req-93feba92-6b0d-4708-bd35-b11f5f9670fd tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] [instance: 0ac963b1-120a-464b-8228-3393135dd182] Successfully created port: 6326b098-3c76-4152-b623-8921285ec01b {{(pid=62070) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 707.671091] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-428895c6-2f9b-4917-b28e-65506ef5d4ad {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.680535] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9514de0-6b86-4d25-94e6-b8c8c02e12c2 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.711080] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c038486-3752-43f7-beae-55223d4316da {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.719459] env[62070]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05c449d2-71c6-49f5-9f11-c8c5e06dc01b {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.732913] env[62070]: DEBUG nova.compute.provider_tree [None req-1a6044f5-8e8b-4bc9-a7d0-857f0899f9f6 tempest-DeleteServersAdminTestJSON-428050376 tempest-DeleteServersAdminTestJSON-428050376-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 707.775487] env[62070]: DEBUG nova.compute.manager [None req-93feba92-6b0d-4708-bd35-b11f5f9670fd tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] [instance: 0ac963b1-120a-464b-8228-3393135dd182] Start building block device mappings for instance. {{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 707.985052] env[62070]: DEBUG nova.network.neutron [None req-1df742cb-33a0-4f39-9606-d3eecf96e864 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] [instance: d148d561-3211-4f1f-965a-f2b14cd60b11] Instance cache missing network info. {{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 708.191088] env[62070]: DEBUG nova.network.neutron [None req-1df742cb-33a0-4f39-9606-d3eecf96e864 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] [instance: d148d561-3211-4f1f-965a-f2b14cd60b11] Updating instance_info_cache with network_info: [{"id": "c5e6098a-ebbb-4eee-ba72-4ddaad679830", "address": "fa:16:3e:7b:ab:3c", "network": {"id": "df33a08d-88db-4a22-846f-5b414705fc65", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.22", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "9d42cb2bbadf40d6b35f237f71234611", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4728adca-2846-416a-91a3-deb898faf1f3", "external-id": "nsx-vlan-transportzone-823", "segmentation_id": 823, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc5e6098a-eb", "ovs_interfaceid": "c5e6098a-ebbb-4eee-ba72-4ddaad679830", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 708.236146] env[62070]: DEBUG nova.scheduler.client.report [None req-1a6044f5-8e8b-4bc9-a7d0-857f0899f9f6 tempest-DeleteServersAdminTestJSON-428050376 tempest-DeleteServersAdminTestJSON-428050376-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 
'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 708.684591] env[62070]: DEBUG nova.compute.manager [req-536eef31-4366-4fd7-8a44-9296c1249dee req-e93ffdb1-bcf2-448a-83a7-c3cab0b711c5 service nova] [instance: d148d561-3211-4f1f-965a-f2b14cd60b11] Received event network-changed-c5e6098a-ebbb-4eee-ba72-4ddaad679830 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 708.684952] env[62070]: DEBUG nova.compute.manager [req-536eef31-4366-4fd7-8a44-9296c1249dee req-e93ffdb1-bcf2-448a-83a7-c3cab0b711c5 service nova] [instance: d148d561-3211-4f1f-965a-f2b14cd60b11] Refreshing instance network info cache due to event network-changed-c5e6098a-ebbb-4eee-ba72-4ddaad679830. {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 708.684952] env[62070]: DEBUG oslo_concurrency.lockutils [req-536eef31-4366-4fd7-8a44-9296c1249dee req-e93ffdb1-bcf2-448a-83a7-c3cab0b711c5 service nova] Acquiring lock "refresh_cache-d148d561-3211-4f1f-965a-f2b14cd60b11" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 708.693393] env[62070]: DEBUG oslo_concurrency.lockutils [None req-1df742cb-33a0-4f39-9606-d3eecf96e864 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Releasing lock "refresh_cache-d148d561-3211-4f1f-965a-f2b14cd60b11" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 708.693669] env[62070]: DEBUG nova.compute.manager [None req-1df742cb-33a0-4f39-9606-d3eecf96e864 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] [instance: d148d561-3211-4f1f-965a-f2b14cd60b11] Instance network_info: |[{"id": "c5e6098a-ebbb-4eee-ba72-4ddaad679830", "address": "fa:16:3e:7b:ab:3c", "network": {"id": "df33a08d-88db-4a22-846f-5b414705fc65", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.22", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "9d42cb2bbadf40d6b35f237f71234611", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4728adca-2846-416a-91a3-deb898faf1f3", "external-id": "nsx-vlan-transportzone-823", "segmentation_id": 823, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc5e6098a-eb", "ovs_interfaceid": "c5e6098a-ebbb-4eee-ba72-4ddaad679830", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 708.693913] env[62070]: DEBUG oslo_concurrency.lockutils [req-536eef31-4366-4fd7-8a44-9296c1249dee req-e93ffdb1-bcf2-448a-83a7-c3cab0b711c5 service nova] Acquired lock "refresh_cache-d148d561-3211-4f1f-965a-f2b14cd60b11" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 708.694094] env[62070]: DEBUG nova.network.neutron [req-536eef31-4366-4fd7-8a44-9296c1249dee req-e93ffdb1-bcf2-448a-83a7-c3cab0b711c5 service nova] [instance: 
d148d561-3211-4f1f-965a-f2b14cd60b11] Refreshing network info cache for port c5e6098a-ebbb-4eee-ba72-4ddaad679830 {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 708.695340] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-1df742cb-33a0-4f39-9606-d3eecf96e864 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] [instance: d148d561-3211-4f1f-965a-f2b14cd60b11] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:7b:ab:3c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4728adca-2846-416a-91a3-deb898faf1f3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c5e6098a-ebbb-4eee-ba72-4ddaad679830', 'vif_model': 'vmxnet3'}] {{(pid=62070) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 708.703193] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-1df742cb-33a0-4f39-9606-d3eecf96e864 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Creating folder: Project (94c6fc73d5a74adb8384fd156daf3f58). Parent ref: group-v245319. {{(pid=62070) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 708.704248] env[62070]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ebc104ae-0e24-4d38-ab74-09072e61a326 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.718401] env[62070]: INFO nova.virt.vmwareapi.vm_util [None req-1df742cb-33a0-4f39-9606-d3eecf96e864 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Created folder: Project (94c6fc73d5a74adb8384fd156daf3f58) in parent group-v245319. [ 708.718568] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-1df742cb-33a0-4f39-9606-d3eecf96e864 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Creating folder: Instances. Parent ref: group-v245327. {{(pid=62070) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 708.718766] env[62070]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5f84409e-7e52-4ec9-9df3-09762a6cb3b6 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.729444] env[62070]: INFO nova.virt.vmwareapi.vm_util [None req-1df742cb-33a0-4f39-9606-d3eecf96e864 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Created folder: Instances in parent group-v245327. [ 708.729660] env[62070]: DEBUG oslo.service.loopingcall [None req-1df742cb-33a0-4f39-9606-d3eecf96e864 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
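Annotation: the "Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return" line above comes from oslo.service's loopingcall module, which Nova uses to poll and retry driver operations. The sketch below shows one such helper, FixedIntervalLoopingCall, purely as an illustration of the looping/wait pattern (it is not necessarily the exact helper used for create_vm, and the _check_done callback is invented for the example).

    import time
    from oslo_service import loopingcall

    def _check_done(started_at):
        # Stop the loop (and hand back a value) once the fake operation is "done".
        if time.time() - started_at > 2:
            raise loopingcall.LoopingCallDone(retvalue='done')

    timer = loopingcall.FixedIntervalLoopingCall(_check_done, time.time())
    result = timer.start(interval=0.5).wait()   # blocks until LoopingCallDone
    print(result)                               # -> 'done'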
{{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 708.729829] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d148d561-3211-4f1f-965a-f2b14cd60b11] Creating VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 708.730009] env[62070]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b7bd3978-ff56-4216-8c7e-66b2e37b9c56 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.744136] env[62070]: DEBUG oslo_concurrency.lockutils [None req-1a6044f5-8e8b-4bc9-a7d0-857f0899f9f6 tempest-DeleteServersAdminTestJSON-428050376 tempest-DeleteServersAdminTestJSON-428050376-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.980s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 708.744707] env[62070]: ERROR nova.compute.manager [None req-1a6044f5-8e8b-4bc9-a7d0-857f0899f9f6 tempest-DeleteServersAdminTestJSON-428050376 tempest-DeleteServersAdminTestJSON-428050376-project-member] [instance: a71c58e7-89db-4ad2-92e0-5379b04b751c] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 8470f264-09c8-4817-a711-e8ac92df552d, please check neutron logs for more information. [ 708.744707] env[62070]: ERROR nova.compute.manager [instance: a71c58e7-89db-4ad2-92e0-5379b04b751c] Traceback (most recent call last): [ 708.744707] env[62070]: ERROR nova.compute.manager [instance: a71c58e7-89db-4ad2-92e0-5379b04b751c] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 708.744707] env[62070]: ERROR nova.compute.manager [instance: a71c58e7-89db-4ad2-92e0-5379b04b751c] self.driver.spawn(context, instance, image_meta, [ 708.744707] env[62070]: ERROR nova.compute.manager [instance: a71c58e7-89db-4ad2-92e0-5379b04b751c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 708.744707] env[62070]: ERROR nova.compute.manager [instance: a71c58e7-89db-4ad2-92e0-5379b04b751c] self._vmops.spawn(context, instance, image_meta, injected_files, [ 708.744707] env[62070]: ERROR nova.compute.manager [instance: a71c58e7-89db-4ad2-92e0-5379b04b751c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 708.744707] env[62070]: ERROR nova.compute.manager [instance: a71c58e7-89db-4ad2-92e0-5379b04b751c] vm_ref = self.build_virtual_machine(instance, [ 708.744707] env[62070]: ERROR nova.compute.manager [instance: a71c58e7-89db-4ad2-92e0-5379b04b751c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 708.744707] env[62070]: ERROR nova.compute.manager [instance: a71c58e7-89db-4ad2-92e0-5379b04b751c] vif_infos = vmwarevif.get_vif_info(self._session, [ 708.744707] env[62070]: ERROR nova.compute.manager [instance: a71c58e7-89db-4ad2-92e0-5379b04b751c] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 708.746400] env[62070]: ERROR nova.compute.manager [instance: a71c58e7-89db-4ad2-92e0-5379b04b751c] for vif in network_info: [ 708.746400] env[62070]: ERROR nova.compute.manager [instance: a71c58e7-89db-4ad2-92e0-5379b04b751c] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 708.746400] env[62070]: ERROR nova.compute.manager [instance: a71c58e7-89db-4ad2-92e0-5379b04b751c] return self._sync_wrapper(fn, *args, 
**kwargs) [ 708.746400] env[62070]: ERROR nova.compute.manager [instance: a71c58e7-89db-4ad2-92e0-5379b04b751c] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 708.746400] env[62070]: ERROR nova.compute.manager [instance: a71c58e7-89db-4ad2-92e0-5379b04b751c] self.wait() [ 708.746400] env[62070]: ERROR nova.compute.manager [instance: a71c58e7-89db-4ad2-92e0-5379b04b751c] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 708.746400] env[62070]: ERROR nova.compute.manager [instance: a71c58e7-89db-4ad2-92e0-5379b04b751c] self[:] = self._gt.wait() [ 708.746400] env[62070]: ERROR nova.compute.manager [instance: a71c58e7-89db-4ad2-92e0-5379b04b751c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 708.746400] env[62070]: ERROR nova.compute.manager [instance: a71c58e7-89db-4ad2-92e0-5379b04b751c] return self._exit_event.wait() [ 708.746400] env[62070]: ERROR nova.compute.manager [instance: a71c58e7-89db-4ad2-92e0-5379b04b751c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 708.746400] env[62070]: ERROR nova.compute.manager [instance: a71c58e7-89db-4ad2-92e0-5379b04b751c] result = hub.switch() [ 708.746400] env[62070]: ERROR nova.compute.manager [instance: a71c58e7-89db-4ad2-92e0-5379b04b751c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 708.746400] env[62070]: ERROR nova.compute.manager [instance: a71c58e7-89db-4ad2-92e0-5379b04b751c] return self.greenlet.switch() [ 708.746768] env[62070]: ERROR nova.compute.manager [instance: a71c58e7-89db-4ad2-92e0-5379b04b751c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 708.746768] env[62070]: ERROR nova.compute.manager [instance: a71c58e7-89db-4ad2-92e0-5379b04b751c] result = function(*args, **kwargs) [ 708.746768] env[62070]: ERROR nova.compute.manager [instance: a71c58e7-89db-4ad2-92e0-5379b04b751c] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 708.746768] env[62070]: ERROR nova.compute.manager [instance: a71c58e7-89db-4ad2-92e0-5379b04b751c] return func(*args, **kwargs) [ 708.746768] env[62070]: ERROR nova.compute.manager [instance: a71c58e7-89db-4ad2-92e0-5379b04b751c] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 708.746768] env[62070]: ERROR nova.compute.manager [instance: a71c58e7-89db-4ad2-92e0-5379b04b751c] raise e [ 708.746768] env[62070]: ERROR nova.compute.manager [instance: a71c58e7-89db-4ad2-92e0-5379b04b751c] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 708.746768] env[62070]: ERROR nova.compute.manager [instance: a71c58e7-89db-4ad2-92e0-5379b04b751c] nwinfo = self.network_api.allocate_for_instance( [ 708.746768] env[62070]: ERROR nova.compute.manager [instance: a71c58e7-89db-4ad2-92e0-5379b04b751c] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 708.746768] env[62070]: ERROR nova.compute.manager [instance: a71c58e7-89db-4ad2-92e0-5379b04b751c] created_port_ids = self._update_ports_for_instance( [ 708.746768] env[62070]: ERROR nova.compute.manager [instance: a71c58e7-89db-4ad2-92e0-5379b04b751c] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 708.746768] env[62070]: ERROR nova.compute.manager [instance: a71c58e7-89db-4ad2-92e0-5379b04b751c] with excutils.save_and_reraise_exception(): [ 708.746768] 
env[62070]: ERROR nova.compute.manager [instance: a71c58e7-89db-4ad2-92e0-5379b04b751c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 708.747283] env[62070]: ERROR nova.compute.manager [instance: a71c58e7-89db-4ad2-92e0-5379b04b751c] self.force_reraise() [ 708.747283] env[62070]: ERROR nova.compute.manager [instance: a71c58e7-89db-4ad2-92e0-5379b04b751c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 708.747283] env[62070]: ERROR nova.compute.manager [instance: a71c58e7-89db-4ad2-92e0-5379b04b751c] raise self.value [ 708.747283] env[62070]: ERROR nova.compute.manager [instance: a71c58e7-89db-4ad2-92e0-5379b04b751c] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 708.747283] env[62070]: ERROR nova.compute.manager [instance: a71c58e7-89db-4ad2-92e0-5379b04b751c] updated_port = self._update_port( [ 708.747283] env[62070]: ERROR nova.compute.manager [instance: a71c58e7-89db-4ad2-92e0-5379b04b751c] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 708.747283] env[62070]: ERROR nova.compute.manager [instance: a71c58e7-89db-4ad2-92e0-5379b04b751c] _ensure_no_port_binding_failure(port) [ 708.747283] env[62070]: ERROR nova.compute.manager [instance: a71c58e7-89db-4ad2-92e0-5379b04b751c] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 708.747283] env[62070]: ERROR nova.compute.manager [instance: a71c58e7-89db-4ad2-92e0-5379b04b751c] raise exception.PortBindingFailed(port_id=port['id']) [ 708.747283] env[62070]: ERROR nova.compute.manager [instance: a71c58e7-89db-4ad2-92e0-5379b04b751c] nova.exception.PortBindingFailed: Binding failed for port 8470f264-09c8-4817-a711-e8ac92df552d, please check neutron logs for more information. [ 708.747283] env[62070]: ERROR nova.compute.manager [instance: a71c58e7-89db-4ad2-92e0-5379b04b751c] [ 708.747577] env[62070]: DEBUG nova.compute.utils [None req-1a6044f5-8e8b-4bc9-a7d0-857f0899f9f6 tempest-DeleteServersAdminTestJSON-428050376 tempest-DeleteServersAdminTestJSON-428050376-project-member] [instance: a71c58e7-89db-4ad2-92e0-5379b04b751c] Binding failed for port 8470f264-09c8-4817-a711-e8ac92df552d, please check neutron logs for more information. 
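Annotation: the PortBindingFailed traceback above bottoms out in nova/network/neutron.py's _ensure_no_port_binding_failure(). The sketch below is a simplified, self-contained stand-in for that check (not Nova's exact code; the constant and exception class are re-declared locally): when Neutron returns a port whose binding:vif_type is 'binding_failed', Nova raises, the claim is aborted, and the build is re-scheduled, which is what happens to instance a71c58e7 in the following lines.

    # Simplified stand-in for nova.network.neutron._ensure_no_port_binding_failure.
    VIF_TYPE_BINDING_FAILED = 'binding_failed'   # Neutron's marker for a failed binding

    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__(
                'Binding failed for port %s, please check neutron logs '
                'for more information.' % port_id)

    def ensure_no_port_binding_failure(port):
        # Raise if Neutron could not bind the port to any mechanism driver.
        if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
            raise PortBindingFailed(port_id=port['id'])

    # Example mirroring the failure recorded in this log:
    try:
        ensure_no_port_binding_failure(
            {'id': '8470f264-09c8-4817-a711-e8ac92df552d',
             'binding:vif_type': VIF_TYPE_BINDING_FAILED})
    except PortBindingFailed as exc:
        print(exc)   # same message as the ERROR lines above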
{{(pid=62070) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 708.747577] env[62070]: DEBUG oslo_concurrency.lockutils [None req-89ead6ea-31c2-4fec-ad52-a9d968c7ad21 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 31.404s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 708.747883] env[62070]: INFO nova.compute.claims [None req-89ead6ea-31c2-4fec-ad52-a9d968c7ad21 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 71aead12-a182-40a7-b5a9-91c01271b800] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 708.750859] env[62070]: DEBUG nova.compute.manager [None req-1a6044f5-8e8b-4bc9-a7d0-857f0899f9f6 tempest-DeleteServersAdminTestJSON-428050376 tempest-DeleteServersAdminTestJSON-428050376-project-member] [instance: a71c58e7-89db-4ad2-92e0-5379b04b751c] Build of instance a71c58e7-89db-4ad2-92e0-5379b04b751c was re-scheduled: Binding failed for port 8470f264-09c8-4817-a711-e8ac92df552d, please check neutron logs for more information. {{(pid=62070) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 708.751337] env[62070]: DEBUG nova.compute.manager [None req-1a6044f5-8e8b-4bc9-a7d0-857f0899f9f6 tempest-DeleteServersAdminTestJSON-428050376 tempest-DeleteServersAdminTestJSON-428050376-project-member] [instance: a71c58e7-89db-4ad2-92e0-5379b04b751c] Unplugging VIFs for instance {{(pid=62070) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 708.751560] env[62070]: DEBUG oslo_concurrency.lockutils [None req-1a6044f5-8e8b-4bc9-a7d0-857f0899f9f6 tempest-DeleteServersAdminTestJSON-428050376 tempest-DeleteServersAdminTestJSON-428050376-project-member] Acquiring lock "refresh_cache-a71c58e7-89db-4ad2-92e0-5379b04b751c" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 708.751705] env[62070]: DEBUG oslo_concurrency.lockutils [None req-1a6044f5-8e8b-4bc9-a7d0-857f0899f9f6 tempest-DeleteServersAdminTestJSON-428050376 tempest-DeleteServersAdminTestJSON-428050376-project-member] Acquired lock "refresh_cache-a71c58e7-89db-4ad2-92e0-5379b04b751c" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 708.751860] env[62070]: DEBUG nova.network.neutron [None req-1a6044f5-8e8b-4bc9-a7d0-857f0899f9f6 tempest-DeleteServersAdminTestJSON-428050376 tempest-DeleteServersAdminTestJSON-428050376-project-member] [instance: a71c58e7-89db-4ad2-92e0-5379b04b751c] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 708.754751] env[62070]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 708.754751] env[62070]: value = "task-1121435" [ 708.754751] env[62070]: _type = "Task" [ 708.754751] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 708.764485] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1121435, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 708.787468] env[62070]: DEBUG nova.compute.manager [None req-93feba92-6b0d-4708-bd35-b11f5f9670fd tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] [instance: 0ac963b1-120a-464b-8228-3393135dd182] Start spawning the instance on the hypervisor. {{(pid=62070) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 708.812493] env[62070]: DEBUG nova.virt.hardware [None req-93feba92-6b0d-4708-bd35-b11f5f9670fd tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T09:21:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T09:21:17Z,direct_url=,disk_format='vmdk',id=43ea607c-7ece-4601-9b11-75c6a16aa7dd,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9d42cb2bbadf40d6b35f237f71234611',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T09:21:18Z,virtual_size=,visibility=), allow threads: False {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 708.812734] env[62070]: DEBUG nova.virt.hardware [None req-93feba92-6b0d-4708-bd35-b11f5f9670fd tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Flavor limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 708.812884] env[62070]: DEBUG nova.virt.hardware [None req-93feba92-6b0d-4708-bd35-b11f5f9670fd tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Image limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 708.813072] env[62070]: DEBUG nova.virt.hardware [None req-93feba92-6b0d-4708-bd35-b11f5f9670fd tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Flavor pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 708.813473] env[62070]: DEBUG nova.virt.hardware [None req-93feba92-6b0d-4708-bd35-b11f5f9670fd tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Image pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 708.813473] env[62070]: DEBUG nova.virt.hardware [None req-93feba92-6b0d-4708-bd35-b11f5f9670fd tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 708.814054] env[62070]: DEBUG nova.virt.hardware [None req-93feba92-6b0d-4708-bd35-b11f5f9670fd tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62070) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:569}} [ 708.814258] env[62070]: DEBUG nova.virt.hardware [None req-93feba92-6b0d-4708-bd35-b11f5f9670fd tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 708.814435] env[62070]: DEBUG nova.virt.hardware [None req-93feba92-6b0d-4708-bd35-b11f5f9670fd tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Got 1 possible topologies {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 708.814597] env[62070]: DEBUG nova.virt.hardware [None req-93feba92-6b0d-4708-bd35-b11f5f9670fd tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 708.814769] env[62070]: DEBUG nova.virt.hardware [None req-93feba92-6b0d-4708-bd35-b11f5f9670fd tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 708.815772] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e534de2-a38d-4cbb-9b81-b66cb7a9d700 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.824541] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af8a21a5-6434-487c-ae15-8254cbc46a54 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.181906] env[62070]: DEBUG nova.network.neutron [None req-93feba92-6b0d-4708-bd35-b11f5f9670fd tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] [instance: 0ac963b1-120a-464b-8228-3393135dd182] Successfully updated port: 6326b098-3c76-4152-b623-8921285ec01b {{(pid=62070) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 709.265394] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1121435, 'name': CreateVM_Task} progress is 99%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 709.271223] env[62070]: DEBUG nova.network.neutron [None req-1a6044f5-8e8b-4bc9-a7d0-857f0899f9f6 tempest-DeleteServersAdminTestJSON-428050376 tempest-DeleteServersAdminTestJSON-428050376-project-member] [instance: a71c58e7-89db-4ad2-92e0-5379b04b751c] Instance cache missing network info. 
{{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 709.335629] env[62070]: DEBUG nova.network.neutron [None req-1a6044f5-8e8b-4bc9-a7d0-857f0899f9f6 tempest-DeleteServersAdminTestJSON-428050376 tempest-DeleteServersAdminTestJSON-428050376-project-member] [instance: a71c58e7-89db-4ad2-92e0-5379b04b751c] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 709.523049] env[62070]: DEBUG nova.network.neutron [req-536eef31-4366-4fd7-8a44-9296c1249dee req-e93ffdb1-bcf2-448a-83a7-c3cab0b711c5 service nova] [instance: d148d561-3211-4f1f-965a-f2b14cd60b11] Updated VIF entry in instance network info cache for port c5e6098a-ebbb-4eee-ba72-4ddaad679830. {{(pid=62070) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 709.523049] env[62070]: DEBUG nova.network.neutron [req-536eef31-4366-4fd7-8a44-9296c1249dee req-e93ffdb1-bcf2-448a-83a7-c3cab0b711c5 service nova] [instance: d148d561-3211-4f1f-965a-f2b14cd60b11] Updating instance_info_cache with network_info: [{"id": "c5e6098a-ebbb-4eee-ba72-4ddaad679830", "address": "fa:16:3e:7b:ab:3c", "network": {"id": "df33a08d-88db-4a22-846f-5b414705fc65", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.22", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "9d42cb2bbadf40d6b35f237f71234611", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4728adca-2846-416a-91a3-deb898faf1f3", "external-id": "nsx-vlan-transportzone-823", "segmentation_id": 823, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc5e6098a-eb", "ovs_interfaceid": "c5e6098a-ebbb-4eee-ba72-4ddaad679830", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 709.687404] env[62070]: DEBUG oslo_concurrency.lockutils [None req-93feba92-6b0d-4708-bd35-b11f5f9670fd tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Acquiring lock "refresh_cache-0ac963b1-120a-464b-8228-3393135dd182" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 709.687690] env[62070]: DEBUG oslo_concurrency.lockutils [None req-93feba92-6b0d-4708-bd35-b11f5f9670fd tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Acquired lock "refresh_cache-0ac963b1-120a-464b-8228-3393135dd182" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 709.687690] env[62070]: DEBUG nova.network.neutron [None req-93feba92-6b0d-4708-bd35-b11f5f9670fd tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] [instance: 0ac963b1-120a-464b-8228-3393135dd182] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 709.767352] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': 
task-1121435, 'name': CreateVM_Task, 'duration_secs': 0.733373} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 709.767542] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d148d561-3211-4f1f-965a-f2b14cd60b11] Created VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 709.774258] env[62070]: DEBUG oslo_concurrency.lockutils [None req-1df742cb-33a0-4f39-9606-d3eecf96e864 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 709.774428] env[62070]: DEBUG oslo_concurrency.lockutils [None req-1df742cb-33a0-4f39-9606-d3eecf96e864 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Acquired lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 709.774995] env[62070]: DEBUG oslo_concurrency.lockutils [None req-1df742cb-33a0-4f39-9606-d3eecf96e864 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 709.775279] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4fa548f6-b1ba-4ded-a407-d8c1290826c7 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.783359] env[62070]: DEBUG oslo_vmware.api [None req-1df742cb-33a0-4f39-9606-d3eecf96e864 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Waiting for the task: (returnval){ [ 709.783359] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]5236c800-e0b9-2659-5eae-483cb767c458" [ 709.783359] env[62070]: _type = "Task" [ 709.783359] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 709.791295] env[62070]: DEBUG oslo_vmware.api [None req-1df742cb-33a0-4f39-9606-d3eecf96e864 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]5236c800-e0b9-2659-5eae-483cb767c458, 'name': SearchDatastore_Task, 'duration_secs': 0.009348} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 709.791569] env[62070]: DEBUG oslo_concurrency.lockutils [None req-1df742cb-33a0-4f39-9606-d3eecf96e864 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Releasing lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 709.792051] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-1df742cb-33a0-4f39-9606-d3eecf96e864 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] [instance: d148d561-3211-4f1f-965a-f2b14cd60b11] Processing image 43ea607c-7ece-4601-9b11-75c6a16aa7dd {{(pid=62070) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 709.792307] env[62070]: DEBUG oslo_concurrency.lockutils [None req-1df742cb-33a0-4f39-9606-d3eecf96e864 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 709.792459] env[62070]: DEBUG oslo_concurrency.lockutils [None req-1df742cb-33a0-4f39-9606-d3eecf96e864 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Acquired lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 709.792635] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-1df742cb-33a0-4f39-9606-d3eecf96e864 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 709.795046] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7c905f63-0ebb-4bbd-b50a-806e061d4c04 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.802372] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-1df742cb-33a0-4f39-9606-d3eecf96e864 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 709.802595] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-1df742cb-33a0-4f39-9606-d3eecf96e864 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62070) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 709.803226] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d65e30b6-5fbb-4d93-9b93-7eef8a05c338 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.810602] env[62070]: DEBUG oslo_vmware.api [None req-1df742cb-33a0-4f39-9606-d3eecf96e864 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Waiting for the task: (returnval){ [ 709.810602] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]52f616b3-2ba8-7045-3d8a-51e0fcf10b97" [ 709.810602] env[62070]: _type = "Task" [ 709.810602] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 709.818115] env[62070]: DEBUG oslo_vmware.api [None req-1df742cb-33a0-4f39-9606-d3eecf96e864 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52f616b3-2ba8-7045-3d8a-51e0fcf10b97, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 709.837949] env[62070]: DEBUG oslo_concurrency.lockutils [None req-1a6044f5-8e8b-4bc9-a7d0-857f0899f9f6 tempest-DeleteServersAdminTestJSON-428050376 tempest-DeleteServersAdminTestJSON-428050376-project-member] Releasing lock "refresh_cache-a71c58e7-89db-4ad2-92e0-5379b04b751c" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 709.838100] env[62070]: DEBUG nova.compute.manager [None req-1a6044f5-8e8b-4bc9-a7d0-857f0899f9f6 tempest-DeleteServersAdminTestJSON-428050376 tempest-DeleteServersAdminTestJSON-428050376-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62070) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 709.838270] env[62070]: DEBUG nova.compute.manager [None req-1a6044f5-8e8b-4bc9-a7d0-857f0899f9f6 tempest-DeleteServersAdminTestJSON-428050376 tempest-DeleteServersAdminTestJSON-428050376-project-member] [instance: a71c58e7-89db-4ad2-92e0-5379b04b751c] Deallocating network for instance {{(pid=62070) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 709.838435] env[62070]: DEBUG nova.network.neutron [None req-1a6044f5-8e8b-4bc9-a7d0-857f0899f9f6 tempest-DeleteServersAdminTestJSON-428050376 tempest-DeleteServersAdminTestJSON-428050376-project-member] [instance: a71c58e7-89db-4ad2-92e0-5379b04b751c] deallocate_for_instance() {{(pid=62070) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 709.855690] env[62070]: DEBUG nova.network.neutron [None req-1a6044f5-8e8b-4bc9-a7d0-857f0899f9f6 tempest-DeleteServersAdminTestJSON-428050376 tempest-DeleteServersAdminTestJSON-428050376-project-member] [instance: a71c58e7-89db-4ad2-92e0-5379b04b751c] Instance cache missing network info. 
{{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 710.026023] env[62070]: DEBUG oslo_concurrency.lockutils [req-536eef31-4366-4fd7-8a44-9296c1249dee req-e93ffdb1-bcf2-448a-83a7-c3cab0b711c5 service nova] Releasing lock "refresh_cache-d148d561-3211-4f1f-965a-f2b14cd60b11" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 710.127378] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27b6af77-1f6b-499e-a4df-6d1e26e85d9f {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.134578] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbecd6ea-01ef-43de-bf34-af4aa4e9c033 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.163153] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ddd2dfd-11a4-4f00-b138-3c247c40ed5d {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.171392] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0679aa3-7882-487f-8058-ed35fb139fc8 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.184616] env[62070]: DEBUG nova.compute.provider_tree [None req-89ead6ea-31c2-4fec-ad52-a9d968c7ad21 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 710.220618] env[62070]: DEBUG nova.network.neutron [None req-93feba92-6b0d-4708-bd35-b11f5f9670fd tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] [instance: 0ac963b1-120a-464b-8228-3393135dd182] Instance cache missing network info. {{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 710.323489] env[62070]: DEBUG oslo_vmware.api [None req-1df742cb-33a0-4f39-9606-d3eecf96e864 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52f616b3-2ba8-7045-3d8a-51e0fcf10b97, 'name': SearchDatastore_Task, 'duration_secs': 0.007861} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 710.324226] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-89882e77-d5c3-42b3-9d90-452705a843a2 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.329292] env[62070]: DEBUG oslo_vmware.api [None req-1df742cb-33a0-4f39-9606-d3eecf96e864 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Waiting for the task: (returnval){ [ 710.329292] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]52eeb976-df08-affa-c5e7-be01df6239e3" [ 710.329292] env[62070]: _type = "Task" [ 710.329292] env[62070]: } to complete. 
{{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 710.337753] env[62070]: DEBUG oslo_vmware.api [None req-1df742cb-33a0-4f39-9606-d3eecf96e864 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52eeb976-df08-affa-c5e7-be01df6239e3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 710.360011] env[62070]: DEBUG nova.network.neutron [None req-1a6044f5-8e8b-4bc9-a7d0-857f0899f9f6 tempest-DeleteServersAdminTestJSON-428050376 tempest-DeleteServersAdminTestJSON-428050376-project-member] [instance: a71c58e7-89db-4ad2-92e0-5379b04b751c] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 710.405650] env[62070]: DEBUG nova.network.neutron [None req-93feba92-6b0d-4708-bd35-b11f5f9670fd tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] [instance: 0ac963b1-120a-464b-8228-3393135dd182] Updating instance_info_cache with network_info: [{"id": "6326b098-3c76-4152-b623-8921285ec01b", "address": "fa:16:3e:cc:12:22", "network": {"id": "48dc51c7-cfa4-452e-9d72-2968d9a40dfa", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-274800531-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "925dff51764c4b56ae7ea05fbde2ecdd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2c7c1b46-cb81-45da-b5aa-7905d4da5854", "external-id": "nsx-vlan-transportzone-15", "segmentation_id": 15, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6326b098-3c", "ovs_interfaceid": "6326b098-3c76-4152-b623-8921285ec01b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 710.688116] env[62070]: DEBUG nova.scheduler.client.report [None req-89ead6ea-31c2-4fec-ad52-a9d968c7ad21 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 710.708337] env[62070]: DEBUG nova.compute.manager [req-9ac7c0bc-298d-41fd-b004-383a8ca135c1 req-f2be70c2-cb7f-4ea9-a9bf-6bb77c083c1c service nova] [instance: 0ac963b1-120a-464b-8228-3393135dd182] Received event network-vif-plugged-6326b098-3c76-4152-b623-8921285ec01b {{(pid=62070) 
external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 710.708337] env[62070]: DEBUG oslo_concurrency.lockutils [req-9ac7c0bc-298d-41fd-b004-383a8ca135c1 req-f2be70c2-cb7f-4ea9-a9bf-6bb77c083c1c service nova] Acquiring lock "0ac963b1-120a-464b-8228-3393135dd182-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 710.708486] env[62070]: DEBUG oslo_concurrency.lockutils [req-9ac7c0bc-298d-41fd-b004-383a8ca135c1 req-f2be70c2-cb7f-4ea9-a9bf-6bb77c083c1c service nova] Lock "0ac963b1-120a-464b-8228-3393135dd182-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 710.708661] env[62070]: DEBUG oslo_concurrency.lockutils [req-9ac7c0bc-298d-41fd-b004-383a8ca135c1 req-f2be70c2-cb7f-4ea9-a9bf-6bb77c083c1c service nova] Lock "0ac963b1-120a-464b-8228-3393135dd182-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 710.708827] env[62070]: DEBUG nova.compute.manager [req-9ac7c0bc-298d-41fd-b004-383a8ca135c1 req-f2be70c2-cb7f-4ea9-a9bf-6bb77c083c1c service nova] [instance: 0ac963b1-120a-464b-8228-3393135dd182] No waiting events found dispatching network-vif-plugged-6326b098-3c76-4152-b623-8921285ec01b {{(pid=62070) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 710.708989] env[62070]: WARNING nova.compute.manager [req-9ac7c0bc-298d-41fd-b004-383a8ca135c1 req-f2be70c2-cb7f-4ea9-a9bf-6bb77c083c1c service nova] [instance: 0ac963b1-120a-464b-8228-3393135dd182] Received unexpected event network-vif-plugged-6326b098-3c76-4152-b623-8921285ec01b for instance with vm_state building and task_state spawning. [ 710.709164] env[62070]: DEBUG nova.compute.manager [req-9ac7c0bc-298d-41fd-b004-383a8ca135c1 req-f2be70c2-cb7f-4ea9-a9bf-6bb77c083c1c service nova] [instance: 0ac963b1-120a-464b-8228-3393135dd182] Received event network-changed-6326b098-3c76-4152-b623-8921285ec01b {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 710.709317] env[62070]: DEBUG nova.compute.manager [req-9ac7c0bc-298d-41fd-b004-383a8ca135c1 req-f2be70c2-cb7f-4ea9-a9bf-6bb77c083c1c service nova] [instance: 0ac963b1-120a-464b-8228-3393135dd182] Refreshing instance network info cache due to event network-changed-6326b098-3c76-4152-b623-8921285ec01b. {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 710.709481] env[62070]: DEBUG oslo_concurrency.lockutils [req-9ac7c0bc-298d-41fd-b004-383a8ca135c1 req-f2be70c2-cb7f-4ea9-a9bf-6bb77c083c1c service nova] Acquiring lock "refresh_cache-0ac963b1-120a-464b-8228-3393135dd182" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 710.839258] env[62070]: DEBUG oslo_vmware.api [None req-1df742cb-33a0-4f39-9606-d3eecf96e864 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52eeb976-df08-affa-c5e7-be01df6239e3, 'name': SearchDatastore_Task, 'duration_secs': 0.008656} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 710.839464] env[62070]: DEBUG oslo_concurrency.lockutils [None req-1df742cb-33a0-4f39-9606-d3eecf96e864 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Releasing lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 710.839720] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-1df742cb-33a0-4f39-9606-d3eecf96e864 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore1] d148d561-3211-4f1f-965a-f2b14cd60b11/d148d561-3211-4f1f-965a-f2b14cd60b11.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 710.840230] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d2112e5f-2823-4803-8cba-b84fce2d1967 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.848825] env[62070]: DEBUG oslo_vmware.api [None req-1df742cb-33a0-4f39-9606-d3eecf96e864 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Waiting for the task: (returnval){ [ 710.848825] env[62070]: value = "task-1121436" [ 710.848825] env[62070]: _type = "Task" [ 710.848825] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 710.858333] env[62070]: DEBUG oslo_vmware.api [None req-1df742cb-33a0-4f39-9606-d3eecf96e864 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Task: {'id': task-1121436, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 710.863090] env[62070]: INFO nova.compute.manager [None req-1a6044f5-8e8b-4bc9-a7d0-857f0899f9f6 tempest-DeleteServersAdminTestJSON-428050376 tempest-DeleteServersAdminTestJSON-428050376-project-member] [instance: a71c58e7-89db-4ad2-92e0-5379b04b751c] Took 1.02 seconds to deallocate network for instance. 
[ 710.908190] env[62070]: DEBUG oslo_concurrency.lockutils [None req-93feba92-6b0d-4708-bd35-b11f5f9670fd tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Releasing lock "refresh_cache-0ac963b1-120a-464b-8228-3393135dd182" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 710.908562] env[62070]: DEBUG nova.compute.manager [None req-93feba92-6b0d-4708-bd35-b11f5f9670fd tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] [instance: 0ac963b1-120a-464b-8228-3393135dd182] Instance network_info: |[{"id": "6326b098-3c76-4152-b623-8921285ec01b", "address": "fa:16:3e:cc:12:22", "network": {"id": "48dc51c7-cfa4-452e-9d72-2968d9a40dfa", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-274800531-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "925dff51764c4b56ae7ea05fbde2ecdd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2c7c1b46-cb81-45da-b5aa-7905d4da5854", "external-id": "nsx-vlan-transportzone-15", "segmentation_id": 15, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6326b098-3c", "ovs_interfaceid": "6326b098-3c76-4152-b623-8921285ec01b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 710.908875] env[62070]: DEBUG oslo_concurrency.lockutils [req-9ac7c0bc-298d-41fd-b004-383a8ca135c1 req-f2be70c2-cb7f-4ea9-a9bf-6bb77c083c1c service nova] Acquired lock "refresh_cache-0ac963b1-120a-464b-8228-3393135dd182" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 710.909107] env[62070]: DEBUG nova.network.neutron [req-9ac7c0bc-298d-41fd-b004-383a8ca135c1 req-f2be70c2-cb7f-4ea9-a9bf-6bb77c083c1c service nova] [instance: 0ac963b1-120a-464b-8228-3393135dd182] Refreshing network info cache for port 6326b098-3c76-4152-b623-8921285ec01b {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 710.910420] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-93feba92-6b0d-4708-bd35-b11f5f9670fd tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] [instance: 0ac963b1-120a-464b-8228-3393135dd182] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:cc:12:22', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2c7c1b46-cb81-45da-b5aa-7905d4da5854', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6326b098-3c76-4152-b623-8921285ec01b', 'vif_model': 'vmxnet3'}] {{(pid=62070) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 710.918285] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-93feba92-6b0d-4708-bd35-b11f5f9670fd tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Creating folder: Project (925dff51764c4b56ae7ea05fbde2ecdd). 
Parent ref: group-v245319. {{(pid=62070) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 710.919301] env[62070]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5c8e43d2-1ed6-4af9-a241-26f2e9b207b9 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.929711] env[62070]: INFO nova.virt.vmwareapi.vm_util [None req-93feba92-6b0d-4708-bd35-b11f5f9670fd tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Created folder: Project (925dff51764c4b56ae7ea05fbde2ecdd) in parent group-v245319. [ 710.929850] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-93feba92-6b0d-4708-bd35-b11f5f9670fd tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Creating folder: Instances. Parent ref: group-v245330. {{(pid=62070) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 710.930107] env[62070]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7eb922af-a195-4ece-96b5-2984258a7daa {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.939641] env[62070]: INFO nova.virt.vmwareapi.vm_util [None req-93feba92-6b0d-4708-bd35-b11f5f9670fd tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Created folder: Instances in parent group-v245330. [ 710.939890] env[62070]: DEBUG oslo.service.loopingcall [None req-93feba92-6b0d-4708-bd35-b11f5f9670fd tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 710.940106] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0ac963b1-120a-464b-8228-3393135dd182] Creating VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 710.940321] env[62070]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1813daf0-bdc5-4d26-bd28-86ca07cc88b4 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.959564] env[62070]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 710.959564] env[62070]: value = "task-1121439" [ 710.959564] env[62070]: _type = "Task" [ 710.959564] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 710.967341] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1121439, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 711.195173] env[62070]: DEBUG oslo_concurrency.lockutils [None req-89ead6ea-31c2-4fec-ad52-a9d968c7ad21 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.448s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 711.195943] env[62070]: DEBUG nova.compute.manager [None req-89ead6ea-31c2-4fec-ad52-a9d968c7ad21 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 71aead12-a182-40a7-b5a9-91c01271b800] Start building networks asynchronously for instance. {{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 711.198851] env[62070]: DEBUG oslo_concurrency.lockutils [None req-78392d05-754f-4818-8f15-eae4311c941b tempest-ServersTestManualDisk-1129474164 tempest-ServersTestManualDisk-1129474164-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 31.882s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 711.200631] env[62070]: INFO nova.compute.claims [None req-78392d05-754f-4818-8f15-eae4311c941b tempest-ServersTestManualDisk-1129474164 tempest-ServersTestManualDisk-1129474164-project-member] [instance: d0914f90-200c-4715-aaab-54beacf339b9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 711.358695] env[62070]: DEBUG oslo_vmware.api [None req-1df742cb-33a0-4f39-9606-d3eecf96e864 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Task: {'id': task-1121436, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.444099} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 711.358973] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-1df742cb-33a0-4f39-9606-d3eecf96e864 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore1] d148d561-3211-4f1f-965a-f2b14cd60b11/d148d561-3211-4f1f-965a-f2b14cd60b11.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 711.359200] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-1df742cb-33a0-4f39-9606-d3eecf96e864 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] [instance: d148d561-3211-4f1f-965a-f2b14cd60b11] Extending root virtual disk to 1048576 {{(pid=62070) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 711.359450] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-34158bed-0499-4de8-abb7-540c953ceaa6 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.366028] env[62070]: DEBUG oslo_vmware.api [None req-1df742cb-33a0-4f39-9606-d3eecf96e864 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Waiting for the task: (returnval){ [ 711.366028] env[62070]: value = "task-1121440" [ 711.366028] env[62070]: _type = "Task" [ 711.366028] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 711.379020] env[62070]: DEBUG oslo_vmware.api [None req-1df742cb-33a0-4f39-9606-d3eecf96e864 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Task: {'id': task-1121440, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 711.469019] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1121439, 'name': CreateVM_Task} progress is 99%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 711.684952] env[62070]: DEBUG nova.network.neutron [req-9ac7c0bc-298d-41fd-b004-383a8ca135c1 req-f2be70c2-cb7f-4ea9-a9bf-6bb77c083c1c service nova] [instance: 0ac963b1-120a-464b-8228-3393135dd182] Updated VIF entry in instance network info cache for port 6326b098-3c76-4152-b623-8921285ec01b. 
{{(pid=62070) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 711.685371] env[62070]: DEBUG nova.network.neutron [req-9ac7c0bc-298d-41fd-b004-383a8ca135c1 req-f2be70c2-cb7f-4ea9-a9bf-6bb77c083c1c service nova] [instance: 0ac963b1-120a-464b-8228-3393135dd182] Updating instance_info_cache with network_info: [{"id": "6326b098-3c76-4152-b623-8921285ec01b", "address": "fa:16:3e:cc:12:22", "network": {"id": "48dc51c7-cfa4-452e-9d72-2968d9a40dfa", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-274800531-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "925dff51764c4b56ae7ea05fbde2ecdd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2c7c1b46-cb81-45da-b5aa-7905d4da5854", "external-id": "nsx-vlan-transportzone-15", "segmentation_id": 15, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6326b098-3c", "ovs_interfaceid": "6326b098-3c76-4152-b623-8921285ec01b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 711.709472] env[62070]: DEBUG nova.compute.utils [None req-89ead6ea-31c2-4fec-ad52-a9d968c7ad21 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Using /dev/sd instead of None {{(pid=62070) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 711.710781] env[62070]: DEBUG nova.compute.manager [None req-89ead6ea-31c2-4fec-ad52-a9d968c7ad21 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 71aead12-a182-40a7-b5a9-91c01271b800] Allocating IP information in the background. 
{{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 711.710949] env[62070]: DEBUG nova.network.neutron [None req-89ead6ea-31c2-4fec-ad52-a9d968c7ad21 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 71aead12-a182-40a7-b5a9-91c01271b800] allocate_for_instance() {{(pid=62070) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 711.767978] env[62070]: DEBUG nova.policy [None req-89ead6ea-31c2-4fec-ad52-a9d968c7ad21 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0ab707a4862f42199fc2a91733563cde', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f29ac48ab6544ec0bd1d210aec05dbc5', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62070) authorize /opt/stack/nova/nova/policy.py:201}} [ 711.878581] env[62070]: DEBUG oslo_vmware.api [None req-1df742cb-33a0-4f39-9606-d3eecf96e864 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Task: {'id': task-1121440, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.126308} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 711.879042] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-1df742cb-33a0-4f39-9606-d3eecf96e864 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] [instance: d148d561-3211-4f1f-965a-f2b14cd60b11] Extended root virtual disk {{(pid=62070) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 711.879772] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fab9d4d-b331-40af-9a53-ac3fdaed1f3d {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.901257] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-1df742cb-33a0-4f39-9606-d3eecf96e864 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] [instance: d148d561-3211-4f1f-965a-f2b14cd60b11] Reconfiguring VM instance instance-00000020 to attach disk [datastore1] d148d561-3211-4f1f-965a-f2b14cd60b11/d148d561-3211-4f1f-965a-f2b14cd60b11.vmdk or device None with type sparse {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 711.902234] env[62070]: INFO nova.scheduler.client.report [None req-1a6044f5-8e8b-4bc9-a7d0-857f0899f9f6 tempest-DeleteServersAdminTestJSON-428050376 tempest-DeleteServersAdminTestJSON-428050376-project-member] Deleted allocations for instance a71c58e7-89db-4ad2-92e0-5379b04b751c [ 711.907322] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3ec38cec-f22b-4585-b5c5-26fa5c0bc6de {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.928967] env[62070]: DEBUG oslo_vmware.api [None req-1df742cb-33a0-4f39-9606-d3eecf96e864 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Waiting for the task: (returnval){ [ 711.928967] env[62070]: value = "task-1121441" [ 
711.928967] env[62070]: _type = "Task" [ 711.928967] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 711.937304] env[62070]: DEBUG oslo_vmware.api [None req-1df742cb-33a0-4f39-9606-d3eecf96e864 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Task: {'id': task-1121441, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 711.970040] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1121439, 'name': CreateVM_Task, 'duration_secs': 0.584145} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 711.970506] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0ac963b1-120a-464b-8228-3393135dd182] Created VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 711.971177] env[62070]: DEBUG oslo_concurrency.lockutils [None req-93feba92-6b0d-4708-bd35-b11f5f9670fd tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 711.971343] env[62070]: DEBUG oslo_concurrency.lockutils [None req-93feba92-6b0d-4708-bd35-b11f5f9670fd tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Acquired lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 711.971666] env[62070]: DEBUG oslo_concurrency.lockutils [None req-93feba92-6b0d-4708-bd35-b11f5f9670fd tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 711.971910] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6b5d8757-1e24-4858-99e2-d3807285e469 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.976514] env[62070]: DEBUG oslo_vmware.api [None req-93feba92-6b0d-4708-bd35-b11f5f9670fd tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Waiting for the task: (returnval){ [ 711.976514] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]5281c036-c749-b55d-3ec4-68cdfda6181d" [ 711.976514] env[62070]: _type = "Task" [ 711.976514] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 711.984735] env[62070]: DEBUG oslo_vmware.api [None req-93feba92-6b0d-4708-bd35-b11f5f9670fd tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]5281c036-c749-b55d-3ec4-68cdfda6181d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 712.083699] env[62070]: DEBUG nova.network.neutron [None req-89ead6ea-31c2-4fec-ad52-a9d968c7ad21 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 71aead12-a182-40a7-b5a9-91c01271b800] Successfully created port: a3ed0957-14c2-4144-8d45-f4a0e5cb45ab {{(pid=62070) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 712.188069] env[62070]: DEBUG oslo_concurrency.lockutils [req-9ac7c0bc-298d-41fd-b004-383a8ca135c1 req-f2be70c2-cb7f-4ea9-a9bf-6bb77c083c1c service nova] Releasing lock "refresh_cache-0ac963b1-120a-464b-8228-3393135dd182" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 712.215499] env[62070]: DEBUG nova.compute.manager [None req-89ead6ea-31c2-4fec-ad52-a9d968c7ad21 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 71aead12-a182-40a7-b5a9-91c01271b800] Start building block device mappings for instance. {{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 712.423813] env[62070]: DEBUG oslo_concurrency.lockutils [None req-1a6044f5-8e8b-4bc9-a7d0-857f0899f9f6 tempest-DeleteServersAdminTestJSON-428050376 tempest-DeleteServersAdminTestJSON-428050376-project-member] Lock "a71c58e7-89db-4ad2-92e0-5379b04b751c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 119.651s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 712.441378] env[62070]: DEBUG oslo_vmware.api [None req-1df742cb-33a0-4f39-9606-d3eecf96e864 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Task: {'id': task-1121441, 'name': ReconfigVM_Task, 'duration_secs': 0.289249} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 712.441726] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-1df742cb-33a0-4f39-9606-d3eecf96e864 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] [instance: d148d561-3211-4f1f-965a-f2b14cd60b11] Reconfigured VM instance instance-00000020 to attach disk [datastore1] d148d561-3211-4f1f-965a-f2b14cd60b11/d148d561-3211-4f1f-965a-f2b14cd60b11.vmdk or device None with type sparse {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 712.442644] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3a2fdf4f-65e3-43fb-a371-1f5fca9a3a3b {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.449162] env[62070]: DEBUG oslo_vmware.api [None req-1df742cb-33a0-4f39-9606-d3eecf96e864 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Waiting for the task: (returnval){ [ 712.449162] env[62070]: value = "task-1121442" [ 712.449162] env[62070]: _type = "Task" [ 712.449162] env[62070]: } to complete. 
{{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 712.459670] env[62070]: DEBUG oslo_vmware.api [None req-1df742cb-33a0-4f39-9606-d3eecf96e864 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Task: {'id': task-1121442, 'name': Rename_Task} progress is 5%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 712.488780] env[62070]: DEBUG oslo_vmware.api [None req-93feba92-6b0d-4708-bd35-b11f5f9670fd tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]5281c036-c749-b55d-3ec4-68cdfda6181d, 'name': SearchDatastore_Task, 'duration_secs': 0.008787} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 712.489107] env[62070]: DEBUG oslo_concurrency.lockutils [None req-93feba92-6b0d-4708-bd35-b11f5f9670fd tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Releasing lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 712.489377] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-93feba92-6b0d-4708-bd35-b11f5f9670fd tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] [instance: 0ac963b1-120a-464b-8228-3393135dd182] Processing image 43ea607c-7ece-4601-9b11-75c6a16aa7dd {{(pid=62070) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 712.490062] env[62070]: DEBUG oslo_concurrency.lockutils [None req-93feba92-6b0d-4708-bd35-b11f5f9670fd tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 712.490062] env[62070]: DEBUG oslo_concurrency.lockutils [None req-93feba92-6b0d-4708-bd35-b11f5f9670fd tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Acquired lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 712.490537] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-93feba92-6b0d-4708-bd35-b11f5f9670fd tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 712.490537] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a60f32b1-9064-4997-8fb8-c2ebf47d17de {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.497870] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-93feba92-6b0d-4708-bd35-b11f5f9670fd tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62070) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 712.498067] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-93feba92-6b0d-4708-bd35-b11f5f9670fd tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62070) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 712.498791] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d7633ece-fd53-4df9-b404-1b64b2f71d1f {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.503650] env[62070]: DEBUG oslo_vmware.api [None req-93feba92-6b0d-4708-bd35-b11f5f9670fd tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Waiting for the task: (returnval){ [ 712.503650] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]52f8ea29-94fc-899d-f7e1-03288d44e415" [ 712.503650] env[62070]: _type = "Task" [ 712.503650] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 712.516351] env[62070]: DEBUG oslo_vmware.api [None req-93feba92-6b0d-4708-bd35-b11f5f9670fd tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52f8ea29-94fc-899d-f7e1-03288d44e415, 'name': SearchDatastore_Task, 'duration_secs': 0.009039} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 712.517108] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c1a40613-45f3-4f0a-bbfc-e1935b3f9b58 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.522119] env[62070]: DEBUG oslo_vmware.api [None req-93feba92-6b0d-4708-bd35-b11f5f9670fd tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Waiting for the task: (returnval){ [ 712.522119] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]5279697b-5bde-79fc-fbcf-854b56c602c5" [ 712.522119] env[62070]: _type = "Task" [ 712.522119] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 712.533270] env[62070]: DEBUG oslo_vmware.api [None req-93feba92-6b0d-4708-bd35-b11f5f9670fd tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]5279697b-5bde-79fc-fbcf-854b56c602c5, 'name': SearchDatastore_Task, 'duration_secs': 0.008096} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 712.533532] env[62070]: DEBUG oslo_concurrency.lockutils [None req-93feba92-6b0d-4708-bd35-b11f5f9670fd tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Releasing lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 712.533817] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-93feba92-6b0d-4708-bd35-b11f5f9670fd tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore1] 0ac963b1-120a-464b-8228-3393135dd182/0ac963b1-120a-464b-8228-3393135dd182.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 712.534116] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-94b0de7c-d8e1-4659-aceb-e015397ef088 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.540131] env[62070]: DEBUG oslo_vmware.api [None req-93feba92-6b0d-4708-bd35-b11f5f9670fd tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Waiting for the task: (returnval){ [ 712.540131] env[62070]: value = "task-1121443" [ 712.540131] env[62070]: _type = "Task" [ 712.540131] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 712.550534] env[62070]: DEBUG oslo_vmware.api [None req-93feba92-6b0d-4708-bd35-b11f5f9670fd tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Task: {'id': task-1121443, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 712.657728] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f0ecb49-113b-48be-bd7d-2518d9abc54f {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.664966] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e273a904-9860-4274-8f02-ffb743e4accd {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.696360] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bced5bc3-a82f-417b-8f6a-f3d81ef676bd {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.703662] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19159d8f-2031-4f6d-97df-81011847b35f {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.719394] env[62070]: DEBUG nova.compute.provider_tree [None req-78392d05-754f-4818-8f15-eae4311c941b tempest-ServersTestManualDisk-1129474164 tempest-ServersTestManualDisk-1129474164-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 712.926860] env[62070]: DEBUG nova.compute.manager [None req-0046a5be-b671-4771-8172-fdb6e585fc1d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] [instance: 076aed5b-4b08-4f3b-a940-d9cd95c32e57] Starting instance... {{(pid=62070) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 712.961622] env[62070]: DEBUG oslo_vmware.api [None req-1df742cb-33a0-4f39-9606-d3eecf96e864 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Task: {'id': task-1121442, 'name': Rename_Task, 'duration_secs': 0.166216} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 712.962469] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-1df742cb-33a0-4f39-9606-d3eecf96e864 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] [instance: d148d561-3211-4f1f-965a-f2b14cd60b11] Powering on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 712.962469] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-22d581da-8252-42fc-a4a3-2143ddd045f1 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.970503] env[62070]: DEBUG oslo_vmware.api [None req-1df742cb-33a0-4f39-9606-d3eecf96e864 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Waiting for the task: (returnval){ [ 712.970503] env[62070]: value = "task-1121444" [ 712.970503] env[62070]: _type = "Task" [ 712.970503] env[62070]: } to complete. 
{{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 712.983063] env[62070]: DEBUG oslo_vmware.api [None req-1df742cb-33a0-4f39-9606-d3eecf96e864 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Task: {'id': task-1121444, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 713.055027] env[62070]: DEBUG oslo_vmware.api [None req-93feba92-6b0d-4708-bd35-b11f5f9670fd tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Task: {'id': task-1121443, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.474947} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 713.055263] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-93feba92-6b0d-4708-bd35-b11f5f9670fd tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore1] 0ac963b1-120a-464b-8228-3393135dd182/0ac963b1-120a-464b-8228-3393135dd182.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 713.055572] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-93feba92-6b0d-4708-bd35-b11f5f9670fd tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] [instance: 0ac963b1-120a-464b-8228-3393135dd182] Extending root virtual disk to 1048576 {{(pid=62070) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 713.055827] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-08d5c766-e1b4-408c-b205-302e0e75f11f {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.062443] env[62070]: DEBUG oslo_vmware.api [None req-93feba92-6b0d-4708-bd35-b11f5f9670fd tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Waiting for the task: (returnval){ [ 713.062443] env[62070]: value = "task-1121445" [ 713.062443] env[62070]: _type = "Task" [ 713.062443] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 713.071238] env[62070]: DEBUG oslo_vmware.api [None req-93feba92-6b0d-4708-bd35-b11f5f9670fd tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Task: {'id': task-1121445, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 713.225540] env[62070]: DEBUG nova.scheduler.client.report [None req-78392d05-754f-4818-8f15-eae4311c941b tempest-ServersTestManualDisk-1129474164 tempest-ServersTestManualDisk-1129474164-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 713.231201] env[62070]: DEBUG nova.compute.manager [None req-89ead6ea-31c2-4fec-ad52-a9d968c7ad21 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 71aead12-a182-40a7-b5a9-91c01271b800] Start spawning the instance on the hypervisor. {{(pid=62070) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 713.259917] env[62070]: DEBUG nova.virt.hardware [None req-89ead6ea-31c2-4fec-ad52-a9d968c7ad21 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T09:21:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T09:21:17Z,direct_url=,disk_format='vmdk',id=43ea607c-7ece-4601-9b11-75c6a16aa7dd,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9d42cb2bbadf40d6b35f237f71234611',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T09:21:18Z,virtual_size=,visibility=), allow threads: False {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 713.260196] env[62070]: DEBUG nova.virt.hardware [None req-89ead6ea-31c2-4fec-ad52-a9d968c7ad21 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Flavor limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 713.260330] env[62070]: DEBUG nova.virt.hardware [None req-89ead6ea-31c2-4fec-ad52-a9d968c7ad21 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Image limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 713.260506] env[62070]: DEBUG nova.virt.hardware [None req-89ead6ea-31c2-4fec-ad52-a9d968c7ad21 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Flavor pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 713.260642] env[62070]: DEBUG nova.virt.hardware [None req-89ead6ea-31c2-4fec-ad52-a9d968c7ad21 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Image pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 713.260781] env[62070]: DEBUG nova.virt.hardware [None 
req-89ead6ea-31c2-4fec-ad52-a9d968c7ad21 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 713.261075] env[62070]: DEBUG nova.virt.hardware [None req-89ead6ea-31c2-4fec-ad52-a9d968c7ad21 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 713.261162] env[62070]: DEBUG nova.virt.hardware [None req-89ead6ea-31c2-4fec-ad52-a9d968c7ad21 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 713.261288] env[62070]: DEBUG nova.virt.hardware [None req-89ead6ea-31c2-4fec-ad52-a9d968c7ad21 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Got 1 possible topologies {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 713.261440] env[62070]: DEBUG nova.virt.hardware [None req-89ead6ea-31c2-4fec-ad52-a9d968c7ad21 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 713.261609] env[62070]: DEBUG nova.virt.hardware [None req-89ead6ea-31c2-4fec-ad52-a9d968c7ad21 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 713.262456] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fbdb23d-255d-429a-acc3-5bbe2247c91a {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.271315] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5411d21-f22d-40cb-95cc-b38421e788a2 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.451482] env[62070]: DEBUG oslo_concurrency.lockutils [None req-0046a5be-b671-4771-8172-fdb6e585fc1d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 713.479790] env[62070]: DEBUG oslo_vmware.api [None req-1df742cb-33a0-4f39-9606-d3eecf96e864 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Task: {'id': task-1121444, 'name': PowerOnVM_Task, 'duration_secs': 0.486608} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 713.480060] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-1df742cb-33a0-4f39-9606-d3eecf96e864 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] [instance: d148d561-3211-4f1f-965a-f2b14cd60b11] Powered on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 713.480261] env[62070]: INFO nova.compute.manager [None req-1df742cb-33a0-4f39-9606-d3eecf96e864 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] [instance: d148d561-3211-4f1f-965a-f2b14cd60b11] Took 7.17 seconds to spawn the instance on the hypervisor. [ 713.480502] env[62070]: DEBUG nova.compute.manager [None req-1df742cb-33a0-4f39-9606-d3eecf96e864 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] [instance: d148d561-3211-4f1f-965a-f2b14cd60b11] Checking state {{(pid=62070) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 713.481184] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dab6e265-a75a-4202-893b-f06ce40faa4f {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.553035] env[62070]: DEBUG nova.compute.manager [req-2db45831-c4d4-4461-9b51-0c66d4ad642f req-b45d9830-e7fa-4dd7-a491-fcf13a47283b service nova] [instance: 71aead12-a182-40a7-b5a9-91c01271b800] Received event network-vif-plugged-a3ed0957-14c2-4144-8d45-f4a0e5cb45ab {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 713.554190] env[62070]: DEBUG oslo_concurrency.lockutils [req-2db45831-c4d4-4461-9b51-0c66d4ad642f req-b45d9830-e7fa-4dd7-a491-fcf13a47283b service nova] Acquiring lock "71aead12-a182-40a7-b5a9-91c01271b800-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 713.554190] env[62070]: DEBUG oslo_concurrency.lockutils [req-2db45831-c4d4-4461-9b51-0c66d4ad642f req-b45d9830-e7fa-4dd7-a491-fcf13a47283b service nova] Lock "71aead12-a182-40a7-b5a9-91c01271b800-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 713.554190] env[62070]: DEBUG oslo_concurrency.lockutils [req-2db45831-c4d4-4461-9b51-0c66d4ad642f req-b45d9830-e7fa-4dd7-a491-fcf13a47283b service nova] Lock "71aead12-a182-40a7-b5a9-91c01271b800-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 713.554190] env[62070]: DEBUG nova.compute.manager [req-2db45831-c4d4-4461-9b51-0c66d4ad642f req-b45d9830-e7fa-4dd7-a491-fcf13a47283b service nova] [instance: 71aead12-a182-40a7-b5a9-91c01271b800] No waiting events found dispatching network-vif-plugged-a3ed0957-14c2-4144-8d45-f4a0e5cb45ab {{(pid=62070) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 713.554190] env[62070]: WARNING nova.compute.manager [req-2db45831-c4d4-4461-9b51-0c66d4ad642f req-b45d9830-e7fa-4dd7-a491-fcf13a47283b service nova] [instance: 71aead12-a182-40a7-b5a9-91c01271b800] Received unexpected event 
network-vif-plugged-a3ed0957-14c2-4144-8d45-f4a0e5cb45ab for instance with vm_state building and task_state spawning. [ 713.572775] env[62070]: DEBUG oslo_vmware.api [None req-93feba92-6b0d-4708-bd35-b11f5f9670fd tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Task: {'id': task-1121445, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.065186} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 713.573113] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-93feba92-6b0d-4708-bd35-b11f5f9670fd tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] [instance: 0ac963b1-120a-464b-8228-3393135dd182] Extended root virtual disk {{(pid=62070) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 713.573897] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24090f20-3130-44aa-b65f-e16ace59c59c {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.597986] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-93feba92-6b0d-4708-bd35-b11f5f9670fd tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] [instance: 0ac963b1-120a-464b-8228-3393135dd182] Reconfiguring VM instance instance-00000021 to attach disk [datastore1] 0ac963b1-120a-464b-8228-3393135dd182/0ac963b1-120a-464b-8228-3393135dd182.vmdk or device None with type sparse {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 713.599087] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9cf40131-ef96-439b-a2ad-bc76840869f8 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.622883] env[62070]: DEBUG oslo_vmware.api [None req-93feba92-6b0d-4708-bd35-b11f5f9670fd tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Waiting for the task: (returnval){ [ 713.622883] env[62070]: value = "task-1121446" [ 713.622883] env[62070]: _type = "Task" [ 713.622883] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 713.632357] env[62070]: DEBUG oslo_vmware.api [None req-93feba92-6b0d-4708-bd35-b11f5f9670fd tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Task: {'id': task-1121446, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 713.713115] env[62070]: DEBUG nova.network.neutron [None req-89ead6ea-31c2-4fec-ad52-a9d968c7ad21 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 71aead12-a182-40a7-b5a9-91c01271b800] Successfully updated port: a3ed0957-14c2-4144-8d45-f4a0e5cb45ab {{(pid=62070) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 713.733915] env[62070]: DEBUG oslo_concurrency.lockutils [None req-78392d05-754f-4818-8f15-eae4311c941b tempest-ServersTestManualDisk-1129474164 tempest-ServersTestManualDisk-1129474164-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.534s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 713.733915] env[62070]: DEBUG nova.compute.manager [None req-78392d05-754f-4818-8f15-eae4311c941b tempest-ServersTestManualDisk-1129474164 tempest-ServersTestManualDisk-1129474164-project-member] [instance: d0914f90-200c-4715-aaab-54beacf339b9] Start building networks asynchronously for instance. {{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 713.737639] env[62070]: DEBUG oslo_concurrency.lockutils [None req-b596e62b-91d4-4a7e-a714-611f34958df6 tempest-FloatingIPsAssociationTestJSON-86130693 tempest-FloatingIPsAssociationTestJSON-86130693-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 31.227s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 713.997472] env[62070]: INFO nova.compute.manager [None req-1df742cb-33a0-4f39-9606-d3eecf96e864 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] [instance: d148d561-3211-4f1f-965a-f2b14cd60b11] Took 43.11 seconds to build instance. [ 714.135466] env[62070]: DEBUG oslo_vmware.api [None req-93feba92-6b0d-4708-bd35-b11f5f9670fd tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Task: {'id': task-1121446, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 714.218615] env[62070]: DEBUG oslo_concurrency.lockutils [None req-89ead6ea-31c2-4fec-ad52-a9d968c7ad21 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Acquiring lock "refresh_cache-71aead12-a182-40a7-b5a9-91c01271b800" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 714.218916] env[62070]: DEBUG oslo_concurrency.lockutils [None req-89ead6ea-31c2-4fec-ad52-a9d968c7ad21 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Acquired lock "refresh_cache-71aead12-a182-40a7-b5a9-91c01271b800" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 714.219231] env[62070]: DEBUG nova.network.neutron [None req-89ead6ea-31c2-4fec-ad52-a9d968c7ad21 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 71aead12-a182-40a7-b5a9-91c01271b800] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 714.249932] env[62070]: DEBUG nova.compute.utils [None req-78392d05-754f-4818-8f15-eae4311c941b tempest-ServersTestManualDisk-1129474164 tempest-ServersTestManualDisk-1129474164-project-member] Using /dev/sd instead of None {{(pid=62070) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 714.251783] env[62070]: DEBUG nova.compute.manager [None req-78392d05-754f-4818-8f15-eae4311c941b tempest-ServersTestManualDisk-1129474164 tempest-ServersTestManualDisk-1129474164-project-member] [instance: d0914f90-200c-4715-aaab-54beacf339b9] Allocating IP information in the background. 
{{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 714.251975] env[62070]: DEBUG nova.network.neutron [None req-78392d05-754f-4818-8f15-eae4311c941b tempest-ServersTestManualDisk-1129474164 tempest-ServersTestManualDisk-1129474164-project-member] [instance: d0914f90-200c-4715-aaab-54beacf339b9] allocate_for_instance() {{(pid=62070) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 714.314286] env[62070]: DEBUG nova.policy [None req-78392d05-754f-4818-8f15-eae4311c941b tempest-ServersTestManualDisk-1129474164 tempest-ServersTestManualDisk-1129474164-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd1222ab8ea414849a747026baaa8fd2c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '49dd924c6f8e4a78bb8d57c805ab40f3', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62070) authorize /opt/stack/nova/nova/policy.py:201}} [ 714.501454] env[62070]: DEBUG oslo_concurrency.lockutils [None req-1df742cb-33a0-4f39-9606-d3eecf96e864 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Lock "d148d561-3211-4f1f-965a-f2b14cd60b11" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 121.208s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 714.635507] env[62070]: DEBUG oslo_vmware.api [None req-93feba92-6b0d-4708-bd35-b11f5f9670fd tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Task: {'id': task-1121446, 'name': ReconfigVM_Task, 'duration_secs': 0.898401} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 714.638241] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-93feba92-6b0d-4708-bd35-b11f5f9670fd tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] [instance: 0ac963b1-120a-464b-8228-3393135dd182] Reconfigured VM instance instance-00000021 to attach disk [datastore1] 0ac963b1-120a-464b-8228-3393135dd182/0ac963b1-120a-464b-8228-3393135dd182.vmdk or device None with type sparse {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 714.640035] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-53c17e3a-d209-45f8-8aa1-e101be48f86d {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.646335] env[62070]: DEBUG oslo_vmware.api [None req-93feba92-6b0d-4708-bd35-b11f5f9670fd tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Waiting for the task: (returnval){ [ 714.646335] env[62070]: value = "task-1121447" [ 714.646335] env[62070]: _type = "Task" [ 714.646335] env[62070]: } to complete. 
{{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 714.655766] env[62070]: DEBUG oslo_vmware.api [None req-93feba92-6b0d-4708-bd35-b11f5f9670fd tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Task: {'id': task-1121447, 'name': Rename_Task} progress is 5%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 714.660484] env[62070]: DEBUG nova.network.neutron [None req-78392d05-754f-4818-8f15-eae4311c941b tempest-ServersTestManualDisk-1129474164 tempest-ServersTestManualDisk-1129474164-project-member] [instance: d0914f90-200c-4715-aaab-54beacf339b9] Successfully created port: e23bf645-d900-4495-8917-316b3ab16ce6 {{(pid=62070) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 714.749619] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbcbbfa7-2ac2-40f2-915b-ab74068d1cee {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.756390] env[62070]: DEBUG nova.network.neutron [None req-89ead6ea-31c2-4fec-ad52-a9d968c7ad21 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 71aead12-a182-40a7-b5a9-91c01271b800] Instance cache missing network info. {{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 714.758581] env[62070]: DEBUG nova.compute.manager [None req-78392d05-754f-4818-8f15-eae4311c941b tempest-ServersTestManualDisk-1129474164 tempest-ServersTestManualDisk-1129474164-project-member] [instance: d0914f90-200c-4715-aaab-54beacf339b9] Start building block device mappings for instance. {{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 714.767248] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c719cb28-3dc2-4fc5-8ca5-dd79e5a293b1 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.804818] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-969152e6-3093-40c6-8edd-f9e56ac0546d {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.814406] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51c0864e-6996-4983-99a4-d861f29666b3 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.831059] env[62070]: DEBUG nova.compute.provider_tree [None req-b596e62b-91d4-4a7e-a714-611f34958df6 tempest-FloatingIPsAssociationTestJSON-86130693 tempest-FloatingIPsAssociationTestJSON-86130693-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 715.008021] env[62070]: DEBUG nova.compute.manager [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] [instance: fe378560-40b8-42c9-840d-b7d60de87c4d] Starting instance... 
{{(pid=62070) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 715.101936] env[62070]: DEBUG nova.network.neutron [None req-89ead6ea-31c2-4fec-ad52-a9d968c7ad21 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 71aead12-a182-40a7-b5a9-91c01271b800] Updating instance_info_cache with network_info: [{"id": "a3ed0957-14c2-4144-8d45-f4a0e5cb45ab", "address": "fa:16:3e:3c:6a:3d", "network": {"id": "1ca75409-ae3f-4837-a2b6-ed2445253336", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1883034081-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f29ac48ab6544ec0bd1d210aec05dbc5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1559ce49-7345-443f-bf02-4bfeb88356ef", "external-id": "nsx-vlan-transportzone-670", "segmentation_id": 670, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa3ed0957-14", "ovs_interfaceid": "a3ed0957-14c2-4144-8d45-f4a0e5cb45ab", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 715.156852] env[62070]: DEBUG oslo_vmware.api [None req-93feba92-6b0d-4708-bd35-b11f5f9670fd tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Task: {'id': task-1121447, 'name': Rename_Task, 'duration_secs': 0.142423} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 715.157136] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-93feba92-6b0d-4708-bd35-b11f5f9670fd tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] [instance: 0ac963b1-120a-464b-8228-3393135dd182] Powering on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 715.157375] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2465eeae-14d6-4d68-888f-09137ffe33c8 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.164591] env[62070]: DEBUG oslo_vmware.api [None req-93feba92-6b0d-4708-bd35-b11f5f9670fd tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Waiting for the task: (returnval){ [ 715.164591] env[62070]: value = "task-1121448" [ 715.164591] env[62070]: _type = "Task" [ 715.164591] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 715.177888] env[62070]: DEBUG oslo_vmware.api [None req-93feba92-6b0d-4708-bd35-b11f5f9670fd tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Task: {'id': task-1121448, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 715.334327] env[62070]: DEBUG nova.scheduler.client.report [None req-b596e62b-91d4-4a7e-a714-611f34958df6 tempest-FloatingIPsAssociationTestJSON-86130693 tempest-FloatingIPsAssociationTestJSON-86130693-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 715.531087] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 715.592763] env[62070]: DEBUG nova.compute.manager [req-21a2b194-ebb2-44e7-8911-e167628161ca req-b3834c8a-bf2c-4e27-b385-48534263d7d1 service nova] [instance: 71aead12-a182-40a7-b5a9-91c01271b800] Received event network-changed-a3ed0957-14c2-4144-8d45-f4a0e5cb45ab {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 715.592932] env[62070]: DEBUG nova.compute.manager [req-21a2b194-ebb2-44e7-8911-e167628161ca req-b3834c8a-bf2c-4e27-b385-48534263d7d1 service nova] [instance: 71aead12-a182-40a7-b5a9-91c01271b800] Refreshing instance network info cache due to event network-changed-a3ed0957-14c2-4144-8d45-f4a0e5cb45ab. 
{{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 715.593149] env[62070]: DEBUG oslo_concurrency.lockutils [req-21a2b194-ebb2-44e7-8911-e167628161ca req-b3834c8a-bf2c-4e27-b385-48534263d7d1 service nova] Acquiring lock "refresh_cache-71aead12-a182-40a7-b5a9-91c01271b800" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 715.604964] env[62070]: DEBUG oslo_concurrency.lockutils [None req-89ead6ea-31c2-4fec-ad52-a9d968c7ad21 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Releasing lock "refresh_cache-71aead12-a182-40a7-b5a9-91c01271b800" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 715.605174] env[62070]: DEBUG nova.compute.manager [None req-89ead6ea-31c2-4fec-ad52-a9d968c7ad21 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 71aead12-a182-40a7-b5a9-91c01271b800] Instance network_info: |[{"id": "a3ed0957-14c2-4144-8d45-f4a0e5cb45ab", "address": "fa:16:3e:3c:6a:3d", "network": {"id": "1ca75409-ae3f-4837-a2b6-ed2445253336", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1883034081-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f29ac48ab6544ec0bd1d210aec05dbc5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1559ce49-7345-443f-bf02-4bfeb88356ef", "external-id": "nsx-vlan-transportzone-670", "segmentation_id": 670, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa3ed0957-14", "ovs_interfaceid": "a3ed0957-14c2-4144-8d45-f4a0e5cb45ab", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 715.605518] env[62070]: DEBUG oslo_concurrency.lockutils [req-21a2b194-ebb2-44e7-8911-e167628161ca req-b3834c8a-bf2c-4e27-b385-48534263d7d1 service nova] Acquired lock "refresh_cache-71aead12-a182-40a7-b5a9-91c01271b800" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 715.605780] env[62070]: DEBUG nova.network.neutron [req-21a2b194-ebb2-44e7-8911-e167628161ca req-b3834c8a-bf2c-4e27-b385-48534263d7d1 service nova] [instance: 71aead12-a182-40a7-b5a9-91c01271b800] Refreshing network info cache for port a3ed0957-14c2-4144-8d45-f4a0e5cb45ab {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 715.607010] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-89ead6ea-31c2-4fec-ad52-a9d968c7ad21 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 71aead12-a182-40a7-b5a9-91c01271b800] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:3c:6a:3d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1559ce49-7345-443f-bf02-4bfeb88356ef', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a3ed0957-14c2-4144-8d45-f4a0e5cb45ab', 
'vif_model': 'vmxnet3'}] {{(pid=62070) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 715.615841] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-89ead6ea-31c2-4fec-ad52-a9d968c7ad21 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Creating folder: Project (f29ac48ab6544ec0bd1d210aec05dbc5). Parent ref: group-v245319. {{(pid=62070) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 715.617229] env[62070]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-aaa8f37f-03c9-4435-a038-70b59fac7f2e {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.629039] env[62070]: INFO nova.virt.vmwareapi.vm_util [None req-89ead6ea-31c2-4fec-ad52-a9d968c7ad21 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Created folder: Project (f29ac48ab6544ec0bd1d210aec05dbc5) in parent group-v245319. [ 715.629246] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-89ead6ea-31c2-4fec-ad52-a9d968c7ad21 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Creating folder: Instances. Parent ref: group-v245333. {{(pid=62070) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 715.629481] env[62070]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b69e0a74-d388-4919-a6ee-b4cc3e383409 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.639431] env[62070]: INFO nova.virt.vmwareapi.vm_util [None req-89ead6ea-31c2-4fec-ad52-a9d968c7ad21 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Created folder: Instances in parent group-v245333. [ 715.639984] env[62070]: DEBUG oslo.service.loopingcall [None req-89ead6ea-31c2-4fec-ad52-a9d968c7ad21 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 715.640290] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 71aead12-a182-40a7-b5a9-91c01271b800] Creating VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 715.640515] env[62070]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-57f0d308-7ac8-45f2-b5b3-7543bbcc4b97 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.660631] env[62070]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 715.660631] env[62070]: value = "task-1121451" [ 715.660631] env[62070]: _type = "Task" [ 715.660631] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 715.671450] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1121451, 'name': CreateVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 715.676166] env[62070]: DEBUG oslo_vmware.api [None req-93feba92-6b0d-4708-bd35-b11f5f9670fd tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Task: {'id': task-1121448, 'name': PowerOnVM_Task} progress is 78%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 715.769286] env[62070]: DEBUG nova.compute.manager [None req-78392d05-754f-4818-8f15-eae4311c941b tempest-ServersTestManualDisk-1129474164 tempest-ServersTestManualDisk-1129474164-project-member] [instance: d0914f90-200c-4715-aaab-54beacf339b9] Start spawning the instance on the hypervisor. {{(pid=62070) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 715.799549] env[62070]: DEBUG nova.virt.hardware [None req-78392d05-754f-4818-8f15-eae4311c941b tempest-ServersTestManualDisk-1129474164 tempest-ServersTestManualDisk-1129474164-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T09:21:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T09:21:17Z,direct_url=,disk_format='vmdk',id=43ea607c-7ece-4601-9b11-75c6a16aa7dd,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9d42cb2bbadf40d6b35f237f71234611',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T09:21:18Z,virtual_size=,visibility=), allow threads: False {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 715.800297] env[62070]: DEBUG nova.virt.hardware [None req-78392d05-754f-4818-8f15-eae4311c941b tempest-ServersTestManualDisk-1129474164 tempest-ServersTestManualDisk-1129474164-project-member] Flavor limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 715.800297] env[62070]: DEBUG nova.virt.hardware [None req-78392d05-754f-4818-8f15-eae4311c941b tempest-ServersTestManualDisk-1129474164 tempest-ServersTestManualDisk-1129474164-project-member] Image limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 715.800402] env[62070]: DEBUG nova.virt.hardware [None req-78392d05-754f-4818-8f15-eae4311c941b tempest-ServersTestManualDisk-1129474164 tempest-ServersTestManualDisk-1129474164-project-member] Flavor pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 715.800516] env[62070]: DEBUG nova.virt.hardware [None req-78392d05-754f-4818-8f15-eae4311c941b tempest-ServersTestManualDisk-1129474164 tempest-ServersTestManualDisk-1129474164-project-member] Image pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 715.800663] env[62070]: DEBUG nova.virt.hardware [None req-78392d05-754f-4818-8f15-eae4311c941b tempest-ServersTestManualDisk-1129474164 tempest-ServersTestManualDisk-1129474164-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 715.800882] env[62070]: DEBUG nova.virt.hardware [None req-78392d05-754f-4818-8f15-eae4311c941b tempest-ServersTestManualDisk-1129474164 tempest-ServersTestManualDisk-1129474164-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62070) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:569}} [ 715.801058] env[62070]: DEBUG nova.virt.hardware [None req-78392d05-754f-4818-8f15-eae4311c941b tempest-ServersTestManualDisk-1129474164 tempest-ServersTestManualDisk-1129474164-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 715.801345] env[62070]: DEBUG nova.virt.hardware [None req-78392d05-754f-4818-8f15-eae4311c941b tempest-ServersTestManualDisk-1129474164 tempest-ServersTestManualDisk-1129474164-project-member] Got 1 possible topologies {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 715.801529] env[62070]: DEBUG nova.virt.hardware [None req-78392d05-754f-4818-8f15-eae4311c941b tempest-ServersTestManualDisk-1129474164 tempest-ServersTestManualDisk-1129474164-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 715.801723] env[62070]: DEBUG nova.virt.hardware [None req-78392d05-754f-4818-8f15-eae4311c941b tempest-ServersTestManualDisk-1129474164 tempest-ServersTestManualDisk-1129474164-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 715.802887] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b833e41-001b-4006-8f91-4c9289d4245f {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.813056] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aff4cb6c-29d1-4c06-bf50-52c125abd711 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.839907] env[62070]: DEBUG oslo_concurrency.lockutils [None req-b596e62b-91d4-4a7e-a714-611f34958df6 tempest-FloatingIPsAssociationTestJSON-86130693 tempest-FloatingIPsAssociationTestJSON-86130693-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.102s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 715.840586] env[62070]: ERROR nova.compute.manager [None req-b596e62b-91d4-4a7e-a714-611f34958df6 tempest-FloatingIPsAssociationTestJSON-86130693 tempest-FloatingIPsAssociationTestJSON-86130693-project-member] [instance: 2c1dfa78-d300-4505-9f87-8e11a4973af3] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port e2dc7d43-c5bb-4bda-a4db-bcb59bb3342d, please check neutron logs for more information. 
[ 715.840586] env[62070]: ERROR nova.compute.manager [instance: 2c1dfa78-d300-4505-9f87-8e11a4973af3] Traceback (most recent call last): [ 715.840586] env[62070]: ERROR nova.compute.manager [instance: 2c1dfa78-d300-4505-9f87-8e11a4973af3] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 715.840586] env[62070]: ERROR nova.compute.manager [instance: 2c1dfa78-d300-4505-9f87-8e11a4973af3] self.driver.spawn(context, instance, image_meta, [ 715.840586] env[62070]: ERROR nova.compute.manager [instance: 2c1dfa78-d300-4505-9f87-8e11a4973af3] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 715.840586] env[62070]: ERROR nova.compute.manager [instance: 2c1dfa78-d300-4505-9f87-8e11a4973af3] self._vmops.spawn(context, instance, image_meta, injected_files, [ 715.840586] env[62070]: ERROR nova.compute.manager [instance: 2c1dfa78-d300-4505-9f87-8e11a4973af3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 715.840586] env[62070]: ERROR nova.compute.manager [instance: 2c1dfa78-d300-4505-9f87-8e11a4973af3] vm_ref = self.build_virtual_machine(instance, [ 715.840586] env[62070]: ERROR nova.compute.manager [instance: 2c1dfa78-d300-4505-9f87-8e11a4973af3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 715.840586] env[62070]: ERROR nova.compute.manager [instance: 2c1dfa78-d300-4505-9f87-8e11a4973af3] vif_infos = vmwarevif.get_vif_info(self._session, [ 715.840586] env[62070]: ERROR nova.compute.manager [instance: 2c1dfa78-d300-4505-9f87-8e11a4973af3] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 715.840983] env[62070]: ERROR nova.compute.manager [instance: 2c1dfa78-d300-4505-9f87-8e11a4973af3] for vif in network_info: [ 715.840983] env[62070]: ERROR nova.compute.manager [instance: 2c1dfa78-d300-4505-9f87-8e11a4973af3] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 715.840983] env[62070]: ERROR nova.compute.manager [instance: 2c1dfa78-d300-4505-9f87-8e11a4973af3] return self._sync_wrapper(fn, *args, **kwargs) [ 715.840983] env[62070]: ERROR nova.compute.manager [instance: 2c1dfa78-d300-4505-9f87-8e11a4973af3] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 715.840983] env[62070]: ERROR nova.compute.manager [instance: 2c1dfa78-d300-4505-9f87-8e11a4973af3] self.wait() [ 715.840983] env[62070]: ERROR nova.compute.manager [instance: 2c1dfa78-d300-4505-9f87-8e11a4973af3] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 715.840983] env[62070]: ERROR nova.compute.manager [instance: 2c1dfa78-d300-4505-9f87-8e11a4973af3] self[:] = self._gt.wait() [ 715.840983] env[62070]: ERROR nova.compute.manager [instance: 2c1dfa78-d300-4505-9f87-8e11a4973af3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 715.840983] env[62070]: ERROR nova.compute.manager [instance: 2c1dfa78-d300-4505-9f87-8e11a4973af3] return self._exit_event.wait() [ 715.840983] env[62070]: ERROR nova.compute.manager [instance: 2c1dfa78-d300-4505-9f87-8e11a4973af3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 715.840983] env[62070]: ERROR nova.compute.manager [instance: 2c1dfa78-d300-4505-9f87-8e11a4973af3] result = hub.switch() [ 715.840983] env[62070]: ERROR nova.compute.manager [instance: 2c1dfa78-d300-4505-9f87-8e11a4973af3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
715.840983] env[62070]: ERROR nova.compute.manager [instance: 2c1dfa78-d300-4505-9f87-8e11a4973af3] return self.greenlet.switch() [ 715.841322] env[62070]: ERROR nova.compute.manager [instance: 2c1dfa78-d300-4505-9f87-8e11a4973af3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 715.841322] env[62070]: ERROR nova.compute.manager [instance: 2c1dfa78-d300-4505-9f87-8e11a4973af3] result = function(*args, **kwargs) [ 715.841322] env[62070]: ERROR nova.compute.manager [instance: 2c1dfa78-d300-4505-9f87-8e11a4973af3] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 715.841322] env[62070]: ERROR nova.compute.manager [instance: 2c1dfa78-d300-4505-9f87-8e11a4973af3] return func(*args, **kwargs) [ 715.841322] env[62070]: ERROR nova.compute.manager [instance: 2c1dfa78-d300-4505-9f87-8e11a4973af3] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 715.841322] env[62070]: ERROR nova.compute.manager [instance: 2c1dfa78-d300-4505-9f87-8e11a4973af3] raise e [ 715.841322] env[62070]: ERROR nova.compute.manager [instance: 2c1dfa78-d300-4505-9f87-8e11a4973af3] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 715.841322] env[62070]: ERROR nova.compute.manager [instance: 2c1dfa78-d300-4505-9f87-8e11a4973af3] nwinfo = self.network_api.allocate_for_instance( [ 715.841322] env[62070]: ERROR nova.compute.manager [instance: 2c1dfa78-d300-4505-9f87-8e11a4973af3] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 715.841322] env[62070]: ERROR nova.compute.manager [instance: 2c1dfa78-d300-4505-9f87-8e11a4973af3] created_port_ids = self._update_ports_for_instance( [ 715.841322] env[62070]: ERROR nova.compute.manager [instance: 2c1dfa78-d300-4505-9f87-8e11a4973af3] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 715.841322] env[62070]: ERROR nova.compute.manager [instance: 2c1dfa78-d300-4505-9f87-8e11a4973af3] with excutils.save_and_reraise_exception(): [ 715.841322] env[62070]: ERROR nova.compute.manager [instance: 2c1dfa78-d300-4505-9f87-8e11a4973af3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 715.841699] env[62070]: ERROR nova.compute.manager [instance: 2c1dfa78-d300-4505-9f87-8e11a4973af3] self.force_reraise() [ 715.841699] env[62070]: ERROR nova.compute.manager [instance: 2c1dfa78-d300-4505-9f87-8e11a4973af3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 715.841699] env[62070]: ERROR nova.compute.manager [instance: 2c1dfa78-d300-4505-9f87-8e11a4973af3] raise self.value [ 715.841699] env[62070]: ERROR nova.compute.manager [instance: 2c1dfa78-d300-4505-9f87-8e11a4973af3] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 715.841699] env[62070]: ERROR nova.compute.manager [instance: 2c1dfa78-d300-4505-9f87-8e11a4973af3] updated_port = self._update_port( [ 715.841699] env[62070]: ERROR nova.compute.manager [instance: 2c1dfa78-d300-4505-9f87-8e11a4973af3] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 715.841699] env[62070]: ERROR nova.compute.manager [instance: 2c1dfa78-d300-4505-9f87-8e11a4973af3] _ensure_no_port_binding_failure(port) [ 715.841699] env[62070]: ERROR nova.compute.manager [instance: 2c1dfa78-d300-4505-9f87-8e11a4973af3] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 715.841699] env[62070]: ERROR nova.compute.manager [instance: 2c1dfa78-d300-4505-9f87-8e11a4973af3] raise exception.PortBindingFailed(port_id=port['id']) [ 715.841699] env[62070]: ERROR nova.compute.manager [instance: 2c1dfa78-d300-4505-9f87-8e11a4973af3] nova.exception.PortBindingFailed: Binding failed for port e2dc7d43-c5bb-4bda-a4db-bcb59bb3342d, please check neutron logs for more information. [ 715.841699] env[62070]: ERROR nova.compute.manager [instance: 2c1dfa78-d300-4505-9f87-8e11a4973af3] [ 715.841984] env[62070]: DEBUG nova.compute.utils [None req-b596e62b-91d4-4a7e-a714-611f34958df6 tempest-FloatingIPsAssociationTestJSON-86130693 tempest-FloatingIPsAssociationTestJSON-86130693-project-member] [instance: 2c1dfa78-d300-4505-9f87-8e11a4973af3] Binding failed for port e2dc7d43-c5bb-4bda-a4db-bcb59bb3342d, please check neutron logs for more information. {{(pid=62070) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 715.842717] env[62070]: DEBUG oslo_concurrency.lockutils [None req-22f6b4d0-98d5-439d-a2a8-8f5bb53172a4 tempest-ServerTagsTestJSON-107248279 tempest-ServerTagsTestJSON-107248279-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 31.858s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 715.844288] env[62070]: INFO nova.compute.claims [None req-22f6b4d0-98d5-439d-a2a8-8f5bb53172a4 tempest-ServerTagsTestJSON-107248279 tempest-ServerTagsTestJSON-107248279-project-member] [instance: 30d782e4-30c7-41f6-b30d-95a9a59cf83c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 715.847356] env[62070]: DEBUG nova.compute.manager [None req-b596e62b-91d4-4a7e-a714-611f34958df6 tempest-FloatingIPsAssociationTestJSON-86130693 tempest-FloatingIPsAssociationTestJSON-86130693-project-member] [instance: 2c1dfa78-d300-4505-9f87-8e11a4973af3] Build of instance 2c1dfa78-d300-4505-9f87-8e11a4973af3 was re-scheduled: Binding failed for port e2dc7d43-c5bb-4bda-a4db-bcb59bb3342d, please check neutron logs for more information. 
{{(pid=62070) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 715.847798] env[62070]: DEBUG nova.compute.manager [None req-b596e62b-91d4-4a7e-a714-611f34958df6 tempest-FloatingIPsAssociationTestJSON-86130693 tempest-FloatingIPsAssociationTestJSON-86130693-project-member] [instance: 2c1dfa78-d300-4505-9f87-8e11a4973af3] Unplugging VIFs for instance {{(pid=62070) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 715.848035] env[62070]: DEBUG oslo_concurrency.lockutils [None req-b596e62b-91d4-4a7e-a714-611f34958df6 tempest-FloatingIPsAssociationTestJSON-86130693 tempest-FloatingIPsAssociationTestJSON-86130693-project-member] Acquiring lock "refresh_cache-2c1dfa78-d300-4505-9f87-8e11a4973af3" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 715.848188] env[62070]: DEBUG oslo_concurrency.lockutils [None req-b596e62b-91d4-4a7e-a714-611f34958df6 tempest-FloatingIPsAssociationTestJSON-86130693 tempest-FloatingIPsAssociationTestJSON-86130693-project-member] Acquired lock "refresh_cache-2c1dfa78-d300-4505-9f87-8e11a4973af3" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 715.848520] env[62070]: DEBUG nova.network.neutron [None req-b596e62b-91d4-4a7e-a714-611f34958df6 tempest-FloatingIPsAssociationTestJSON-86130693 tempest-FloatingIPsAssociationTestJSON-86130693-project-member] [instance: 2c1dfa78-d300-4505-9f87-8e11a4973af3] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 716.175318] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1121451, 'name': CreateVM_Task, 'duration_secs': 0.337437} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 716.175919] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 71aead12-a182-40a7-b5a9-91c01271b800] Created VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 716.177056] env[62070]: DEBUG oslo_vmware.service [None req-89ead6ea-31c2-4fec-ad52-a9d968c7ad21 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb5cc771-e557-470d-a6a8-87270cbe79e9 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.182477] env[62070]: DEBUG oslo_vmware.api [None req-93feba92-6b0d-4708-bd35-b11f5f9670fd tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Task: {'id': task-1121448, 'name': PowerOnVM_Task, 'duration_secs': 0.756711} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 716.183031] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-93feba92-6b0d-4708-bd35-b11f5f9670fd tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] [instance: 0ac963b1-120a-464b-8228-3393135dd182] Powered on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 716.183290] env[62070]: INFO nova.compute.manager [None req-93feba92-6b0d-4708-bd35-b11f5f9670fd tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] [instance: 0ac963b1-120a-464b-8228-3393135dd182] Took 7.40 seconds to spawn the instance on the hypervisor. [ 716.183545] env[62070]: DEBUG nova.compute.manager [None req-93feba92-6b0d-4708-bd35-b11f5f9670fd tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] [instance: 0ac963b1-120a-464b-8228-3393135dd182] Checking state {{(pid=62070) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 716.184804] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1dc0d074-ce86-404a-9d93-5efd67362359 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.188907] env[62070]: DEBUG oslo_concurrency.lockutils [None req-89ead6ea-31c2-4fec-ad52-a9d968c7ad21 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 716.189353] env[62070]: DEBUG oslo_concurrency.lockutils [None req-89ead6ea-31c2-4fec-ad52-a9d968c7ad21 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Acquired lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 716.189717] env[62070]: DEBUG oslo_concurrency.lockutils [None req-89ead6ea-31c2-4fec-ad52-a9d968c7ad21 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 716.189955] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6d48386f-4c4f-4c50-a613-9ccbea43f599 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.194268] env[62070]: DEBUG oslo_vmware.api [None req-89ead6ea-31c2-4fec-ad52-a9d968c7ad21 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Waiting for the task: (returnval){ [ 716.194268] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]52ddb0d2-a78b-c4fd-55a8-c3b9ccd9eb3d" [ 716.194268] env[62070]: _type = "Task" [ 716.194268] env[62070]: } to complete. 
{{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 716.209140] env[62070]: DEBUG oslo_vmware.api [None req-89ead6ea-31c2-4fec-ad52-a9d968c7ad21 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52ddb0d2-a78b-c4fd-55a8-c3b9ccd9eb3d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 716.297599] env[62070]: DEBUG nova.compute.manager [None req-81043ab4-38b5-4edb-afd0-424b75f917e0 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] [instance: d148d561-3211-4f1f-965a-f2b14cd60b11] Stashing vm_state: active {{(pid=62070) _prep_resize /opt/stack/nova/nova/compute/manager.py:5624}} [ 716.379525] env[62070]: DEBUG nova.network.neutron [None req-b596e62b-91d4-4a7e-a714-611f34958df6 tempest-FloatingIPsAssociationTestJSON-86130693 tempest-FloatingIPsAssociationTestJSON-86130693-project-member] [instance: 2c1dfa78-d300-4505-9f87-8e11a4973af3] Instance cache missing network info. {{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 716.486060] env[62070]: DEBUG nova.network.neutron [None req-78392d05-754f-4818-8f15-eae4311c941b tempest-ServersTestManualDisk-1129474164 tempest-ServersTestManualDisk-1129474164-project-member] [instance: d0914f90-200c-4715-aaab-54beacf339b9] Successfully updated port: e23bf645-d900-4495-8917-316b3ab16ce6 {{(pid=62070) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 716.576184] env[62070]: DEBUG nova.network.neutron [None req-b596e62b-91d4-4a7e-a714-611f34958df6 tempest-FloatingIPsAssociationTestJSON-86130693 tempest-FloatingIPsAssociationTestJSON-86130693-project-member] [instance: 2c1dfa78-d300-4505-9f87-8e11a4973af3] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 716.591339] env[62070]: DEBUG nova.network.neutron [req-21a2b194-ebb2-44e7-8911-e167628161ca req-b3834c8a-bf2c-4e27-b385-48534263d7d1 service nova] [instance: 71aead12-a182-40a7-b5a9-91c01271b800] Updated VIF entry in instance network info cache for port a3ed0957-14c2-4144-8d45-f4a0e5cb45ab. 
{{(pid=62070) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 716.591806] env[62070]: DEBUG nova.network.neutron [req-21a2b194-ebb2-44e7-8911-e167628161ca req-b3834c8a-bf2c-4e27-b385-48534263d7d1 service nova] [instance: 71aead12-a182-40a7-b5a9-91c01271b800] Updating instance_info_cache with network_info: [{"id": "a3ed0957-14c2-4144-8d45-f4a0e5cb45ab", "address": "fa:16:3e:3c:6a:3d", "network": {"id": "1ca75409-ae3f-4837-a2b6-ed2445253336", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1883034081-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f29ac48ab6544ec0bd1d210aec05dbc5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1559ce49-7345-443f-bf02-4bfeb88356ef", "external-id": "nsx-vlan-transportzone-670", "segmentation_id": 670, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa3ed0957-14", "ovs_interfaceid": "a3ed0957-14c2-4144-8d45-f4a0e5cb45ab", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 716.716843] env[62070]: DEBUG oslo_concurrency.lockutils [None req-89ead6ea-31c2-4fec-ad52-a9d968c7ad21 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Releasing lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 716.716965] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-89ead6ea-31c2-4fec-ad52-a9d968c7ad21 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 71aead12-a182-40a7-b5a9-91c01271b800] Processing image 43ea607c-7ece-4601-9b11-75c6a16aa7dd {{(pid=62070) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 716.717107] env[62070]: DEBUG oslo_concurrency.lockutils [None req-89ead6ea-31c2-4fec-ad52-a9d968c7ad21 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 716.717368] env[62070]: DEBUG oslo_concurrency.lockutils [None req-89ead6ea-31c2-4fec-ad52-a9d968c7ad21 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Acquired lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 716.717559] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-89ead6ea-31c2-4fec-ad52-a9d968c7ad21 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Creating directory with path [datastore2] devstack-image-cache_base 
{{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 716.718034] env[62070]: INFO nova.compute.manager [None req-93feba92-6b0d-4708-bd35-b11f5f9670fd tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] [instance: 0ac963b1-120a-464b-8228-3393135dd182] Took 43.87 seconds to build instance. [ 716.718876] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5c9073c3-e094-4ad7-9175-68ca5c9614af {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.737071] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-89ead6ea-31c2-4fec-ad52-a9d968c7ad21 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 716.737524] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-89ead6ea-31c2-4fec-ad52-a9d968c7ad21 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62070) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 716.738544] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4aa1dc80-86e9-4b15-9ec9-9df4fd0f2bcd {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.745187] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-eb963469-0150-4816-bae8-9445a5ae4f47 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.750782] env[62070]: DEBUG oslo_vmware.api [None req-89ead6ea-31c2-4fec-ad52-a9d968c7ad21 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Waiting for the task: (returnval){ [ 716.750782] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]528723f8-3629-484a-cf0a-17f81c618f0a" [ 716.750782] env[62070]: _type = "Task" [ 716.750782] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 716.758372] env[62070]: DEBUG oslo_vmware.api [None req-89ead6ea-31c2-4fec-ad52-a9d968c7ad21 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]528723f8-3629-484a-cf0a-17f81c618f0a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 716.822298] env[62070]: DEBUG oslo_concurrency.lockutils [None req-81043ab4-38b5-4edb-afd0-424b75f917e0 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 716.991150] env[62070]: DEBUG oslo_concurrency.lockutils [None req-78392d05-754f-4818-8f15-eae4311c941b tempest-ServersTestManualDisk-1129474164 tempest-ServersTestManualDisk-1129474164-project-member] Acquiring lock "refresh_cache-d0914f90-200c-4715-aaab-54beacf339b9" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 716.991302] env[62070]: DEBUG oslo_concurrency.lockutils [None req-78392d05-754f-4818-8f15-eae4311c941b tempest-ServersTestManualDisk-1129474164 tempest-ServersTestManualDisk-1129474164-project-member] Acquired lock "refresh_cache-d0914f90-200c-4715-aaab-54beacf339b9" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 716.991452] env[62070]: DEBUG nova.network.neutron [None req-78392d05-754f-4818-8f15-eae4311c941b tempest-ServersTestManualDisk-1129474164 tempest-ServersTestManualDisk-1129474164-project-member] [instance: d0914f90-200c-4715-aaab-54beacf339b9] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 717.079932] env[62070]: DEBUG oslo_concurrency.lockutils [None req-b596e62b-91d4-4a7e-a714-611f34958df6 tempest-FloatingIPsAssociationTestJSON-86130693 tempest-FloatingIPsAssociationTestJSON-86130693-project-member] Releasing lock "refresh_cache-2c1dfa78-d300-4505-9f87-8e11a4973af3" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 717.080170] env[62070]: DEBUG nova.compute.manager [None req-b596e62b-91d4-4a7e-a714-611f34958df6 tempest-FloatingIPsAssociationTestJSON-86130693 tempest-FloatingIPsAssociationTestJSON-86130693-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62070) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 717.080347] env[62070]: DEBUG nova.compute.manager [None req-b596e62b-91d4-4a7e-a714-611f34958df6 tempest-FloatingIPsAssociationTestJSON-86130693 tempest-FloatingIPsAssociationTestJSON-86130693-project-member] [instance: 2c1dfa78-d300-4505-9f87-8e11a4973af3] Deallocating network for instance {{(pid=62070) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 717.080512] env[62070]: DEBUG nova.network.neutron [None req-b596e62b-91d4-4a7e-a714-611f34958df6 tempest-FloatingIPsAssociationTestJSON-86130693 tempest-FloatingIPsAssociationTestJSON-86130693-project-member] [instance: 2c1dfa78-d300-4505-9f87-8e11a4973af3] deallocate_for_instance() {{(pid=62070) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 717.095195] env[62070]: DEBUG oslo_concurrency.lockutils [req-21a2b194-ebb2-44e7-8911-e167628161ca req-b3834c8a-bf2c-4e27-b385-48534263d7d1 service nova] Releasing lock "refresh_cache-71aead12-a182-40a7-b5a9-91c01271b800" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 717.109620] env[62070]: DEBUG nova.network.neutron [None req-b596e62b-91d4-4a7e-a714-611f34958df6 tempest-FloatingIPsAssociationTestJSON-86130693 tempest-FloatingIPsAssociationTestJSON-86130693-project-member] [instance: 2c1dfa78-d300-4505-9f87-8e11a4973af3] Instance cache missing network info. {{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 717.222375] env[62070]: DEBUG oslo_concurrency.lockutils [None req-93feba92-6b0d-4708-bd35-b11f5f9670fd tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Lock "0ac963b1-120a-464b-8228-3393135dd182" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 121.172s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 717.264025] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-89ead6ea-31c2-4fec-ad52-a9d968c7ad21 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 71aead12-a182-40a7-b5a9-91c01271b800] Preparing fetch location {{(pid=62070) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 717.264025] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-89ead6ea-31c2-4fec-ad52-a9d968c7ad21 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Creating directory with path [datastore2] vmware_temp/e6a60a36-35cb-4e02-ab1d-01daf73cb2f2/43ea607c-7ece-4601-9b11-75c6a16aa7dd {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 717.264025] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8dbee806-8099-4327-aa39-181f36b72cd7 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.291972] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eab15f14-1c0f-4f35-a197-cc90969e17c3 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.300826] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-012f21af-7a4d-4ce8-a1aa-95cbfb5836bb {{(pid=62070) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.308030] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-89ead6ea-31c2-4fec-ad52-a9d968c7ad21 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Created directory with path [datastore2] vmware_temp/e6a60a36-35cb-4e02-ab1d-01daf73cb2f2/43ea607c-7ece-4601-9b11-75c6a16aa7dd {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 717.308030] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-89ead6ea-31c2-4fec-ad52-a9d968c7ad21 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 71aead12-a182-40a7-b5a9-91c01271b800] Fetch image to [datastore2] vmware_temp/e6a60a36-35cb-4e02-ab1d-01daf73cb2f2/43ea607c-7ece-4601-9b11-75c6a16aa7dd/tmp-sparse.vmdk {{(pid=62070) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 717.308030] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-89ead6ea-31c2-4fec-ad52-a9d968c7ad21 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 71aead12-a182-40a7-b5a9-91c01271b800] Downloading image file data 43ea607c-7ece-4601-9b11-75c6a16aa7dd to [datastore2] vmware_temp/e6a60a36-35cb-4e02-ab1d-01daf73cb2f2/43ea607c-7ece-4601-9b11-75c6a16aa7dd/tmp-sparse.vmdk on the data store datastore2 {{(pid=62070) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 717.308725] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30c452f6-2fae-424d-816c-4de06684e210 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.340938] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d06169f0-b2a6-4c52-9eaa-a6c2cc3aeacb {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.347108] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd8fecd8-c5f9-4601-bbfb-8601a8835a18 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.353735] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a574fbc-c5d5-4d9d-b280-c11b72e94751 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.364726] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14766840-2ad7-4426-8708-829d665dffd7 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.377874] env[62070]: DEBUG nova.compute.provider_tree [None req-22f6b4d0-98d5-439d-a2a8-8f5bb53172a4 tempest-ServerTagsTestJSON-107248279 tempest-ServerTagsTestJSON-107248279-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 717.408657] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7000cbf0-32a6-4292-ae26-512344851ec3 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.415474] env[62070]: DEBUG oslo_vmware.service [-] 
Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-6dcab1cc-c589-446a-9c31-f729b77426e4 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.436664] env[62070]: DEBUG nova.virt.vmwareapi.images [None req-89ead6ea-31c2-4fec-ad52-a9d968c7ad21 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 71aead12-a182-40a7-b5a9-91c01271b800] Downloading image file data 43ea607c-7ece-4601-9b11-75c6a16aa7dd to the data store datastore2 {{(pid=62070) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 717.530592] env[62070]: DEBUG oslo_vmware.rw_handles [None req-89ead6ea-31c2-4fec-ad52-a9d968c7ad21 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/e6a60a36-35cb-4e02-ab1d-01daf73cb2f2/43ea607c-7ece-4601-9b11-75c6a16aa7dd/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62070) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 717.608460] env[62070]: DEBUG nova.network.neutron [None req-78392d05-754f-4818-8f15-eae4311c941b tempest-ServersTestManualDisk-1129474164 tempest-ServersTestManualDisk-1129474164-project-member] [instance: d0914f90-200c-4715-aaab-54beacf339b9] Instance cache missing network info. {{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 717.614129] env[62070]: DEBUG nova.network.neutron [None req-b596e62b-91d4-4a7e-a714-611f34958df6 tempest-FloatingIPsAssociationTestJSON-86130693 tempest-FloatingIPsAssociationTestJSON-86130693-project-member] [instance: 2c1dfa78-d300-4505-9f87-8e11a4973af3] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 717.728554] env[62070]: DEBUG nova.compute.manager [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] [instance: bcafa04d-904b-4eab-aba1-35180c2d4b22] Starting instance... 
{{(pid=62070) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 717.774539] env[62070]: DEBUG nova.compute.manager [req-0f5840fc-9c09-47aa-9059-8106019d11bc req-b5aa3fc7-f33e-413f-ae7b-24de19362e04 service nova] [instance: d0914f90-200c-4715-aaab-54beacf339b9] Received event network-vif-plugged-e23bf645-d900-4495-8917-316b3ab16ce6 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 717.774767] env[62070]: DEBUG oslo_concurrency.lockutils [req-0f5840fc-9c09-47aa-9059-8106019d11bc req-b5aa3fc7-f33e-413f-ae7b-24de19362e04 service nova] Acquiring lock "d0914f90-200c-4715-aaab-54beacf339b9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 717.775077] env[62070]: DEBUG oslo_concurrency.lockutils [req-0f5840fc-9c09-47aa-9059-8106019d11bc req-b5aa3fc7-f33e-413f-ae7b-24de19362e04 service nova] Lock "d0914f90-200c-4715-aaab-54beacf339b9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 717.775286] env[62070]: DEBUG oslo_concurrency.lockutils [req-0f5840fc-9c09-47aa-9059-8106019d11bc req-b5aa3fc7-f33e-413f-ae7b-24de19362e04 service nova] Lock "d0914f90-200c-4715-aaab-54beacf339b9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 717.775539] env[62070]: DEBUG nova.compute.manager [req-0f5840fc-9c09-47aa-9059-8106019d11bc req-b5aa3fc7-f33e-413f-ae7b-24de19362e04 service nova] [instance: d0914f90-200c-4715-aaab-54beacf339b9] No waiting events found dispatching network-vif-plugged-e23bf645-d900-4495-8917-316b3ab16ce6 {{(pid=62070) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 717.775726] env[62070]: WARNING nova.compute.manager [req-0f5840fc-9c09-47aa-9059-8106019d11bc req-b5aa3fc7-f33e-413f-ae7b-24de19362e04 service nova] [instance: d0914f90-200c-4715-aaab-54beacf339b9] Received unexpected event network-vif-plugged-e23bf645-d900-4495-8917-316b3ab16ce6 for instance with vm_state building and task_state spawning. [ 717.776021] env[62070]: DEBUG nova.compute.manager [req-0f5840fc-9c09-47aa-9059-8106019d11bc req-b5aa3fc7-f33e-413f-ae7b-24de19362e04 service nova] [instance: d0914f90-200c-4715-aaab-54beacf339b9] Received event network-changed-e23bf645-d900-4495-8917-316b3ab16ce6 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 717.777585] env[62070]: DEBUG nova.compute.manager [req-0f5840fc-9c09-47aa-9059-8106019d11bc req-b5aa3fc7-f33e-413f-ae7b-24de19362e04 service nova] [instance: d0914f90-200c-4715-aaab-54beacf339b9] Refreshing instance network info cache due to event network-changed-e23bf645-d900-4495-8917-316b3ab16ce6. 
{{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 717.777950] env[62070]: DEBUG oslo_concurrency.lockutils [req-0f5840fc-9c09-47aa-9059-8106019d11bc req-b5aa3fc7-f33e-413f-ae7b-24de19362e04 service nova] Acquiring lock "refresh_cache-d0914f90-200c-4715-aaab-54beacf339b9" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 717.882050] env[62070]: DEBUG nova.scheduler.client.report [None req-22f6b4d0-98d5-439d-a2a8-8f5bb53172a4 tempest-ServerTagsTestJSON-107248279 tempest-ServerTagsTestJSON-107248279-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 717.899195] env[62070]: DEBUG nova.network.neutron [None req-78392d05-754f-4818-8f15-eae4311c941b tempest-ServersTestManualDisk-1129474164 tempest-ServersTestManualDisk-1129474164-project-member] [instance: d0914f90-200c-4715-aaab-54beacf339b9] Updating instance_info_cache with network_info: [{"id": "e23bf645-d900-4495-8917-316b3ab16ce6", "address": "fa:16:3e:14:0c:da", "network": {"id": "0c106f8c-7353-4c61-9675-8e9c2ae5dddf", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-1747568824-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "49dd924c6f8e4a78bb8d57c805ab40f3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "415e68b4-3766-4359-afe2-f8563910d98c", "external-id": "nsx-vlan-transportzone-538", "segmentation_id": 538, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape23bf645-d9", "ovs_interfaceid": "e23bf645-d900-4495-8917-316b3ab16ce6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 718.076951] env[62070]: DEBUG nova.compute.manager [req-f1b241f2-d60e-4170-86a2-8936f35ad233 req-90bbba03-e6db-4688-b39e-49b848ac6184 service nova] [instance: 0ac963b1-120a-464b-8228-3393135dd182] Received event network-changed-6326b098-3c76-4152-b623-8921285ec01b {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 718.077176] env[62070]: DEBUG nova.compute.manager [req-f1b241f2-d60e-4170-86a2-8936f35ad233 req-90bbba03-e6db-4688-b39e-49b848ac6184 service nova] [instance: 0ac963b1-120a-464b-8228-3393135dd182] Refreshing instance network info cache due to event network-changed-6326b098-3c76-4152-b623-8921285ec01b. 
{{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 718.077524] env[62070]: DEBUG oslo_concurrency.lockutils [req-f1b241f2-d60e-4170-86a2-8936f35ad233 req-90bbba03-e6db-4688-b39e-49b848ac6184 service nova] Acquiring lock "refresh_cache-0ac963b1-120a-464b-8228-3393135dd182" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 718.077524] env[62070]: DEBUG oslo_concurrency.lockutils [req-f1b241f2-d60e-4170-86a2-8936f35ad233 req-90bbba03-e6db-4688-b39e-49b848ac6184 service nova] Acquired lock "refresh_cache-0ac963b1-120a-464b-8228-3393135dd182" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 718.077671] env[62070]: DEBUG nova.network.neutron [req-f1b241f2-d60e-4170-86a2-8936f35ad233 req-90bbba03-e6db-4688-b39e-49b848ac6184 service nova] [instance: 0ac963b1-120a-464b-8228-3393135dd182] Refreshing network info cache for port 6326b098-3c76-4152-b623-8921285ec01b {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 718.120536] env[62070]: INFO nova.compute.manager [None req-b596e62b-91d4-4a7e-a714-611f34958df6 tempest-FloatingIPsAssociationTestJSON-86130693 tempest-FloatingIPsAssociationTestJSON-86130693-project-member] [instance: 2c1dfa78-d300-4505-9f87-8e11a4973af3] Took 1.04 seconds to deallocate network for instance. [ 718.227156] env[62070]: DEBUG oslo_vmware.rw_handles [None req-89ead6ea-31c2-4fec-ad52-a9d968c7ad21 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Completed reading data from the image iterator. {{(pid=62070) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 718.227448] env[62070]: DEBUG oslo_vmware.rw_handles [None req-89ead6ea-31c2-4fec-ad52-a9d968c7ad21 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/e6a60a36-35cb-4e02-ab1d-01daf73cb2f2/43ea607c-7ece-4601-9b11-75c6a16aa7dd/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=62070) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 718.257184] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 718.388866] env[62070]: DEBUG nova.virt.vmwareapi.images [None req-89ead6ea-31c2-4fec-ad52-a9d968c7ad21 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 71aead12-a182-40a7-b5a9-91c01271b800] Downloaded image file data 43ea607c-7ece-4601-9b11-75c6a16aa7dd to vmware_temp/e6a60a36-35cb-4e02-ab1d-01daf73cb2f2/43ea607c-7ece-4601-9b11-75c6a16aa7dd/tmp-sparse.vmdk on the data store datastore2 {{(pid=62070) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 718.389637] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-89ead6ea-31c2-4fec-ad52-a9d968c7ad21 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 71aead12-a182-40a7-b5a9-91c01271b800] Caching image {{(pid=62070) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 718.389930] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-89ead6ea-31c2-4fec-ad52-a9d968c7ad21 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Copying Virtual Disk [datastore2] vmware_temp/e6a60a36-35cb-4e02-ab1d-01daf73cb2f2/43ea607c-7ece-4601-9b11-75c6a16aa7dd/tmp-sparse.vmdk to [datastore2] vmware_temp/e6a60a36-35cb-4e02-ab1d-01daf73cb2f2/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 718.390834] env[62070]: DEBUG oslo_concurrency.lockutils [None req-22f6b4d0-98d5-439d-a2a8-8f5bb53172a4 tempest-ServerTagsTestJSON-107248279 tempest-ServerTagsTestJSON-107248279-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.548s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 718.391794] env[62070]: DEBUG nova.compute.manager [None req-22f6b4d0-98d5-439d-a2a8-8f5bb53172a4 tempest-ServerTagsTestJSON-107248279 tempest-ServerTagsTestJSON-107248279-project-member] [instance: 30d782e4-30c7-41f6-b30d-95a9a59cf83c] Start building networks asynchronously for instance. 
{{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 718.396327] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0013f4c3-095a-4b47-8138-ac0f86d54acd {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.401113] env[62070]: DEBUG oslo_concurrency.lockutils [None req-9903219f-0926-4eb2-b4c0-4bfda725a93d tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 32.413s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 718.402704] env[62070]: INFO nova.compute.claims [None req-9903219f-0926-4eb2-b4c0-4bfda725a93d tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 67e99ada-a8e6-4034-b19b-5b2cb883b735] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 718.410054] env[62070]: DEBUG oslo_concurrency.lockutils [None req-78392d05-754f-4818-8f15-eae4311c941b tempest-ServersTestManualDisk-1129474164 tempest-ServersTestManualDisk-1129474164-project-member] Releasing lock "refresh_cache-d0914f90-200c-4715-aaab-54beacf339b9" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 718.411068] env[62070]: DEBUG nova.compute.manager [None req-78392d05-754f-4818-8f15-eae4311c941b tempest-ServersTestManualDisk-1129474164 tempest-ServersTestManualDisk-1129474164-project-member] [instance: d0914f90-200c-4715-aaab-54beacf339b9] Instance network_info: |[{"id": "e23bf645-d900-4495-8917-316b3ab16ce6", "address": "fa:16:3e:14:0c:da", "network": {"id": "0c106f8c-7353-4c61-9675-8e9c2ae5dddf", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-1747568824-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "49dd924c6f8e4a78bb8d57c805ab40f3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "415e68b4-3766-4359-afe2-f8563910d98c", "external-id": "nsx-vlan-transportzone-538", "segmentation_id": 538, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape23bf645-d9", "ovs_interfaceid": "e23bf645-d900-4495-8917-316b3ab16ce6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 718.411068] env[62070]: DEBUG oslo_concurrency.lockutils [req-0f5840fc-9c09-47aa-9059-8106019d11bc req-b5aa3fc7-f33e-413f-ae7b-24de19362e04 service nova] Acquired lock "refresh_cache-d0914f90-200c-4715-aaab-54beacf339b9" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 718.411320] env[62070]: DEBUG nova.network.neutron [req-0f5840fc-9c09-47aa-9059-8106019d11bc req-b5aa3fc7-f33e-413f-ae7b-24de19362e04 service nova] [instance: d0914f90-200c-4715-aaab-54beacf339b9] 
Refreshing network info cache for port e23bf645-d900-4495-8917-316b3ab16ce6 {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 718.411891] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-78392d05-754f-4818-8f15-eae4311c941b tempest-ServersTestManualDisk-1129474164 tempest-ServersTestManualDisk-1129474164-project-member] [instance: d0914f90-200c-4715-aaab-54beacf339b9] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:14:0c:da', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '415e68b4-3766-4359-afe2-f8563910d98c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e23bf645-d900-4495-8917-316b3ab16ce6', 'vif_model': 'vmxnet3'}] {{(pid=62070) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 718.420584] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-78392d05-754f-4818-8f15-eae4311c941b tempest-ServersTestManualDisk-1129474164 tempest-ServersTestManualDisk-1129474164-project-member] Creating folder: Project (49dd924c6f8e4a78bb8d57c805ab40f3). Parent ref: group-v245319. {{(pid=62070) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 718.422585] env[62070]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f4f5f515-dbff-4bd6-8aba-61c328b72c9a {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.424461] env[62070]: DEBUG oslo_vmware.api [None req-89ead6ea-31c2-4fec-ad52-a9d968c7ad21 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Waiting for the task: (returnval){ [ 718.424461] env[62070]: value = "task-1121452" [ 718.424461] env[62070]: _type = "Task" [ 718.424461] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 718.433870] env[62070]: INFO nova.virt.vmwareapi.vm_util [None req-78392d05-754f-4818-8f15-eae4311c941b tempest-ServersTestManualDisk-1129474164 tempest-ServersTestManualDisk-1129474164-project-member] Created folder: Project (49dd924c6f8e4a78bb8d57c805ab40f3) in parent group-v245319. [ 718.434133] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-78392d05-754f-4818-8f15-eae4311c941b tempest-ServersTestManualDisk-1129474164 tempest-ServersTestManualDisk-1129474164-project-member] Creating folder: Instances. Parent ref: group-v245336. {{(pid=62070) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 718.434700] env[62070]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0c0d62cd-a86f-4120-96b0-9cdb9eb50463 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.442762] env[62070]: DEBUG oslo_vmware.api [None req-89ead6ea-31c2-4fec-ad52-a9d968c7ad21 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Task: {'id': task-1121452, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 718.453173] env[62070]: INFO nova.virt.vmwareapi.vm_util [None req-78392d05-754f-4818-8f15-eae4311c941b tempest-ServersTestManualDisk-1129474164 tempest-ServersTestManualDisk-1129474164-project-member] Created folder: Instances in parent group-v245336. 
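The Folder.CreateFolder and CopyVirtualDisk_Task invocations above, together with the repeated "Waiting for the task: (returnval){ ... }" and "progress is N%" entries, come from oslo.vmware's session layer: a vSphere "*_Task" method is invoked through the session and wait_for_task() polls the returned task reference until it finishes. A minimal sketch of that pattern follows; it is illustrative only and not Nova's code — the host, credentials, datastore paths and image/instance names are placeholders, and the exact constructor arguments vary between oslo.vmware releases.

```python
from oslo_vmware import api

# Placeholder connection details; the real values come from nova.conf's
# [vmware] section. Constructor arguments vary between oslo.vmware releases.
session = api.VMwareAPISession(
    'vc1.example.test',   # vCenter host (placeholder)
    'user', 'secret',     # credentials (placeholders)
    10,                   # api_retry_count
    0.5)                  # task_poll_interval: seconds between _poll_task calls

# Any vSphere "*_Task" method invoked through the session (CopyVirtualDisk_Task,
# CreateVM_Task, DeleteDatastoreFile_Task, ... in the log) returns a task
# managed-object reference; wait_for_task() then polls it until it reaches a
# terminal state, which is what produces the "progress is N%" DEBUG entries.
disk_mgr = session.vim.service_content.virtualDiskManager
task_ref = session.invoke_api(
    session.vim, 'CopyVirtualDisk_Task', disk_mgr,
    # Against vCenter a sourceDatacenter/destDatacenter moref is normally also
    # passed; it is omitted here because it is looked up earlier in the spawn path.
    sourceName='[datastore2] devstack-image-cache_base/<image>/<image>.vmdk',
    destName='[datastore2] <instance-uuid>/<instance-uuid>.vmdk')
session.wait_for_task(task_ref)
```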
[ 718.453173] env[62070]: DEBUG oslo.service.loopingcall [None req-78392d05-754f-4818-8f15-eae4311c941b tempest-ServersTestManualDisk-1129474164 tempest-ServersTestManualDisk-1129474164-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 718.453334] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d0914f90-200c-4715-aaab-54beacf339b9] Creating VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 718.453565] env[62070]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-90b5f930-ed5f-4ca5-abd2-7d015e1ea813 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.477890] env[62070]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 718.477890] env[62070]: value = "task-1121455" [ 718.477890] env[62070]: _type = "Task" [ 718.477890] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 718.487116] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1121455, 'name': CreateVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 718.903017] env[62070]: DEBUG nova.compute.utils [None req-22f6b4d0-98d5-439d-a2a8-8f5bb53172a4 tempest-ServerTagsTestJSON-107248279 tempest-ServerTagsTestJSON-107248279-project-member] Using /dev/sd instead of None {{(pid=62070) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 718.906192] env[62070]: DEBUG nova.compute.manager [None req-22f6b4d0-98d5-439d-a2a8-8f5bb53172a4 tempest-ServerTagsTestJSON-107248279 tempest-ServerTagsTestJSON-107248279-project-member] [instance: 30d782e4-30c7-41f6-b30d-95a9a59cf83c] Allocating IP information in the background. {{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 718.906192] env[62070]: DEBUG nova.network.neutron [None req-22f6b4d0-98d5-439d-a2a8-8f5bb53172a4 tempest-ServerTagsTestJSON-107248279 tempest-ServerTagsTestJSON-107248279-project-member] [instance: 30d782e4-30c7-41f6-b30d-95a9a59cf83c] allocate_for_instance() {{(pid=62070) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 718.935388] env[62070]: DEBUG oslo_vmware.api [None req-89ead6ea-31c2-4fec-ad52-a9d968c7ad21 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Task: {'id': task-1121452, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 718.994487] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1121455, 'name': CreateVM_Task, 'duration_secs': 0.358178} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 718.994711] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d0914f90-200c-4715-aaab-54beacf339b9] Created VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 718.995433] env[62070]: DEBUG oslo_concurrency.lockutils [None req-78392d05-754f-4818-8f15-eae4311c941b tempest-ServersTestManualDisk-1129474164 tempest-ServersTestManualDisk-1129474164-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 718.995677] env[62070]: DEBUG oslo_concurrency.lockutils [None req-78392d05-754f-4818-8f15-eae4311c941b tempest-ServersTestManualDisk-1129474164 tempest-ServersTestManualDisk-1129474164-project-member] Acquired lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 718.996033] env[62070]: DEBUG oslo_concurrency.lockutils [None req-78392d05-754f-4818-8f15-eae4311c941b tempest-ServersTestManualDisk-1129474164 tempest-ServersTestManualDisk-1129474164-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 718.996382] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ff4f8c8f-257b-4089-b8c3-edad00a310a1 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.007863] env[62070]: DEBUG oslo_vmware.api [None req-78392d05-754f-4818-8f15-eae4311c941b tempest-ServersTestManualDisk-1129474164 tempest-ServersTestManualDisk-1129474164-project-member] Waiting for the task: (returnval){ [ 719.007863] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]52ebc83a-7489-87e8-7c5d-eae83817f7f6" [ 719.007863] env[62070]: _type = "Task" [ 719.007863] env[62070]: } to complete. 
{{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 719.012910] env[62070]: DEBUG nova.policy [None req-22f6b4d0-98d5-439d-a2a8-8f5bb53172a4 tempest-ServerTagsTestJSON-107248279 tempest-ServerTagsTestJSON-107248279-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4cc78192fe70440c8b8d29d4aa3eaf2e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9542e7a0d2af459b9a7bd8ae65de8a8c', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62070) authorize /opt/stack/nova/nova/policy.py:201}} [ 719.020421] env[62070]: DEBUG oslo_concurrency.lockutils [None req-78392d05-754f-4818-8f15-eae4311c941b tempest-ServersTestManualDisk-1129474164 tempest-ServersTestManualDisk-1129474164-project-member] Releasing lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 719.020732] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-78392d05-754f-4818-8f15-eae4311c941b tempest-ServersTestManualDisk-1129474164 tempest-ServersTestManualDisk-1129474164-project-member] [instance: d0914f90-200c-4715-aaab-54beacf339b9] Processing image 43ea607c-7ece-4601-9b11-75c6a16aa7dd {{(pid=62070) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 719.020958] env[62070]: DEBUG oslo_concurrency.lockutils [None req-78392d05-754f-4818-8f15-eae4311c941b tempest-ServersTestManualDisk-1129474164 tempest-ServersTestManualDisk-1129474164-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 719.158665] env[62070]: DEBUG nova.network.neutron [req-f1b241f2-d60e-4170-86a2-8936f35ad233 req-90bbba03-e6db-4688-b39e-49b848ac6184 service nova] [instance: 0ac963b1-120a-464b-8228-3393135dd182] Updated VIF entry in instance network info cache for port 6326b098-3c76-4152-b623-8921285ec01b. 
{{(pid=62070) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 719.159337] env[62070]: DEBUG nova.network.neutron [req-f1b241f2-d60e-4170-86a2-8936f35ad233 req-90bbba03-e6db-4688-b39e-49b848ac6184 service nova] [instance: 0ac963b1-120a-464b-8228-3393135dd182] Updating instance_info_cache with network_info: [{"id": "6326b098-3c76-4152-b623-8921285ec01b", "address": "fa:16:3e:cc:12:22", "network": {"id": "48dc51c7-cfa4-452e-9d72-2968d9a40dfa", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-274800531-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.179", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "925dff51764c4b56ae7ea05fbde2ecdd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2c7c1b46-cb81-45da-b5aa-7905d4da5854", "external-id": "nsx-vlan-transportzone-15", "segmentation_id": 15, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6326b098-3c", "ovs_interfaceid": "6326b098-3c76-4152-b623-8921285ec01b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 719.168529] env[62070]: INFO nova.scheduler.client.report [None req-b596e62b-91d4-4a7e-a714-611f34958df6 tempest-FloatingIPsAssociationTestJSON-86130693 tempest-FloatingIPsAssociationTestJSON-86130693-project-member] Deleted allocations for instance 2c1dfa78-d300-4505-9f87-8e11a4973af3 [ 719.407292] env[62070]: DEBUG nova.compute.manager [None req-22f6b4d0-98d5-439d-a2a8-8f5bb53172a4 tempest-ServerTagsTestJSON-107248279 tempest-ServerTagsTestJSON-107248279-project-member] [instance: 30d782e4-30c7-41f6-b30d-95a9a59cf83c] Start building block device mappings for instance. {{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 719.436558] env[62070]: DEBUG oslo_vmware.api [None req-89ead6ea-31c2-4fec-ad52-a9d968c7ad21 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Task: {'id': task-1121452, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.009101} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 719.436826] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-89ead6ea-31c2-4fec-ad52-a9d968c7ad21 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Copied Virtual Disk [datastore2] vmware_temp/e6a60a36-35cb-4e02-ab1d-01daf73cb2f2/43ea607c-7ece-4601-9b11-75c6a16aa7dd/tmp-sparse.vmdk to [datastore2] vmware_temp/e6a60a36-35cb-4e02-ab1d-01daf73cb2f2/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 719.437009] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-89ead6ea-31c2-4fec-ad52-a9d968c7ad21 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Deleting the datastore file [datastore2] vmware_temp/e6a60a36-35cb-4e02-ab1d-01daf73cb2f2/43ea607c-7ece-4601-9b11-75c6a16aa7dd/tmp-sparse.vmdk {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 719.437264] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e8c74bed-9efd-436f-a767-c4d4199ba805 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.443246] env[62070]: DEBUG oslo_vmware.api [None req-89ead6ea-31c2-4fec-ad52-a9d968c7ad21 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Waiting for the task: (returnval){ [ 719.443246] env[62070]: value = "task-1121456" [ 719.443246] env[62070]: _type = "Task" [ 719.443246] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 719.451866] env[62070]: DEBUG oslo_vmware.api [None req-89ead6ea-31c2-4fec-ad52-a9d968c7ad21 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Task: {'id': task-1121456, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 719.505244] env[62070]: DEBUG nova.network.neutron [req-0f5840fc-9c09-47aa-9059-8106019d11bc req-b5aa3fc7-f33e-413f-ae7b-24de19362e04 service nova] [instance: d0914f90-200c-4715-aaab-54beacf339b9] Updated VIF entry in instance network info cache for port e23bf645-d900-4495-8917-316b3ab16ce6. 
{{(pid=62070) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 719.505563] env[62070]: DEBUG nova.network.neutron [req-0f5840fc-9c09-47aa-9059-8106019d11bc req-b5aa3fc7-f33e-413f-ae7b-24de19362e04 service nova] [instance: d0914f90-200c-4715-aaab-54beacf339b9] Updating instance_info_cache with network_info: [{"id": "e23bf645-d900-4495-8917-316b3ab16ce6", "address": "fa:16:3e:14:0c:da", "network": {"id": "0c106f8c-7353-4c61-9675-8e9c2ae5dddf", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-1747568824-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "49dd924c6f8e4a78bb8d57c805ab40f3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "415e68b4-3766-4359-afe2-f8563910d98c", "external-id": "nsx-vlan-transportzone-538", "segmentation_id": 538, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape23bf645-d9", "ovs_interfaceid": "e23bf645-d900-4495-8917-316b3ab16ce6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 719.621922] env[62070]: DEBUG nova.network.neutron [None req-22f6b4d0-98d5-439d-a2a8-8f5bb53172a4 tempest-ServerTagsTestJSON-107248279 tempest-ServerTagsTestJSON-107248279-project-member] [instance: 30d782e4-30c7-41f6-b30d-95a9a59cf83c] Successfully created port: 02efa9f5-65b4-4a93-a5d1-a5a15f0d6da0 {{(pid=62070) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 719.662794] env[62070]: DEBUG oslo_concurrency.lockutils [req-f1b241f2-d60e-4170-86a2-8936f35ad233 req-90bbba03-e6db-4688-b39e-49b848ac6184 service nova] Releasing lock "refresh_cache-0ac963b1-120a-464b-8228-3393135dd182" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 719.677538] env[62070]: DEBUG oslo_concurrency.lockutils [None req-b596e62b-91d4-4a7e-a714-611f34958df6 tempest-FloatingIPsAssociationTestJSON-86130693 tempest-FloatingIPsAssociationTestJSON-86130693-project-member] Lock "2c1dfa78-d300-4505-9f87-8e11a4973af3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 126.589s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 719.897913] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f932242b-e208-4c0c-86f4-011d4ed7bb44 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.909346] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b60609a-c987-4ec3-b4d1-b435f86462d8 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.943442] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75762609-684c-4e73-9bdc-ed25e1531796 {{(pid=62070) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.959040] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba0ab483-a209-40da-99c1-10b2a9dce76c {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.963757] env[62070]: DEBUG oslo_vmware.api [None req-89ead6ea-31c2-4fec-ad52-a9d968c7ad21 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Task: {'id': task-1121456, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.101686} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 719.964174] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-89ead6ea-31c2-4fec-ad52-a9d968c7ad21 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Deleted the datastore file {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 719.964386] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-89ead6ea-31c2-4fec-ad52-a9d968c7ad21 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Moving file from [datastore2] vmware_temp/e6a60a36-35cb-4e02-ab1d-01daf73cb2f2/43ea607c-7ece-4601-9b11-75c6a16aa7dd to [datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd. {{(pid=62070) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:334}} [ 719.965178] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.MoveDatastoreFile_Task with opID=oslo.vmware-69af5182-8424-48ce-a2e5-8ca2ee34c3c0 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.975642] env[62070]: DEBUG nova.compute.provider_tree [None req-9903219f-0926-4eb2-b4c0-4bfda725a93d tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 719.981625] env[62070]: DEBUG oslo_vmware.api [None req-89ead6ea-31c2-4fec-ad52-a9d968c7ad21 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Waiting for the task: (returnval){ [ 719.981625] env[62070]: value = "task-1121457" [ 719.981625] env[62070]: _type = "Task" [ 719.981625] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 719.990280] env[62070]: DEBUG oslo_vmware.api [None req-89ead6ea-31c2-4fec-ad52-a9d968c7ad21 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Task: {'id': task-1121457, 'name': MoveDatastoreFile_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 720.009642] env[62070]: DEBUG oslo_concurrency.lockutils [req-0f5840fc-9c09-47aa-9059-8106019d11bc req-b5aa3fc7-f33e-413f-ae7b-24de19362e04 service nova] Releasing lock "refresh_cache-d0914f90-200c-4715-aaab-54beacf339b9" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 720.180422] env[62070]: DEBUG nova.compute.manager [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] [instance: 5a146d8f-6921-4b3e-8696-d2804fb855ba] Starting instance... {{(pid=62070) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 720.444741] env[62070]: DEBUG nova.compute.manager [None req-22f6b4d0-98d5-439d-a2a8-8f5bb53172a4 tempest-ServerTagsTestJSON-107248279 tempest-ServerTagsTestJSON-107248279-project-member] [instance: 30d782e4-30c7-41f6-b30d-95a9a59cf83c] Start spawning the instance on the hypervisor. {{(pid=62070) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 720.471067] env[62070]: DEBUG nova.virt.hardware [None req-22f6b4d0-98d5-439d-a2a8-8f5bb53172a4 tempest-ServerTagsTestJSON-107248279 tempest-ServerTagsTestJSON-107248279-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T09:21:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T09:21:17Z,direct_url=,disk_format='vmdk',id=43ea607c-7ece-4601-9b11-75c6a16aa7dd,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9d42cb2bbadf40d6b35f237f71234611',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T09:21:18Z,virtual_size=,visibility=), allow threads: False {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 720.471721] env[62070]: DEBUG nova.virt.hardware [None req-22f6b4d0-98d5-439d-a2a8-8f5bb53172a4 tempest-ServerTagsTestJSON-107248279 tempest-ServerTagsTestJSON-107248279-project-member] Flavor limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 720.471890] env[62070]: DEBUG nova.virt.hardware [None req-22f6b4d0-98d5-439d-a2a8-8f5bb53172a4 tempest-ServerTagsTestJSON-107248279 tempest-ServerTagsTestJSON-107248279-project-member] Image limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 720.472095] env[62070]: DEBUG nova.virt.hardware [None req-22f6b4d0-98d5-439d-a2a8-8f5bb53172a4 tempest-ServerTagsTestJSON-107248279 tempest-ServerTagsTestJSON-107248279-project-member] Flavor pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 720.472245] env[62070]: DEBUG nova.virt.hardware [None req-22f6b4d0-98d5-439d-a2a8-8f5bb53172a4 tempest-ServerTagsTestJSON-107248279 tempest-ServerTagsTestJSON-107248279-project-member] Image pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 720.472391] env[62070]: DEBUG nova.virt.hardware [None req-22f6b4d0-98d5-439d-a2a8-8f5bb53172a4 tempest-ServerTagsTestJSON-107248279 
tempest-ServerTagsTestJSON-107248279-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 720.472601] env[62070]: DEBUG nova.virt.hardware [None req-22f6b4d0-98d5-439d-a2a8-8f5bb53172a4 tempest-ServerTagsTestJSON-107248279 tempest-ServerTagsTestJSON-107248279-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 720.472756] env[62070]: DEBUG nova.virt.hardware [None req-22f6b4d0-98d5-439d-a2a8-8f5bb53172a4 tempest-ServerTagsTestJSON-107248279 tempest-ServerTagsTestJSON-107248279-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 720.472961] env[62070]: DEBUG nova.virt.hardware [None req-22f6b4d0-98d5-439d-a2a8-8f5bb53172a4 tempest-ServerTagsTestJSON-107248279 tempest-ServerTagsTestJSON-107248279-project-member] Got 1 possible topologies {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 720.473378] env[62070]: DEBUG nova.virt.hardware [None req-22f6b4d0-98d5-439d-a2a8-8f5bb53172a4 tempest-ServerTagsTestJSON-107248279 tempest-ServerTagsTestJSON-107248279-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 720.473614] env[62070]: DEBUG nova.virt.hardware [None req-22f6b4d0-98d5-439d-a2a8-8f5bb53172a4 tempest-ServerTagsTestJSON-107248279 tempest-ServerTagsTestJSON-107248279-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 720.474525] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f675f987-1f08-45b6-a308-92a850ffce3b {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.478196] env[62070]: DEBUG nova.scheduler.client.report [None req-9903219f-0926-4eb2-b4c0-4bfda725a93d tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 720.488879] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d5267fa-f650-4166-b7f7-9fed30312b2d {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.501769] env[62070]: DEBUG oslo_vmware.api [None req-89ead6ea-31c2-4fec-ad52-a9d968c7ad21 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Task: {'id': task-1121457, 'name': MoveDatastoreFile_Task, 'duration_secs': 0.035731} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 720.512141] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-89ead6ea-31c2-4fec-ad52-a9d968c7ad21 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] File moved {{(pid=62070) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:346}} [ 720.512141] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-89ead6ea-31c2-4fec-ad52-a9d968c7ad21 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 71aead12-a182-40a7-b5a9-91c01271b800] Cleaning up location [datastore2] vmware_temp/e6a60a36-35cb-4e02-ab1d-01daf73cb2f2 {{(pid=62070) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 720.512141] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-89ead6ea-31c2-4fec-ad52-a9d968c7ad21 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Deleting the datastore file [datastore2] vmware_temp/e6a60a36-35cb-4e02-ab1d-01daf73cb2f2 {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 720.512141] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-bc637544-a2e4-497b-9d1a-a52443aca6ab {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.517718] env[62070]: DEBUG oslo_vmware.api [None req-89ead6ea-31c2-4fec-ad52-a9d968c7ad21 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Waiting for the task: (returnval){ [ 720.517718] env[62070]: value = "task-1121458" [ 720.517718] env[62070]: _type = "Task" [ 720.517718] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 720.526439] env[62070]: DEBUG oslo_vmware.api [None req-89ead6ea-31c2-4fec-ad52-a9d968c7ad21 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Task: {'id': task-1121458, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 720.700594] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 720.983758] env[62070]: DEBUG oslo_concurrency.lockutils [None req-afd9a476-b061-4376-a74f-1ae1bdb543db tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Acquiring lock "4a5f644a-1670-4c6b-a762-f87f1ee4cce5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 720.984044] env[62070]: DEBUG oslo_concurrency.lockutils [None req-afd9a476-b061-4376-a74f-1ae1bdb543db tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Lock "4a5f644a-1670-4c6b-a762-f87f1ee4cce5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 720.985715] env[62070]: DEBUG oslo_concurrency.lockutils [None req-9903219f-0926-4eb2-b4c0-4bfda725a93d tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.585s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 720.986152] env[62070]: DEBUG nova.compute.manager [None req-9903219f-0926-4eb2-b4c0-4bfda725a93d tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 67e99ada-a8e6-4034-b19b-5b2cb883b735] Start building networks asynchronously for instance. {{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 720.992293] env[62070]: DEBUG oslo_concurrency.lockutils [None req-87135f2c-55d6-41ab-9b21-58289e78f833 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 32.864s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 720.997262] env[62070]: INFO nova.compute.claims [None req-87135f2c-55d6-41ab-9b21-58289e78f833 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] [instance: 963feecc-ff58-4cbb-8d6f-3f9035337087] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 721.028274] env[62070]: DEBUG oslo_vmware.api [None req-89ead6ea-31c2-4fec-ad52-a9d968c7ad21 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Task: {'id': task-1121458, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.023897} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 721.028499] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-89ead6ea-31c2-4fec-ad52-a9d968c7ad21 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Deleted the datastore file {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 721.029220] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9d0666dd-5424-4f98-bb9b-9d8f29fd5b96 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.035434] env[62070]: DEBUG oslo_vmware.api [None req-89ead6ea-31c2-4fec-ad52-a9d968c7ad21 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Waiting for the task: (returnval){ [ 721.035434] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]52959710-92e4-de6f-5c38-98785a7332b1" [ 721.035434] env[62070]: _type = "Task" [ 721.035434] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 721.045823] env[62070]: DEBUG oslo_vmware.api [None req-89ead6ea-31c2-4fec-ad52-a9d968c7ad21 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52959710-92e4-de6f-5c38-98785a7332b1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 721.351409] env[62070]: DEBUG nova.compute.manager [req-4432d745-53e8-4d66-84b8-ed974ab66775 req-1c23c2d0-53ea-4a24-ac16-5f3cea81f2c8 service nova] [instance: 30d782e4-30c7-41f6-b30d-95a9a59cf83c] Received event network-vif-plugged-02efa9f5-65b4-4a93-a5d1-a5a15f0d6da0 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 721.351637] env[62070]: DEBUG oslo_concurrency.lockutils [req-4432d745-53e8-4d66-84b8-ed974ab66775 req-1c23c2d0-53ea-4a24-ac16-5f3cea81f2c8 service nova] Acquiring lock "30d782e4-30c7-41f6-b30d-95a9a59cf83c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 721.351880] env[62070]: DEBUG oslo_concurrency.lockutils [req-4432d745-53e8-4d66-84b8-ed974ab66775 req-1c23c2d0-53ea-4a24-ac16-5f3cea81f2c8 service nova] Lock "30d782e4-30c7-41f6-b30d-95a9a59cf83c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 721.352069] env[62070]: DEBUG oslo_concurrency.lockutils [req-4432d745-53e8-4d66-84b8-ed974ab66775 req-1c23c2d0-53ea-4a24-ac16-5f3cea81f2c8 service nova] Lock "30d782e4-30c7-41f6-b30d-95a9a59cf83c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 721.352239] env[62070]: DEBUG nova.compute.manager [req-4432d745-53e8-4d66-84b8-ed974ab66775 req-1c23c2d0-53ea-4a24-ac16-5f3cea81f2c8 service nova] [instance: 30d782e4-30c7-41f6-b30d-95a9a59cf83c] No waiting events found dispatching network-vif-plugged-02efa9f5-65b4-4a93-a5d1-a5a15f0d6da0 {{(pid=62070) pop_instance_event 
/opt/stack/nova/nova/compute/manager.py:320}} [ 721.352399] env[62070]: WARNING nova.compute.manager [req-4432d745-53e8-4d66-84b8-ed974ab66775 req-1c23c2d0-53ea-4a24-ac16-5f3cea81f2c8 service nova] [instance: 30d782e4-30c7-41f6-b30d-95a9a59cf83c] Received unexpected event network-vif-plugged-02efa9f5-65b4-4a93-a5d1-a5a15f0d6da0 for instance with vm_state building and task_state spawning. [ 721.458340] env[62070]: DEBUG nova.network.neutron [None req-22f6b4d0-98d5-439d-a2a8-8f5bb53172a4 tempest-ServerTagsTestJSON-107248279 tempest-ServerTagsTestJSON-107248279-project-member] [instance: 30d782e4-30c7-41f6-b30d-95a9a59cf83c] Successfully updated port: 02efa9f5-65b4-4a93-a5d1-a5a15f0d6da0 {{(pid=62070) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 721.502194] env[62070]: DEBUG nova.compute.utils [None req-9903219f-0926-4eb2-b4c0-4bfda725a93d tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Using /dev/sd instead of None {{(pid=62070) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 721.505851] env[62070]: DEBUG nova.compute.manager [None req-9903219f-0926-4eb2-b4c0-4bfda725a93d tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 67e99ada-a8e6-4034-b19b-5b2cb883b735] Allocating IP information in the background. {{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 721.506061] env[62070]: DEBUG nova.network.neutron [None req-9903219f-0926-4eb2-b4c0-4bfda725a93d tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 67e99ada-a8e6-4034-b19b-5b2cb883b735] allocate_for_instance() {{(pid=62070) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 721.546495] env[62070]: DEBUG oslo_vmware.api [None req-89ead6ea-31c2-4fec-ad52-a9d968c7ad21 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52959710-92e4-de6f-5c38-98785a7332b1, 'name': SearchDatastore_Task, 'duration_secs': 0.01008} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 721.546757] env[62070]: DEBUG oslo_concurrency.lockutils [None req-89ead6ea-31c2-4fec-ad52-a9d968c7ad21 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Releasing lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 721.547016] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-89ead6ea-31c2-4fec-ad52-a9d968c7ad21 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore2] 71aead12-a182-40a7-b5a9-91c01271b800/71aead12-a182-40a7-b5a9-91c01271b800.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 721.547296] env[62070]: DEBUG oslo_concurrency.lockutils [None req-78392d05-754f-4818-8f15-eae4311c941b tempest-ServersTestManualDisk-1129474164 tempest-ServersTestManualDisk-1129474164-project-member] Acquired lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 721.547483] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-78392d05-754f-4818-8f15-eae4311c941b tempest-ServersTestManualDisk-1129474164 tempest-ServersTestManualDisk-1129474164-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 721.547691] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7698d6f4-786f-4a2f-acf0-4f4ee661931f {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.551921] env[62070]: DEBUG nova.policy [None req-9903219f-0926-4eb2-b4c0-4bfda725a93d tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0aa820b3e16d4d6fbc6bda0b232025fc', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c91e5eeeeb1742f499b2edaf76a93a3b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62070) authorize /opt/stack/nova/nova/policy.py:201}} [ 721.553367] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-54d5bc45-4c0b-4674-ab18-b3a6c10196ef {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.562403] env[62070]: DEBUG oslo_vmware.api [None req-89ead6ea-31c2-4fec-ad52-a9d968c7ad21 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Waiting for the task: (returnval){ [ 721.562403] env[62070]: value = "task-1121459" [ 721.562403] env[62070]: _type = "Task" [ 721.562403] env[62070]: } to complete. 
{{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 721.563616] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-78392d05-754f-4818-8f15-eae4311c941b tempest-ServersTestManualDisk-1129474164 tempest-ServersTestManualDisk-1129474164-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 721.563783] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-78392d05-754f-4818-8f15-eae4311c941b tempest-ServersTestManualDisk-1129474164 tempest-ServersTestManualDisk-1129474164-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62070) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 721.568386] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-434d6e43-c8ec-46b7-8ed1-90639116a17b {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.573761] env[62070]: DEBUG oslo_vmware.api [None req-78392d05-754f-4818-8f15-eae4311c941b tempest-ServersTestManualDisk-1129474164 tempest-ServersTestManualDisk-1129474164-project-member] Waiting for the task: (returnval){ [ 721.573761] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]521bfd5d-3d8b-0679-5a39-32acc489e0b3" [ 721.573761] env[62070]: _type = "Task" [ 721.573761] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 721.576726] env[62070]: DEBUG oslo_vmware.api [None req-89ead6ea-31c2-4fec-ad52-a9d968c7ad21 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Task: {'id': task-1121459, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 721.584986] env[62070]: DEBUG oslo_vmware.api [None req-78392d05-754f-4818-8f15-eae4311c941b tempest-ServersTestManualDisk-1129474164 tempest-ServersTestManualDisk-1129474164-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]521bfd5d-3d8b-0679-5a39-32acc489e0b3, 'name': SearchDatastore_Task, 'duration_secs': 0.008921} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 721.585279] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-729db981-7023-4e28-b912-e24b21701071 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.591173] env[62070]: DEBUG oslo_vmware.api [None req-78392d05-754f-4818-8f15-eae4311c941b tempest-ServersTestManualDisk-1129474164 tempest-ServersTestManualDisk-1129474164-project-member] Waiting for the task: (returnval){ [ 721.591173] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]521f1e49-24f5-5d68-a27e-1ed640de9d8d" [ 721.591173] env[62070]: _type = "Task" [ 721.591173] env[62070]: } to complete. 
{{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 721.600451] env[62070]: DEBUG oslo_vmware.api [None req-78392d05-754f-4818-8f15-eae4311c941b tempest-ServersTestManualDisk-1129474164 tempest-ServersTestManualDisk-1129474164-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]521f1e49-24f5-5d68-a27e-1ed640de9d8d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 721.962386] env[62070]: DEBUG oslo_concurrency.lockutils [None req-22f6b4d0-98d5-439d-a2a8-8f5bb53172a4 tempest-ServerTagsTestJSON-107248279 tempest-ServerTagsTestJSON-107248279-project-member] Acquiring lock "refresh_cache-30d782e4-30c7-41f6-b30d-95a9a59cf83c" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 721.962386] env[62070]: DEBUG oslo_concurrency.lockutils [None req-22f6b4d0-98d5-439d-a2a8-8f5bb53172a4 tempest-ServerTagsTestJSON-107248279 tempest-ServerTagsTestJSON-107248279-project-member] Acquired lock "refresh_cache-30d782e4-30c7-41f6-b30d-95a9a59cf83c" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 721.962386] env[62070]: DEBUG nova.network.neutron [None req-22f6b4d0-98d5-439d-a2a8-8f5bb53172a4 tempest-ServerTagsTestJSON-107248279 tempest-ServerTagsTestJSON-107248279-project-member] [instance: 30d782e4-30c7-41f6-b30d-95a9a59cf83c] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 722.007368] env[62070]: DEBUG nova.compute.manager [None req-9903219f-0926-4eb2-b4c0-4bfda725a93d tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 67e99ada-a8e6-4034-b19b-5b2cb883b735] Start building block device mappings for instance. {{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 722.020318] env[62070]: DEBUG nova.network.neutron [None req-9903219f-0926-4eb2-b4c0-4bfda725a93d tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 67e99ada-a8e6-4034-b19b-5b2cb883b735] Successfully created port: e01eb485-1347-4afb-b881-62797a5b84af {{(pid=62070) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 722.080890] env[62070]: DEBUG oslo_vmware.api [None req-89ead6ea-31c2-4fec-ad52-a9d968c7ad21 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Task: {'id': task-1121459, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.488458} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 722.081204] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-89ead6ea-31c2-4fec-ad52-a9d968c7ad21 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore2] 71aead12-a182-40a7-b5a9-91c01271b800/71aead12-a182-40a7-b5a9-91c01271b800.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 722.081424] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-89ead6ea-31c2-4fec-ad52-a9d968c7ad21 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 71aead12-a182-40a7-b5a9-91c01271b800] Extending root virtual disk to 1048576 {{(pid=62070) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 722.082383] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-99ade494-0933-4251-b128-cda4aa3eaab7 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.088701] env[62070]: DEBUG oslo_vmware.api [None req-89ead6ea-31c2-4fec-ad52-a9d968c7ad21 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Waiting for the task: (returnval){ [ 722.088701] env[62070]: value = "task-1121460" [ 722.088701] env[62070]: _type = "Task" [ 722.088701] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 722.103071] env[62070]: DEBUG oslo_vmware.api [None req-89ead6ea-31c2-4fec-ad52-a9d968c7ad21 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Task: {'id': task-1121460, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 722.107151] env[62070]: DEBUG oslo_vmware.api [None req-78392d05-754f-4818-8f15-eae4311c941b tempest-ServersTestManualDisk-1129474164 tempest-ServersTestManualDisk-1129474164-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]521f1e49-24f5-5d68-a27e-1ed640de9d8d, 'name': SearchDatastore_Task, 'duration_secs': 0.007718} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 722.107960] env[62070]: DEBUG oslo_concurrency.lockutils [None req-78392d05-754f-4818-8f15-eae4311c941b tempest-ServersTestManualDisk-1129474164 tempest-ServersTestManualDisk-1129474164-project-member] Releasing lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 722.107960] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-78392d05-754f-4818-8f15-eae4311c941b tempest-ServersTestManualDisk-1129474164 tempest-ServersTestManualDisk-1129474164-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore2] d0914f90-200c-4715-aaab-54beacf339b9/d0914f90-200c-4715-aaab-54beacf339b9.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 722.107960] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-041e20ef-b387-469a-9cb8-c8fdf25c1fda {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.117894] env[62070]: DEBUG oslo_vmware.api [None req-78392d05-754f-4818-8f15-eae4311c941b tempest-ServersTestManualDisk-1129474164 tempest-ServersTestManualDisk-1129474164-project-member] Waiting for the task: (returnval){ [ 722.117894] env[62070]: value = "task-1121461" [ 722.117894] env[62070]: _type = "Task" [ 722.117894] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 722.126650] env[62070]: DEBUG oslo_vmware.api [None req-78392d05-754f-4818-8f15-eae4311c941b tempest-ServersTestManualDisk-1129474164 tempest-ServersTestManualDisk-1129474164-project-member] Task: {'id': task-1121461, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 722.504077] env[62070]: DEBUG nova.network.neutron [None req-22f6b4d0-98d5-439d-a2a8-8f5bb53172a4 tempest-ServerTagsTestJSON-107248279 tempest-ServerTagsTestJSON-107248279-project-member] [instance: 30d782e4-30c7-41f6-b30d-95a9a59cf83c] Instance cache missing network info. {{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 722.600635] env[62070]: DEBUG oslo_vmware.api [None req-89ead6ea-31c2-4fec-ad52-a9d968c7ad21 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Task: {'id': task-1121460, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071841} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 722.600635] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-89ead6ea-31c2-4fec-ad52-a9d968c7ad21 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 71aead12-a182-40a7-b5a9-91c01271b800] Extended root virtual disk {{(pid=62070) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 722.600635] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-caa5b5dd-cc64-4ded-bc4d-2418435cc1db {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.639109] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-89ead6ea-31c2-4fec-ad52-a9d968c7ad21 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 71aead12-a182-40a7-b5a9-91c01271b800] Reconfiguring VM instance instance-00000022 to attach disk [datastore2] 71aead12-a182-40a7-b5a9-91c01271b800/71aead12-a182-40a7-b5a9-91c01271b800.vmdk or device None with type sparse {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 722.642899] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-99548f8e-0122-4a57-8379-ad49e6b3a883 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.659432] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de5ce839-a15e-4c3a-b9ea-3ea9aac944c4 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.677762] env[62070]: DEBUG oslo_vmware.api [None req-78392d05-754f-4818-8f15-eae4311c941b tempest-ServersTestManualDisk-1129474164 tempest-ServersTestManualDisk-1129474164-project-member] Task: {'id': task-1121461, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.537334} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 722.679269] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-78392d05-754f-4818-8f15-eae4311c941b tempest-ServersTestManualDisk-1129474164 tempest-ServersTestManualDisk-1129474164-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore2] d0914f90-200c-4715-aaab-54beacf339b9/d0914f90-200c-4715-aaab-54beacf339b9.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 722.679533] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-78392d05-754f-4818-8f15-eae4311c941b tempest-ServersTestManualDisk-1129474164 tempest-ServersTestManualDisk-1129474164-project-member] [instance: d0914f90-200c-4715-aaab-54beacf339b9] Extending root virtual disk to 1048576 {{(pid=62070) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 722.679834] env[62070]: DEBUG oslo_vmware.api [None req-89ead6ea-31c2-4fec-ad52-a9d968c7ad21 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Waiting for the task: (returnval){ [ 722.679834] env[62070]: value = "task-1121462" [ 722.679834] env[62070]: _type = "Task" [ 722.679834] env[62070]: } to complete. 
{{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 722.680060] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c98d8d54-e028-43be-8f29-2ebbf16ad9f3 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.687505] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47b95356-49df-4ea0-8d50-e4f388800655 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.691817] env[62070]: DEBUG oslo_vmware.api [None req-78392d05-754f-4818-8f15-eae4311c941b tempest-ServersTestManualDisk-1129474164 tempest-ServersTestManualDisk-1129474164-project-member] Waiting for the task: (returnval){ [ 722.691817] env[62070]: value = "task-1121463" [ 722.691817] env[62070]: _type = "Task" [ 722.691817] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 722.721886] env[62070]: DEBUG oslo_vmware.api [None req-89ead6ea-31c2-4fec-ad52-a9d968c7ad21 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Task: {'id': task-1121462, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 722.723361] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afad3d44-57d7-4aa2-ab93-d36443675462 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.729481] env[62070]: DEBUG nova.network.neutron [None req-22f6b4d0-98d5-439d-a2a8-8f5bb53172a4 tempest-ServerTagsTestJSON-107248279 tempest-ServerTagsTestJSON-107248279-project-member] [instance: 30d782e4-30c7-41f6-b30d-95a9a59cf83c] Updating instance_info_cache with network_info: [{"id": "02efa9f5-65b4-4a93-a5d1-a5a15f0d6da0", "address": "fa:16:3e:5c:56:09", "network": {"id": "66dc409f-aa65-4c01-a676-9721c8490d93", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-1492965002-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9542e7a0d2af459b9a7bd8ae65de8a8c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "10ff2092-e8eb-4768-ad4a-65a80560b447", "external-id": "nsx-vlan-transportzone-845", "segmentation_id": 845, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap02efa9f5-65", "ovs_interfaceid": "02efa9f5-65b4-4a93-a5d1-a5a15f0d6da0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 722.730710] env[62070]: DEBUG oslo_vmware.api [None req-78392d05-754f-4818-8f15-eae4311c941b tempest-ServersTestManualDisk-1129474164 tempest-ServersTestManualDisk-1129474164-project-member] Task: {'id': task-1121463, 'name': ExtendVirtualDisk_Task} 
progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 722.735952] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6525efa2-7064-497a-90c0-0c16a51d985e {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.751779] env[62070]: DEBUG nova.compute.provider_tree [None req-87135f2c-55d6-41ab-9b21-58289e78f833 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 723.024162] env[62070]: DEBUG nova.compute.manager [None req-9903219f-0926-4eb2-b4c0-4bfda725a93d tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 67e99ada-a8e6-4034-b19b-5b2cb883b735] Start spawning the instance on the hypervisor. {{(pid=62070) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 723.053750] env[62070]: DEBUG nova.virt.hardware [None req-9903219f-0926-4eb2-b4c0-4bfda725a93d tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T09:21:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T09:21:17Z,direct_url=,disk_format='vmdk',id=43ea607c-7ece-4601-9b11-75c6a16aa7dd,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9d42cb2bbadf40d6b35f237f71234611',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T09:21:18Z,virtual_size=,visibility=), allow threads: False {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 723.053995] env[62070]: DEBUG nova.virt.hardware [None req-9903219f-0926-4eb2-b4c0-4bfda725a93d tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Flavor limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 723.054159] env[62070]: DEBUG nova.virt.hardware [None req-9903219f-0926-4eb2-b4c0-4bfda725a93d tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Image limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 723.054333] env[62070]: DEBUG nova.virt.hardware [None req-9903219f-0926-4eb2-b4c0-4bfda725a93d tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Flavor pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 723.054472] env[62070]: DEBUG nova.virt.hardware [None req-9903219f-0926-4eb2-b4c0-4bfda725a93d tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Image pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 723.054611] env[62070]: DEBUG nova.virt.hardware [None req-9903219f-0926-4eb2-b4c0-4bfda725a93d 
tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 723.054807] env[62070]: DEBUG nova.virt.hardware [None req-9903219f-0926-4eb2-b4c0-4bfda725a93d tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 723.054957] env[62070]: DEBUG nova.virt.hardware [None req-9903219f-0926-4eb2-b4c0-4bfda725a93d tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 723.055129] env[62070]: DEBUG nova.virt.hardware [None req-9903219f-0926-4eb2-b4c0-4bfda725a93d tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Got 1 possible topologies {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 723.055285] env[62070]: DEBUG nova.virt.hardware [None req-9903219f-0926-4eb2-b4c0-4bfda725a93d tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 723.055491] env[62070]: DEBUG nova.virt.hardware [None req-9903219f-0926-4eb2-b4c0-4bfda725a93d tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 723.056340] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32488702-7ec1-4dc2-8000-ee60c1a26060 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.063902] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-608517b7-cfab-430b-b220-04650274bb7f {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.191612] env[62070]: DEBUG oslo_vmware.api [None req-89ead6ea-31c2-4fec-ad52-a9d968c7ad21 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Task: {'id': task-1121462, 'name': ReconfigVM_Task, 'duration_secs': 0.287153} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 723.191889] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-89ead6ea-31c2-4fec-ad52-a9d968c7ad21 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 71aead12-a182-40a7-b5a9-91c01271b800] Reconfigured VM instance instance-00000022 to attach disk [datastore2] 71aead12-a182-40a7-b5a9-91c01271b800/71aead12-a182-40a7-b5a9-91c01271b800.vmdk or device None with type sparse {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 723.192546] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-aec3cc54-a584-4bdc-bd33-9d829f8410ed {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.201574] env[62070]: DEBUG oslo_vmware.api [None req-78392d05-754f-4818-8f15-eae4311c941b tempest-ServersTestManualDisk-1129474164 tempest-ServersTestManualDisk-1129474164-project-member] Task: {'id': task-1121463, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067148} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 723.202794] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-78392d05-754f-4818-8f15-eae4311c941b tempest-ServersTestManualDisk-1129474164 tempest-ServersTestManualDisk-1129474164-project-member] [instance: d0914f90-200c-4715-aaab-54beacf339b9] Extended root virtual disk {{(pid=62070) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 723.203124] env[62070]: DEBUG oslo_vmware.api [None req-89ead6ea-31c2-4fec-ad52-a9d968c7ad21 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Waiting for the task: (returnval){ [ 723.203124] env[62070]: value = "task-1121464" [ 723.203124] env[62070]: _type = "Task" [ 723.203124] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 723.203755] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7f617c6-8e1f-4901-82ed-93a3f6e76120 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.213287] env[62070]: DEBUG oslo_vmware.api [None req-89ead6ea-31c2-4fec-ad52-a9d968c7ad21 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Task: {'id': task-1121464, 'name': Rename_Task} progress is 5%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 723.230528] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-78392d05-754f-4818-8f15-eae4311c941b tempest-ServersTestManualDisk-1129474164 tempest-ServersTestManualDisk-1129474164-project-member] [instance: d0914f90-200c-4715-aaab-54beacf339b9] Reconfiguring VM instance instance-00000023 to attach disk [datastore2] d0914f90-200c-4715-aaab-54beacf339b9/d0914f90-200c-4715-aaab-54beacf339b9.vmdk or device None with type sparse {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 723.230877] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-248e56e8-a1f0-439f-aef2-c3963273b4fe {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.244741] env[62070]: DEBUG oslo_concurrency.lockutils [None req-22f6b4d0-98d5-439d-a2a8-8f5bb53172a4 tempest-ServerTagsTestJSON-107248279 tempest-ServerTagsTestJSON-107248279-project-member] Releasing lock "refresh_cache-30d782e4-30c7-41f6-b30d-95a9a59cf83c" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 723.246057] env[62070]: DEBUG nova.compute.manager [None req-22f6b4d0-98d5-439d-a2a8-8f5bb53172a4 tempest-ServerTagsTestJSON-107248279 tempest-ServerTagsTestJSON-107248279-project-member] [instance: 30d782e4-30c7-41f6-b30d-95a9a59cf83c] Instance network_info: |[{"id": "02efa9f5-65b4-4a93-a5d1-a5a15f0d6da0", "address": "fa:16:3e:5c:56:09", "network": {"id": "66dc409f-aa65-4c01-a676-9721c8490d93", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-1492965002-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9542e7a0d2af459b9a7bd8ae65de8a8c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "10ff2092-e8eb-4768-ad4a-65a80560b447", "external-id": "nsx-vlan-transportzone-845", "segmentation_id": 845, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap02efa9f5-65", "ovs_interfaceid": "02efa9f5-65b4-4a93-a5d1-a5a15f0d6da0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 723.247279] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-22f6b4d0-98d5-439d-a2a8-8f5bb53172a4 tempest-ServerTagsTestJSON-107248279 tempest-ServerTagsTestJSON-107248279-project-member] [instance: 30d782e4-30c7-41f6-b30d-95a9a59cf83c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:5c:56:09', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '10ff2092-e8eb-4768-ad4a-65a80560b447', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '02efa9f5-65b4-4a93-a5d1-a5a15f0d6da0', 'vif_model': 'vmxnet3'}] {{(pid=62070) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 723.254585] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-22f6b4d0-98d5-439d-a2a8-8f5bb53172a4 
tempest-ServerTagsTestJSON-107248279 tempest-ServerTagsTestJSON-107248279-project-member] Creating folder: Project (9542e7a0d2af459b9a7bd8ae65de8a8c). Parent ref: group-v245319. {{(pid=62070) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 723.255329] env[62070]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5238829c-56da-441d-8d65-6712fa694c32 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.257588] env[62070]: DEBUG nova.scheduler.client.report [None req-87135f2c-55d6-41ab-9b21-58289e78f833 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 723.262552] env[62070]: DEBUG oslo_vmware.api [None req-78392d05-754f-4818-8f15-eae4311c941b tempest-ServersTestManualDisk-1129474164 tempest-ServersTestManualDisk-1129474164-project-member] Waiting for the task: (returnval){ [ 723.262552] env[62070]: value = "task-1121465" [ 723.262552] env[62070]: _type = "Task" [ 723.262552] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 723.271640] env[62070]: DEBUG oslo_vmware.api [None req-78392d05-754f-4818-8f15-eae4311c941b tempest-ServersTestManualDisk-1129474164 tempest-ServersTestManualDisk-1129474164-project-member] Task: {'id': task-1121465, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 723.273501] env[62070]: INFO nova.virt.vmwareapi.vm_util [None req-22f6b4d0-98d5-439d-a2a8-8f5bb53172a4 tempest-ServerTagsTestJSON-107248279 tempest-ServerTagsTestJSON-107248279-project-member] Created folder: Project (9542e7a0d2af459b9a7bd8ae65de8a8c) in parent group-v245319. [ 723.273690] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-22f6b4d0-98d5-439d-a2a8-8f5bb53172a4 tempest-ServerTagsTestJSON-107248279 tempest-ServerTagsTestJSON-107248279-project-member] Creating folder: Instances. Parent ref: group-v245339. {{(pid=62070) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 723.274185] env[62070]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-bde4afaf-7247-4f2e-88ee-8cdfc4a75d21 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.282608] env[62070]: INFO nova.virt.vmwareapi.vm_util [None req-22f6b4d0-98d5-439d-a2a8-8f5bb53172a4 tempest-ServerTagsTestJSON-107248279 tempest-ServerTagsTestJSON-107248279-project-member] Created folder: Instances in parent group-v245339. [ 723.282608] env[62070]: DEBUG oslo.service.loopingcall [None req-22f6b4d0-98d5-439d-a2a8-8f5bb53172a4 tempest-ServerTagsTestJSON-107248279 tempest-ServerTagsTestJSON-107248279-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 723.282843] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 30d782e4-30c7-41f6-b30d-95a9a59cf83c] Creating VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 723.283019] env[62070]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6166298c-f45b-43e6-8e05-9f1ac415ada6 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.302030] env[62070]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 723.302030] env[62070]: value = "task-1121468" [ 723.302030] env[62070]: _type = "Task" [ 723.302030] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 723.309657] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1121468, 'name': CreateVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 723.379589] env[62070]: DEBUG nova.compute.manager [req-9b30bbb2-61fa-4761-a426-cf62e8c5c398 req-680a5117-4af0-49f3-a700-0be8b36b4116 service nova] [instance: 30d782e4-30c7-41f6-b30d-95a9a59cf83c] Received event network-changed-02efa9f5-65b4-4a93-a5d1-a5a15f0d6da0 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 723.379777] env[62070]: DEBUG nova.compute.manager [req-9b30bbb2-61fa-4761-a426-cf62e8c5c398 req-680a5117-4af0-49f3-a700-0be8b36b4116 service nova] [instance: 30d782e4-30c7-41f6-b30d-95a9a59cf83c] Refreshing instance network info cache due to event network-changed-02efa9f5-65b4-4a93-a5d1-a5a15f0d6da0. {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 723.379992] env[62070]: DEBUG oslo_concurrency.lockutils [req-9b30bbb2-61fa-4761-a426-cf62e8c5c398 req-680a5117-4af0-49f3-a700-0be8b36b4116 service nova] Acquiring lock "refresh_cache-30d782e4-30c7-41f6-b30d-95a9a59cf83c" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 723.380152] env[62070]: DEBUG oslo_concurrency.lockutils [req-9b30bbb2-61fa-4761-a426-cf62e8c5c398 req-680a5117-4af0-49f3-a700-0be8b36b4116 service nova] Acquired lock "refresh_cache-30d782e4-30c7-41f6-b30d-95a9a59cf83c" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 723.380311] env[62070]: DEBUG nova.network.neutron [req-9b30bbb2-61fa-4761-a426-cf62e8c5c398 req-680a5117-4af0-49f3-a700-0be8b36b4116 service nova] [instance: 30d782e4-30c7-41f6-b30d-95a9a59cf83c] Refreshing network info cache for port 02efa9f5-65b4-4a93-a5d1-a5a15f0d6da0 {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 723.716111] env[62070]: DEBUG oslo_vmware.api [None req-89ead6ea-31c2-4fec-ad52-a9d968c7ad21 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Task: {'id': task-1121464, 'name': Rename_Task, 'duration_secs': 0.157244} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 723.716111] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-89ead6ea-31c2-4fec-ad52-a9d968c7ad21 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 71aead12-a182-40a7-b5a9-91c01271b800] Powering on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 723.716111] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b5d89f3d-c8af-4695-be08-ba49a0622d6c {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.721921] env[62070]: DEBUG oslo_vmware.api [None req-89ead6ea-31c2-4fec-ad52-a9d968c7ad21 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Waiting for the task: (returnval){ [ 723.721921] env[62070]: value = "task-1121469" [ 723.721921] env[62070]: _type = "Task" [ 723.721921] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 723.730228] env[62070]: DEBUG oslo_vmware.api [None req-89ead6ea-31c2-4fec-ad52-a9d968c7ad21 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Task: {'id': task-1121469, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 723.764385] env[62070]: DEBUG oslo_concurrency.lockutils [None req-87135f2c-55d6-41ab-9b21-58289e78f833 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.775s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 723.765058] env[62070]: DEBUG nova.compute.manager [None req-87135f2c-55d6-41ab-9b21-58289e78f833 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] [instance: 963feecc-ff58-4cbb-8d6f-3f9035337087] Start building networks asynchronously for instance. 
{{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 723.770734] env[62070]: DEBUG oslo_concurrency.lockutils [None req-0046a5be-b671-4771-8172-fdb6e585fc1d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 33.686s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 723.777024] env[62070]: INFO nova.compute.claims [None req-0046a5be-b671-4771-8172-fdb6e585fc1d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] [instance: 1c1730e5-88af-4c7f-8bcc-d494db2cd723] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 723.788563] env[62070]: DEBUG nova.network.neutron [None req-9903219f-0926-4eb2-b4c0-4bfda725a93d tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 67e99ada-a8e6-4034-b19b-5b2cb883b735] Successfully updated port: e01eb485-1347-4afb-b881-62797a5b84af {{(pid=62070) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 723.799982] env[62070]: DEBUG oslo_vmware.api [None req-78392d05-754f-4818-8f15-eae4311c941b tempest-ServersTestManualDisk-1129474164 tempest-ServersTestManualDisk-1129474164-project-member] Task: {'id': task-1121465, 'name': ReconfigVM_Task, 'duration_secs': 0.282079} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 723.800299] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-78392d05-754f-4818-8f15-eae4311c941b tempest-ServersTestManualDisk-1129474164 tempest-ServersTestManualDisk-1129474164-project-member] [instance: d0914f90-200c-4715-aaab-54beacf339b9] Reconfigured VM instance instance-00000023 to attach disk [datastore2] d0914f90-200c-4715-aaab-54beacf339b9/d0914f90-200c-4715-aaab-54beacf339b9.vmdk or device None with type sparse {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 723.800971] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-28773222-30ca-48e5-a613-4e2a4ee26f70 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.816077] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1121468, 'name': CreateVM_Task, 'duration_secs': 0.386895} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 723.816405] env[62070]: DEBUG oslo_vmware.api [None req-78392d05-754f-4818-8f15-eae4311c941b tempest-ServersTestManualDisk-1129474164 tempest-ServersTestManualDisk-1129474164-project-member] Waiting for the task: (returnval){ [ 723.816405] env[62070]: value = "task-1121470" [ 723.816405] env[62070]: _type = "Task" [ 723.816405] env[62070]: } to complete. 
{{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 723.816582] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 30d782e4-30c7-41f6-b30d-95a9a59cf83c] Created VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 723.820018] env[62070]: DEBUG oslo_concurrency.lockutils [None req-22f6b4d0-98d5-439d-a2a8-8f5bb53172a4 tempest-ServerTagsTestJSON-107248279 tempest-ServerTagsTestJSON-107248279-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 723.820018] env[62070]: DEBUG oslo_concurrency.lockutils [None req-22f6b4d0-98d5-439d-a2a8-8f5bb53172a4 tempest-ServerTagsTestJSON-107248279 tempest-ServerTagsTestJSON-107248279-project-member] Acquired lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 723.820018] env[62070]: DEBUG oslo_concurrency.lockutils [None req-22f6b4d0-98d5-439d-a2a8-8f5bb53172a4 tempest-ServerTagsTestJSON-107248279 tempest-ServerTagsTestJSON-107248279-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 723.821207] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ed2e79e4-429d-4f00-b0d2-eb43e1d96f25 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.828603] env[62070]: DEBUG oslo_vmware.api [None req-22f6b4d0-98d5-439d-a2a8-8f5bb53172a4 tempest-ServerTagsTestJSON-107248279 tempest-ServerTagsTestJSON-107248279-project-member] Waiting for the task: (returnval){ [ 723.828603] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]52153305-abd0-2408-66e6-433da72cd734" [ 723.828603] env[62070]: _type = "Task" [ 723.828603] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 723.830449] env[62070]: DEBUG oslo_vmware.api [None req-78392d05-754f-4818-8f15-eae4311c941b tempest-ServersTestManualDisk-1129474164 tempest-ServersTestManualDisk-1129474164-project-member] Task: {'id': task-1121470, 'name': Rename_Task} progress is 10%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 723.838448] env[62070]: DEBUG oslo_vmware.api [None req-22f6b4d0-98d5-439d-a2a8-8f5bb53172a4 tempest-ServerTagsTestJSON-107248279 tempest-ServerTagsTestJSON-107248279-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52153305-abd0-2408-66e6-433da72cd734, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 724.186040] env[62070]: DEBUG nova.network.neutron [req-9b30bbb2-61fa-4761-a426-cf62e8c5c398 req-680a5117-4af0-49f3-a700-0be8b36b4116 service nova] [instance: 30d782e4-30c7-41f6-b30d-95a9a59cf83c] Updated VIF entry in instance network info cache for port 02efa9f5-65b4-4a93-a5d1-a5a15f0d6da0. 
{{(pid=62070) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 724.186426] env[62070]: DEBUG nova.network.neutron [req-9b30bbb2-61fa-4761-a426-cf62e8c5c398 req-680a5117-4af0-49f3-a700-0be8b36b4116 service nova] [instance: 30d782e4-30c7-41f6-b30d-95a9a59cf83c] Updating instance_info_cache with network_info: [{"id": "02efa9f5-65b4-4a93-a5d1-a5a15f0d6da0", "address": "fa:16:3e:5c:56:09", "network": {"id": "66dc409f-aa65-4c01-a676-9721c8490d93", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-1492965002-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9542e7a0d2af459b9a7bd8ae65de8a8c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "10ff2092-e8eb-4768-ad4a-65a80560b447", "external-id": "nsx-vlan-transportzone-845", "segmentation_id": 845, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap02efa9f5-65", "ovs_interfaceid": "02efa9f5-65b4-4a93-a5d1-a5a15f0d6da0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 724.232579] env[62070]: DEBUG oslo_vmware.api [None req-89ead6ea-31c2-4fec-ad52-a9d968c7ad21 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Task: {'id': task-1121469, 'name': PowerOnVM_Task, 'duration_secs': 0.476242} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 724.232845] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-89ead6ea-31c2-4fec-ad52-a9d968c7ad21 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 71aead12-a182-40a7-b5a9-91c01271b800] Powered on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 724.233037] env[62070]: INFO nova.compute.manager [None req-89ead6ea-31c2-4fec-ad52-a9d968c7ad21 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 71aead12-a182-40a7-b5a9-91c01271b800] Took 11.00 seconds to spawn the instance on the hypervisor. 
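The repeated "Invoking <X>_Task ... Waiting for the task ... progress is N% ... completed successfully" entries above are oslo.vmware's task-polling pattern: the driver submits a vSphere task through the shared API session, and wait_for_task polls it (the _poll_task entries) until it reports success or failure. The following is a minimal illustrative sketch, not part of the captured log and not Nova's own vm_util/vmops code path, of driving the same CopyVirtualDisk_Task / ExtendVirtualDisk_Task sequence seen in task-1121459 and task-1121460 for instance 71aead12-a182-40a7-b5a9-91c01271b800 directly through oslo.vmware; the vCenter host and credentials are placeholders, while the datastore paths and the 1048576 KB extend size are taken from the log entries above.

    # Sketch only: placeholder vCenter endpoint and credentials; datastore
    # paths copied from the log entries above.
    from oslo_vmware import api, vim_util

    session = api.VMwareAPISession(
        'vc.example.test', 'user@vsphere.local', 'secret',   # placeholders
        api_retry_count=10, task_poll_interval=0.5)

    # Resolve a datacenter ref and the VirtualDiskManager managed object.
    dc_ref = vim_util.get_objects(
        session.vim, 'Datacenter', 1, ['name']).objects[0].obj
    disk_mgr = session.vim.service_content.virtualDiskManager

    src = ('[datastore2] devstack-image-cache_base/'
           '43ea607c-7ece-4601-9b11-75c6a16aa7dd/'
           '43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk')
    dst = ('[datastore2] 71aead12-a182-40a7-b5a9-91c01271b800/'
           '71aead12-a182-40a7-b5a9-91c01271b800.vmdk')

    # CopyVirtualDisk_Task: copy the cached image VMDK into the instance
    # folder, then block until the task completes.
    copy_task = session.invoke_api(
        session.vim, 'CopyVirtualDisk_Task', disk_mgr,
        sourceName=src, sourceDatacenter=dc_ref,
        destName=dst, destDatacenter=dc_ref)
    session.wait_for_task(copy_task)

    # ExtendVirtualDisk_Task: grow the copied root disk to 1048576 KB (1 GiB),
    # matching the "Extending root virtual disk to 1048576" entries.
    extend_task = session.invoke_api(
        session.vim, 'ExtendVirtualDisk_Task', disk_mgr,
        name=dst, datacenter=dc_ref, newCapacityKb=1048576, eagerZero=False)
    session.wait_for_task(extend_task)

In the log itself these calls are wrapped by nova.virt.vmwareapi.vm_util and vmops, and the subsequent ReconfigVM_Task attaches the copied disk to the VM before Rename_Task and PowerOnVM_Task bring the instance up.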
[ 724.233215] env[62070]: DEBUG nova.compute.manager [None req-89ead6ea-31c2-4fec-ad52-a9d968c7ad21 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 71aead12-a182-40a7-b5a9-91c01271b800] Checking state {{(pid=62070) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 724.234015] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36654774-8fa6-4b1f-916c-f59fa8ae7775 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.291436] env[62070]: DEBUG nova.compute.utils [None req-87135f2c-55d6-41ab-9b21-58289e78f833 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Using /dev/sd instead of None {{(pid=62070) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 724.292799] env[62070]: DEBUG oslo_concurrency.lockutils [None req-9903219f-0926-4eb2-b4c0-4bfda725a93d tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Acquiring lock "refresh_cache-67e99ada-a8e6-4034-b19b-5b2cb883b735" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 724.292906] env[62070]: DEBUG oslo_concurrency.lockutils [None req-9903219f-0926-4eb2-b4c0-4bfda725a93d tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Acquired lock "refresh_cache-67e99ada-a8e6-4034-b19b-5b2cb883b735" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 724.292979] env[62070]: DEBUG nova.network.neutron [None req-9903219f-0926-4eb2-b4c0-4bfda725a93d tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 67e99ada-a8e6-4034-b19b-5b2cb883b735] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 724.294458] env[62070]: DEBUG nova.compute.manager [None req-87135f2c-55d6-41ab-9b21-58289e78f833 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] [instance: 963feecc-ff58-4cbb-8d6f-3f9035337087] Allocating IP information in the background. {{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 724.294621] env[62070]: DEBUG nova.network.neutron [None req-87135f2c-55d6-41ab-9b21-58289e78f833 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] [instance: 963feecc-ff58-4cbb-8d6f-3f9035337087] allocate_for_instance() {{(pid=62070) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 724.326701] env[62070]: DEBUG oslo_vmware.api [None req-78392d05-754f-4818-8f15-eae4311c941b tempest-ServersTestManualDisk-1129474164 tempest-ServersTestManualDisk-1129474164-project-member] Task: {'id': task-1121470, 'name': Rename_Task, 'duration_secs': 0.14522} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 724.327019] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-78392d05-754f-4818-8f15-eae4311c941b tempest-ServersTestManualDisk-1129474164 tempest-ServersTestManualDisk-1129474164-project-member] [instance: d0914f90-200c-4715-aaab-54beacf339b9] Powering on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 724.327293] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e18c6b99-6301-403d-bbd8-303dcde0480f {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.331684] env[62070]: DEBUG nova.policy [None req-87135f2c-55d6-41ab-9b21-58289e78f833 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b47f99a84e374b8497de25413344f035', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2ecbd5f22c024de8a6b1c45096cb79a7', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62070) authorize /opt/stack/nova/nova/policy.py:201}} [ 724.336979] env[62070]: DEBUG oslo_vmware.api [None req-78392d05-754f-4818-8f15-eae4311c941b tempest-ServersTestManualDisk-1129474164 tempest-ServersTestManualDisk-1129474164-project-member] Waiting for the task: (returnval){ [ 724.336979] env[62070]: value = "task-1121471" [ 724.336979] env[62070]: _type = "Task" [ 724.336979] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 724.345406] env[62070]: DEBUG oslo_vmware.api [None req-22f6b4d0-98d5-439d-a2a8-8f5bb53172a4 tempest-ServerTagsTestJSON-107248279 tempest-ServerTagsTestJSON-107248279-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52153305-abd0-2408-66e6-433da72cd734, 'name': SearchDatastore_Task, 'duration_secs': 0.01877} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 724.345883] env[62070]: DEBUG oslo_concurrency.lockutils [None req-22f6b4d0-98d5-439d-a2a8-8f5bb53172a4 tempest-ServerTagsTestJSON-107248279 tempest-ServerTagsTestJSON-107248279-project-member] Releasing lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 724.346135] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-22f6b4d0-98d5-439d-a2a8-8f5bb53172a4 tempest-ServerTagsTestJSON-107248279 tempest-ServerTagsTestJSON-107248279-project-member] [instance: 30d782e4-30c7-41f6-b30d-95a9a59cf83c] Processing image 43ea607c-7ece-4601-9b11-75c6a16aa7dd {{(pid=62070) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 724.346404] env[62070]: DEBUG oslo_concurrency.lockutils [None req-22f6b4d0-98d5-439d-a2a8-8f5bb53172a4 tempest-ServerTagsTestJSON-107248279 tempest-ServerTagsTestJSON-107248279-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 724.346519] env[62070]: DEBUG oslo_concurrency.lockutils [None req-22f6b4d0-98d5-439d-a2a8-8f5bb53172a4 tempest-ServerTagsTestJSON-107248279 tempest-ServerTagsTestJSON-107248279-project-member] Acquired lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 724.346691] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-22f6b4d0-98d5-439d-a2a8-8f5bb53172a4 tempest-ServerTagsTestJSON-107248279 tempest-ServerTagsTestJSON-107248279-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 724.346936] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ad344c8d-e9cd-4ca7-a385-a836b7f913c5 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.351426] env[62070]: DEBUG oslo_vmware.api [None req-78392d05-754f-4818-8f15-eae4311c941b tempest-ServersTestManualDisk-1129474164 tempest-ServersTestManualDisk-1129474164-project-member] Task: {'id': task-1121471, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 724.358014] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-22f6b4d0-98d5-439d-a2a8-8f5bb53172a4 tempest-ServerTagsTestJSON-107248279 tempest-ServerTagsTestJSON-107248279-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 724.358014] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-22f6b4d0-98d5-439d-a2a8-8f5bb53172a4 tempest-ServerTagsTestJSON-107248279 tempest-ServerTagsTestJSON-107248279-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62070) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 724.360910] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6685b143-f9c5-455f-9f3b-a1c9e6d0fa4d {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.363602] env[62070]: DEBUG oslo_vmware.api [None req-22f6b4d0-98d5-439d-a2a8-8f5bb53172a4 tempest-ServerTagsTestJSON-107248279 tempest-ServerTagsTestJSON-107248279-project-member] Waiting for the task: (returnval){ [ 724.363602] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]52c6607e-5018-fd62-3f5a-9dff6bc55759" [ 724.363602] env[62070]: _type = "Task" [ 724.363602] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 724.371031] env[62070]: DEBUG oslo_vmware.api [None req-22f6b4d0-98d5-439d-a2a8-8f5bb53172a4 tempest-ServerTagsTestJSON-107248279 tempest-ServerTagsTestJSON-107248279-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52c6607e-5018-fd62-3f5a-9dff6bc55759, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 724.618339] env[62070]: DEBUG nova.network.neutron [None req-87135f2c-55d6-41ab-9b21-58289e78f833 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] [instance: 963feecc-ff58-4cbb-8d6f-3f9035337087] Successfully created port: eaa2c81a-41e9-4fe8-b2b0-9977156d1505 {{(pid=62070) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 724.689244] env[62070]: DEBUG oslo_concurrency.lockutils [req-9b30bbb2-61fa-4761-a426-cf62e8c5c398 req-680a5117-4af0-49f3-a700-0be8b36b4116 service nova] Releasing lock "refresh_cache-30d782e4-30c7-41f6-b30d-95a9a59cf83c" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 724.752700] env[62070]: INFO nova.compute.manager [None req-89ead6ea-31c2-4fec-ad52-a9d968c7ad21 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 71aead12-a182-40a7-b5a9-91c01271b800] Took 47.44 seconds to build instance. [ 724.795891] env[62070]: DEBUG nova.compute.manager [None req-87135f2c-55d6-41ab-9b21-58289e78f833 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] [instance: 963feecc-ff58-4cbb-8d6f-3f9035337087] Start building block device mappings for instance. {{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 724.850395] env[62070]: DEBUG oslo_vmware.api [None req-78392d05-754f-4818-8f15-eae4311c941b tempest-ServersTestManualDisk-1129474164 tempest-ServersTestManualDisk-1129474164-project-member] Task: {'id': task-1121471, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 724.876466] env[62070]: DEBUG oslo_vmware.api [None req-22f6b4d0-98d5-439d-a2a8-8f5bb53172a4 tempest-ServerTagsTestJSON-107248279 tempest-ServerTagsTestJSON-107248279-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52c6607e-5018-fd62-3f5a-9dff6bc55759, 'name': SearchDatastore_Task, 'duration_secs': 0.008092} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 724.877281] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f7541cd9-3ba2-4f1a-bf21-be55eaae62e6 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.884456] env[62070]: DEBUG nova.network.neutron [None req-9903219f-0926-4eb2-b4c0-4bfda725a93d tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 67e99ada-a8e6-4034-b19b-5b2cb883b735] Instance cache missing network info. {{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 724.887569] env[62070]: DEBUG oslo_vmware.api [None req-22f6b4d0-98d5-439d-a2a8-8f5bb53172a4 tempest-ServerTagsTestJSON-107248279 tempest-ServerTagsTestJSON-107248279-project-member] Waiting for the task: (returnval){ [ 724.887569] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]524abdc0-0d0f-3b99-5b3e-4ca55d60fb3a" [ 724.887569] env[62070]: _type = "Task" [ 724.887569] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 724.899701] env[62070]: DEBUG oslo_vmware.api [None req-22f6b4d0-98d5-439d-a2a8-8f5bb53172a4 tempest-ServerTagsTestJSON-107248279 tempest-ServerTagsTestJSON-107248279-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]524abdc0-0d0f-3b99-5b3e-4ca55d60fb3a, 'name': SearchDatastore_Task, 'duration_secs': 0.008631} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 724.899963] env[62070]: DEBUG oslo_concurrency.lockutils [None req-22f6b4d0-98d5-439d-a2a8-8f5bb53172a4 tempest-ServerTagsTestJSON-107248279 tempest-ServerTagsTestJSON-107248279-project-member] Releasing lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 724.903193] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-22f6b4d0-98d5-439d-a2a8-8f5bb53172a4 tempest-ServerTagsTestJSON-107248279 tempest-ServerTagsTestJSON-107248279-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore2] 30d782e4-30c7-41f6-b30d-95a9a59cf83c/30d782e4-30c7-41f6-b30d-95a9a59cf83c.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 724.903480] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4483fdf9-f231-4d98-bf7e-f73c48f2115f {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.912845] env[62070]: DEBUG oslo_vmware.api [None req-22f6b4d0-98d5-439d-a2a8-8f5bb53172a4 tempest-ServerTagsTestJSON-107248279 tempest-ServerTagsTestJSON-107248279-project-member] Waiting for the task: (returnval){ [ 724.912845] env[62070]: value = "task-1121472" [ 724.912845] env[62070]: _type = "Task" [ 724.912845] env[62070]: } to complete. 
{{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 724.921021] env[62070]: DEBUG oslo_vmware.api [None req-22f6b4d0-98d5-439d-a2a8-8f5bb53172a4 tempest-ServerTagsTestJSON-107248279 tempest-ServerTagsTestJSON-107248279-project-member] Task: {'id': task-1121472, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 725.132186] env[62070]: DEBUG nova.network.neutron [None req-9903219f-0926-4eb2-b4c0-4bfda725a93d tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 67e99ada-a8e6-4034-b19b-5b2cb883b735] Updating instance_info_cache with network_info: [{"id": "e01eb485-1347-4afb-b881-62797a5b84af", "address": "fa:16:3e:d1:62:ff", "network": {"id": "0d81bd04-b549-4e1f-97a2-0a0b9391dd3f", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-108214409-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c91e5eeeeb1742f499b2edaf76a93a3b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cf5bfbae-a882-4d34-be33-b31e274b3077", "external-id": "nsx-vlan-transportzone-556", "segmentation_id": 556, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape01eb485-13", "ovs_interfaceid": "e01eb485-1347-4afb-b881-62797a5b84af", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 725.256322] env[62070]: DEBUG oslo_concurrency.lockutils [None req-89ead6ea-31c2-4fec-ad52-a9d968c7ad21 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Lock "71aead12-a182-40a7-b5a9-91c01271b800" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 126.785s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 725.326770] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c05f60ba-e562-414e-8b28-3f2c488b98a3 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.336145] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae80323e-f676-492b-b11d-04b166e91d0e {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.348284] env[62070]: DEBUG oslo_vmware.api [None req-78392d05-754f-4818-8f15-eae4311c941b tempest-ServersTestManualDisk-1129474164 tempest-ServersTestManualDisk-1129474164-project-member] Task: {'id': task-1121471, 'name': PowerOnVM_Task, 'duration_secs': 0.774772} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 725.384262] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-78392d05-754f-4818-8f15-eae4311c941b tempest-ServersTestManualDisk-1129474164 tempest-ServersTestManualDisk-1129474164-project-member] [instance: d0914f90-200c-4715-aaab-54beacf339b9] Powered on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 725.384499] env[62070]: INFO nova.compute.manager [None req-78392d05-754f-4818-8f15-eae4311c941b tempest-ServersTestManualDisk-1129474164 tempest-ServersTestManualDisk-1129474164-project-member] [instance: d0914f90-200c-4715-aaab-54beacf339b9] Took 9.61 seconds to spawn the instance on the hypervisor. [ 725.384681] env[62070]: DEBUG nova.compute.manager [None req-78392d05-754f-4818-8f15-eae4311c941b tempest-ServersTestManualDisk-1129474164 tempest-ServersTestManualDisk-1129474164-project-member] [instance: d0914f90-200c-4715-aaab-54beacf339b9] Checking state {{(pid=62070) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 725.386283] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c97afc14-d74f-40c7-a980-585962dfc93a {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.390660] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ea3bb89-9f4e-44dc-9644-52024e1a3acb {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.407325] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32ed7512-184f-4cfc-bf5f-fef2aa742c04 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.427356] env[62070]: DEBUG nova.compute.provider_tree [None req-0046a5be-b671-4771-8172-fdb6e585fc1d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 725.434090] env[62070]: DEBUG oslo_vmware.api [None req-22f6b4d0-98d5-439d-a2a8-8f5bb53172a4 tempest-ServerTagsTestJSON-107248279 tempest-ServerTagsTestJSON-107248279-project-member] Task: {'id': task-1121472, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 725.498434] env[62070]: DEBUG nova.compute.manager [req-7f2361e6-c0af-4a98-912a-efe499198a9c req-8b0f8482-240e-4854-b190-05e50222ee7c service nova] [instance: 67e99ada-a8e6-4034-b19b-5b2cb883b735] Received event network-vif-plugged-e01eb485-1347-4afb-b881-62797a5b84af {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 725.498721] env[62070]: DEBUG oslo_concurrency.lockutils [req-7f2361e6-c0af-4a98-912a-efe499198a9c req-8b0f8482-240e-4854-b190-05e50222ee7c service nova] Acquiring lock "67e99ada-a8e6-4034-b19b-5b2cb883b735-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 725.498921] env[62070]: DEBUG oslo_concurrency.lockutils [req-7f2361e6-c0af-4a98-912a-efe499198a9c req-8b0f8482-240e-4854-b190-05e50222ee7c service nova] Lock "67e99ada-a8e6-4034-b19b-5b2cb883b735-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 725.499130] env[62070]: DEBUG oslo_concurrency.lockutils [req-7f2361e6-c0af-4a98-912a-efe499198a9c req-8b0f8482-240e-4854-b190-05e50222ee7c service nova] Lock "67e99ada-a8e6-4034-b19b-5b2cb883b735-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 725.499376] env[62070]: DEBUG nova.compute.manager [req-7f2361e6-c0af-4a98-912a-efe499198a9c req-8b0f8482-240e-4854-b190-05e50222ee7c service nova] [instance: 67e99ada-a8e6-4034-b19b-5b2cb883b735] No waiting events found dispatching network-vif-plugged-e01eb485-1347-4afb-b881-62797a5b84af {{(pid=62070) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 725.500030] env[62070]: WARNING nova.compute.manager [req-7f2361e6-c0af-4a98-912a-efe499198a9c req-8b0f8482-240e-4854-b190-05e50222ee7c service nova] [instance: 67e99ada-a8e6-4034-b19b-5b2cb883b735] Received unexpected event network-vif-plugged-e01eb485-1347-4afb-b881-62797a5b84af for instance with vm_state building and task_state spawning. [ 725.500030] env[62070]: DEBUG nova.compute.manager [req-7f2361e6-c0af-4a98-912a-efe499198a9c req-8b0f8482-240e-4854-b190-05e50222ee7c service nova] [instance: 67e99ada-a8e6-4034-b19b-5b2cb883b735] Received event network-changed-e01eb485-1347-4afb-b881-62797a5b84af {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 725.500030] env[62070]: DEBUG nova.compute.manager [req-7f2361e6-c0af-4a98-912a-efe499198a9c req-8b0f8482-240e-4854-b190-05e50222ee7c service nova] [instance: 67e99ada-a8e6-4034-b19b-5b2cb883b735] Refreshing instance network info cache due to event network-changed-e01eb485-1347-4afb-b881-62797a5b84af. 
{{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 725.500229] env[62070]: DEBUG oslo_concurrency.lockutils [req-7f2361e6-c0af-4a98-912a-efe499198a9c req-8b0f8482-240e-4854-b190-05e50222ee7c service nova] Acquiring lock "refresh_cache-67e99ada-a8e6-4034-b19b-5b2cb883b735" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 725.640115] env[62070]: DEBUG oslo_concurrency.lockutils [None req-9903219f-0926-4eb2-b4c0-4bfda725a93d tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Releasing lock "refresh_cache-67e99ada-a8e6-4034-b19b-5b2cb883b735" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 725.640115] env[62070]: DEBUG nova.compute.manager [None req-9903219f-0926-4eb2-b4c0-4bfda725a93d tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 67e99ada-a8e6-4034-b19b-5b2cb883b735] Instance network_info: |[{"id": "e01eb485-1347-4afb-b881-62797a5b84af", "address": "fa:16:3e:d1:62:ff", "network": {"id": "0d81bd04-b549-4e1f-97a2-0a0b9391dd3f", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-108214409-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c91e5eeeeb1742f499b2edaf76a93a3b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cf5bfbae-a882-4d34-be33-b31e274b3077", "external-id": "nsx-vlan-transportzone-556", "segmentation_id": 556, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape01eb485-13", "ovs_interfaceid": "e01eb485-1347-4afb-b881-62797a5b84af", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 725.640263] env[62070]: DEBUG oslo_concurrency.lockutils [req-7f2361e6-c0af-4a98-912a-efe499198a9c req-8b0f8482-240e-4854-b190-05e50222ee7c service nova] Acquired lock "refresh_cache-67e99ada-a8e6-4034-b19b-5b2cb883b735" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 725.640263] env[62070]: DEBUG nova.network.neutron [req-7f2361e6-c0af-4a98-912a-efe499198a9c req-8b0f8482-240e-4854-b190-05e50222ee7c service nova] [instance: 67e99ada-a8e6-4034-b19b-5b2cb883b735] Refreshing network info cache for port e01eb485-1347-4afb-b881-62797a5b84af {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 725.640263] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-9903219f-0926-4eb2-b4c0-4bfda725a93d tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 67e99ada-a8e6-4034-b19b-5b2cb883b735] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d1:62:ff', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'cf5bfbae-a882-4d34-be33-b31e274b3077', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 
'e01eb485-1347-4afb-b881-62797a5b84af', 'vif_model': 'vmxnet3'}] {{(pid=62070) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 725.648410] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-9903219f-0926-4eb2-b4c0-4bfda725a93d tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Creating folder: Project (c91e5eeeeb1742f499b2edaf76a93a3b). Parent ref: group-v245319. {{(pid=62070) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 725.651930] env[62070]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f8898603-245c-4bf6-b913-bbbcb5bf4557 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.662474] env[62070]: INFO nova.virt.vmwareapi.vm_util [None req-9903219f-0926-4eb2-b4c0-4bfda725a93d tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Created folder: Project (c91e5eeeeb1742f499b2edaf76a93a3b) in parent group-v245319. [ 725.662842] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-9903219f-0926-4eb2-b4c0-4bfda725a93d tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Creating folder: Instances. Parent ref: group-v245342. {{(pid=62070) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 725.663270] env[62070]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a8df64aa-2bf8-4fa1-9893-2947fe183ac2 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.672384] env[62070]: INFO nova.virt.vmwareapi.vm_util [None req-9903219f-0926-4eb2-b4c0-4bfda725a93d tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Created folder: Instances in parent group-v245342. [ 725.672755] env[62070]: DEBUG oslo.service.loopingcall [None req-9903219f-0926-4eb2-b4c0-4bfda725a93d tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 725.673068] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 67e99ada-a8e6-4034-b19b-5b2cb883b735] Creating VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 725.673396] env[62070]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b65c56c2-f430-42c3-8f55-943862b44c0e {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.692392] env[62070]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 725.692392] env[62070]: value = "task-1121475" [ 725.692392] env[62070]: _type = "Task" [ 725.692392] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 725.702454] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1121475, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 725.759018] env[62070]: DEBUG nova.compute.manager [None req-b307d371-7291-4b27-bac8-b0cebaeb9acc tempest-ServerDiagnosticsNegativeTest-1306946091 tempest-ServerDiagnosticsNegativeTest-1306946091-project-member] [instance: 42a5c5d8-5c3a-4568-b212-d87f2951a334] Starting instance... {{(pid=62070) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 725.780358] env[62070]: DEBUG nova.compute.manager [req-017d056d-3e7c-4f26-9db5-5e87f0a104a2 req-2d20dd55-766e-4fcc-ad93-ee81f1f3aaae service nova] [instance: 71aead12-a182-40a7-b5a9-91c01271b800] Received event network-changed-a3ed0957-14c2-4144-8d45-f4a0e5cb45ab {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 725.781016] env[62070]: DEBUG nova.compute.manager [req-017d056d-3e7c-4f26-9db5-5e87f0a104a2 req-2d20dd55-766e-4fcc-ad93-ee81f1f3aaae service nova] [instance: 71aead12-a182-40a7-b5a9-91c01271b800] Refreshing instance network info cache due to event network-changed-a3ed0957-14c2-4144-8d45-f4a0e5cb45ab. {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 725.781016] env[62070]: DEBUG oslo_concurrency.lockutils [req-017d056d-3e7c-4f26-9db5-5e87f0a104a2 req-2d20dd55-766e-4fcc-ad93-ee81f1f3aaae service nova] Acquiring lock "refresh_cache-71aead12-a182-40a7-b5a9-91c01271b800" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 725.782849] env[62070]: DEBUG oslo_concurrency.lockutils [req-017d056d-3e7c-4f26-9db5-5e87f0a104a2 req-2d20dd55-766e-4fcc-ad93-ee81f1f3aaae service nova] Acquired lock "refresh_cache-71aead12-a182-40a7-b5a9-91c01271b800" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 725.782849] env[62070]: DEBUG nova.network.neutron [req-017d056d-3e7c-4f26-9db5-5e87f0a104a2 req-2d20dd55-766e-4fcc-ad93-ee81f1f3aaae service nova] [instance: 71aead12-a182-40a7-b5a9-91c01271b800] Refreshing network info cache for port a3ed0957-14c2-4144-8d45-f4a0e5cb45ab {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 725.811960] env[62070]: DEBUG nova.compute.manager [None req-87135f2c-55d6-41ab-9b21-58289e78f833 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] [instance: 963feecc-ff58-4cbb-8d6f-3f9035337087] Start spawning the instance on the hypervisor. 
{{(pid=62070) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 725.846637] env[62070]: DEBUG nova.virt.hardware [None req-87135f2c-55d6-41ab-9b21-58289e78f833 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T09:21:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T09:21:17Z,direct_url=,disk_format='vmdk',id=43ea607c-7ece-4601-9b11-75c6a16aa7dd,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9d42cb2bbadf40d6b35f237f71234611',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T09:21:18Z,virtual_size=,visibility=), allow threads: False {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 725.846864] env[62070]: DEBUG nova.virt.hardware [None req-87135f2c-55d6-41ab-9b21-58289e78f833 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Flavor limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 725.847045] env[62070]: DEBUG nova.virt.hardware [None req-87135f2c-55d6-41ab-9b21-58289e78f833 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Image limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 725.847236] env[62070]: DEBUG nova.virt.hardware [None req-87135f2c-55d6-41ab-9b21-58289e78f833 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Flavor pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 725.847366] env[62070]: DEBUG nova.virt.hardware [None req-87135f2c-55d6-41ab-9b21-58289e78f833 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Image pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 725.847512] env[62070]: DEBUG nova.virt.hardware [None req-87135f2c-55d6-41ab-9b21-58289e78f833 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 725.847711] env[62070]: DEBUG nova.virt.hardware [None req-87135f2c-55d6-41ab-9b21-58289e78f833 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 725.848473] env[62070]: DEBUG nova.virt.hardware [None req-87135f2c-55d6-41ab-9b21-58289e78f833 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 725.848473] env[62070]: DEBUG nova.virt.hardware [None req-87135f2c-55d6-41ab-9b21-58289e78f833 
tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Got 1 possible topologies {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 725.848473] env[62070]: DEBUG nova.virt.hardware [None req-87135f2c-55d6-41ab-9b21-58289e78f833 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 725.848473] env[62070]: DEBUG nova.virt.hardware [None req-87135f2c-55d6-41ab-9b21-58289e78f833 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 725.849340] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09a50ad4-5ead-4d45-89ce-3576bc468600 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.859711] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83c6cd2d-ea2f-45d3-b4bb-f2cd882b36ae {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.921445] env[62070]: INFO nova.compute.manager [None req-78392d05-754f-4818-8f15-eae4311c941b tempest-ServersTestManualDisk-1129474164 tempest-ServersTestManualDisk-1129474164-project-member] [instance: d0914f90-200c-4715-aaab-54beacf339b9] Took 46.63 seconds to build instance. [ 725.929751] env[62070]: DEBUG nova.scheduler.client.report [None req-0046a5be-b671-4771-8172-fdb6e585fc1d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 725.932765] env[62070]: DEBUG oslo_vmware.api [None req-22f6b4d0-98d5-439d-a2a8-8f5bb53172a4 tempest-ServerTagsTestJSON-107248279 tempest-ServerTagsTestJSON-107248279-project-member] Task: {'id': task-1121472, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.729182} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 725.933632] env[62070]: DEBUG nova.network.neutron [req-7f2361e6-c0af-4a98-912a-efe499198a9c req-8b0f8482-240e-4854-b190-05e50222ee7c service nova] [instance: 67e99ada-a8e6-4034-b19b-5b2cb883b735] Updated VIF entry in instance network info cache for port e01eb485-1347-4afb-b881-62797a5b84af. 
{{(pid=62070) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 725.934856] env[62070]: DEBUG nova.network.neutron [req-7f2361e6-c0af-4a98-912a-efe499198a9c req-8b0f8482-240e-4854-b190-05e50222ee7c service nova] [instance: 67e99ada-a8e6-4034-b19b-5b2cb883b735] Updating instance_info_cache with network_info: [{"id": "e01eb485-1347-4afb-b881-62797a5b84af", "address": "fa:16:3e:d1:62:ff", "network": {"id": "0d81bd04-b549-4e1f-97a2-0a0b9391dd3f", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-108214409-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c91e5eeeeb1742f499b2edaf76a93a3b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cf5bfbae-a882-4d34-be33-b31e274b3077", "external-id": "nsx-vlan-transportzone-556", "segmentation_id": 556, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape01eb485-13", "ovs_interfaceid": "e01eb485-1347-4afb-b881-62797a5b84af", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 725.935268] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-22f6b4d0-98d5-439d-a2a8-8f5bb53172a4 tempest-ServerTagsTestJSON-107248279 tempest-ServerTagsTestJSON-107248279-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore2] 30d782e4-30c7-41f6-b30d-95a9a59cf83c/30d782e4-30c7-41f6-b30d-95a9a59cf83c.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 725.935397] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-22f6b4d0-98d5-439d-a2a8-8f5bb53172a4 tempest-ServerTagsTestJSON-107248279 tempest-ServerTagsTestJSON-107248279-project-member] [instance: 30d782e4-30c7-41f6-b30d-95a9a59cf83c] Extending root virtual disk to 1048576 {{(pid=62070) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 725.935592] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0caa1fe0-0caa-4930-9085-8548d3810637 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.943463] env[62070]: DEBUG oslo_vmware.api [None req-22f6b4d0-98d5-439d-a2a8-8f5bb53172a4 tempest-ServerTagsTestJSON-107248279 tempest-ServerTagsTestJSON-107248279-project-member] Waiting for the task: (returnval){ [ 725.943463] env[62070]: value = "task-1121476" [ 725.943463] env[62070]: _type = "Task" [ 725.943463] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 725.955052] env[62070]: DEBUG oslo_vmware.api [None req-22f6b4d0-98d5-439d-a2a8-8f5bb53172a4 tempest-ServerTagsTestJSON-107248279 tempest-ServerTagsTestJSON-107248279-project-member] Task: {'id': task-1121476, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 726.205632] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1121475, 'name': CreateVM_Task} progress is 99%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 726.290079] env[62070]: DEBUG oslo_concurrency.lockutils [None req-b307d371-7291-4b27-bac8-b0cebaeb9acc tempest-ServerDiagnosticsNegativeTest-1306946091 tempest-ServerDiagnosticsNegativeTest-1306946091-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 726.408256] env[62070]: DEBUG nova.network.neutron [None req-87135f2c-55d6-41ab-9b21-58289e78f833 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] [instance: 963feecc-ff58-4cbb-8d6f-3f9035337087] Successfully updated port: eaa2c81a-41e9-4fe8-b2b0-9977156d1505 {{(pid=62070) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 726.424605] env[62070]: DEBUG oslo_concurrency.lockutils [None req-78392d05-754f-4818-8f15-eae4311c941b tempest-ServersTestManualDisk-1129474164 tempest-ServersTestManualDisk-1129474164-project-member] Lock "d0914f90-200c-4715-aaab-54beacf339b9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 121.534s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 726.436214] env[62070]: DEBUG oslo_concurrency.lockutils [None req-0046a5be-b671-4771-8172-fdb6e585fc1d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.668s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 726.436948] env[62070]: DEBUG nova.compute.manager [None req-0046a5be-b671-4771-8172-fdb6e585fc1d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] [instance: 1c1730e5-88af-4c7f-8bcc-d494db2cd723] Start building networks asynchronously for instance. 
{{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 726.440718] env[62070]: DEBUG oslo_concurrency.lockutils [None req-0046a5be-b671-4771-8172-fdb6e585fc1d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 12.989s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 726.442217] env[62070]: INFO nova.compute.claims [None req-0046a5be-b671-4771-8172-fdb6e585fc1d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] [instance: 076aed5b-4b08-4f3b-a940-d9cd95c32e57] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 726.444813] env[62070]: DEBUG oslo_concurrency.lockutils [req-7f2361e6-c0af-4a98-912a-efe499198a9c req-8b0f8482-240e-4854-b190-05e50222ee7c service nova] Releasing lock "refresh_cache-67e99ada-a8e6-4034-b19b-5b2cb883b735" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 726.455535] env[62070]: DEBUG oslo_vmware.api [None req-22f6b4d0-98d5-439d-a2a8-8f5bb53172a4 tempest-ServerTagsTestJSON-107248279 tempest-ServerTagsTestJSON-107248279-project-member] Task: {'id': task-1121476, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070751} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 726.459085] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-22f6b4d0-98d5-439d-a2a8-8f5bb53172a4 tempest-ServerTagsTestJSON-107248279 tempest-ServerTagsTestJSON-107248279-project-member] [instance: 30d782e4-30c7-41f6-b30d-95a9a59cf83c] Extended root virtual disk {{(pid=62070) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 726.459085] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14c95c9e-6c1c-4087-9f80-1caba3168abe {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.480255] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-22f6b4d0-98d5-439d-a2a8-8f5bb53172a4 tempest-ServerTagsTestJSON-107248279 tempest-ServerTagsTestJSON-107248279-project-member] [instance: 30d782e4-30c7-41f6-b30d-95a9a59cf83c] Reconfiguring VM instance instance-00000024 to attach disk [datastore2] 30d782e4-30c7-41f6-b30d-95a9a59cf83c/30d782e4-30c7-41f6-b30d-95a9a59cf83c.vmdk or device None with type sparse {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 726.480814] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4d3bc13b-ce36-4de4-80ce-52b997645264 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.507913] env[62070]: DEBUG oslo_vmware.api [None req-22f6b4d0-98d5-439d-a2a8-8f5bb53172a4 tempest-ServerTagsTestJSON-107248279 tempest-ServerTagsTestJSON-107248279-project-member] Waiting for the task: (returnval){ [ 726.507913] env[62070]: value = "task-1121477" [ 726.507913] env[62070]: _type = "Task" [ 726.507913] env[62070]: } to complete. 
{{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 726.517327] env[62070]: DEBUG oslo_vmware.api [None req-22f6b4d0-98d5-439d-a2a8-8f5bb53172a4 tempest-ServerTagsTestJSON-107248279 tempest-ServerTagsTestJSON-107248279-project-member] Task: {'id': task-1121477, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 726.663109] env[62070]: DEBUG nova.network.neutron [req-017d056d-3e7c-4f26-9db5-5e87f0a104a2 req-2d20dd55-766e-4fcc-ad93-ee81f1f3aaae service nova] [instance: 71aead12-a182-40a7-b5a9-91c01271b800] Updated VIF entry in instance network info cache for port a3ed0957-14c2-4144-8d45-f4a0e5cb45ab. {{(pid=62070) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 726.663559] env[62070]: DEBUG nova.network.neutron [req-017d056d-3e7c-4f26-9db5-5e87f0a104a2 req-2d20dd55-766e-4fcc-ad93-ee81f1f3aaae service nova] [instance: 71aead12-a182-40a7-b5a9-91c01271b800] Updating instance_info_cache with network_info: [{"id": "a3ed0957-14c2-4144-8d45-f4a0e5cb45ab", "address": "fa:16:3e:3c:6a:3d", "network": {"id": "1ca75409-ae3f-4837-a2b6-ed2445253336", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1883034081-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.208", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f29ac48ab6544ec0bd1d210aec05dbc5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1559ce49-7345-443f-bf02-4bfeb88356ef", "external-id": "nsx-vlan-transportzone-670", "segmentation_id": 670, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa3ed0957-14", "ovs_interfaceid": "a3ed0957-14c2-4144-8d45-f4a0e5cb45ab", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 726.703948] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1121475, 'name': CreateVM_Task, 'duration_secs': 0.661775} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 726.704178] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 67e99ada-a8e6-4034-b19b-5b2cb883b735] Created VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 726.704889] env[62070]: DEBUG oslo_concurrency.lockutils [None req-9903219f-0926-4eb2-b4c0-4bfda725a93d tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 726.705105] env[62070]: DEBUG oslo_concurrency.lockutils [None req-9903219f-0926-4eb2-b4c0-4bfda725a93d tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Acquired lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 726.705458] env[62070]: DEBUG oslo_concurrency.lockutils [None req-9903219f-0926-4eb2-b4c0-4bfda725a93d tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 726.705739] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1aaacfb2-422e-4e39-9739-7e3f8439c5f3 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.711031] env[62070]: DEBUG oslo_vmware.api [None req-9903219f-0926-4eb2-b4c0-4bfda725a93d tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Waiting for the task: (returnval){ [ 726.711031] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]5207441f-e733-c7cd-7aad-b3c3b39da65d" [ 726.711031] env[62070]: _type = "Task" [ 726.711031] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 726.719631] env[62070]: DEBUG oslo_vmware.api [None req-9903219f-0926-4eb2-b4c0-4bfda725a93d tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]5207441f-e733-c7cd-7aad-b3c3b39da65d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 726.912813] env[62070]: DEBUG oslo_concurrency.lockutils [None req-87135f2c-55d6-41ab-9b21-58289e78f833 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Acquiring lock "refresh_cache-963feecc-ff58-4cbb-8d6f-3f9035337087" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 726.913042] env[62070]: DEBUG oslo_concurrency.lockutils [None req-87135f2c-55d6-41ab-9b21-58289e78f833 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Acquired lock "refresh_cache-963feecc-ff58-4cbb-8d6f-3f9035337087" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 726.913237] env[62070]: DEBUG nova.network.neutron [None req-87135f2c-55d6-41ab-9b21-58289e78f833 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] [instance: 963feecc-ff58-4cbb-8d6f-3f9035337087] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 726.927492] env[62070]: DEBUG nova.compute.manager [None req-3add793a-e278-4147-906b-0dddec8a7d40 tempest-ServerAddressesTestJSON-560794175 tempest-ServerAddressesTestJSON-560794175-project-member] [instance: a3fcb849-b015-43aa-8f95-0d4a87e2cecc] Starting instance... {{(pid=62070) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 726.946908] env[62070]: DEBUG nova.compute.utils [None req-0046a5be-b671-4771-8172-fdb6e585fc1d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Using /dev/sd instead of None {{(pid=62070) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 726.953076] env[62070]: DEBUG nova.compute.manager [None req-0046a5be-b671-4771-8172-fdb6e585fc1d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] [instance: 1c1730e5-88af-4c7f-8bcc-d494db2cd723] Allocating IP information in the background. {{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 726.953076] env[62070]: DEBUG nova.network.neutron [None req-0046a5be-b671-4771-8172-fdb6e585fc1d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] [instance: 1c1730e5-88af-4c7f-8bcc-d494db2cd723] allocate_for_instance() {{(pid=62070) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 727.020416] env[62070]: DEBUG oslo_vmware.api [None req-22f6b4d0-98d5-439d-a2a8-8f5bb53172a4 tempest-ServerTagsTestJSON-107248279 tempest-ServerTagsTestJSON-107248279-project-member] Task: {'id': task-1121477, 'name': ReconfigVM_Task, 'duration_secs': 0.259186} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 727.020936] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-22f6b4d0-98d5-439d-a2a8-8f5bb53172a4 tempest-ServerTagsTestJSON-107248279 tempest-ServerTagsTestJSON-107248279-project-member] [instance: 30d782e4-30c7-41f6-b30d-95a9a59cf83c] Reconfigured VM instance instance-00000024 to attach disk [datastore2] 30d782e4-30c7-41f6-b30d-95a9a59cf83c/30d782e4-30c7-41f6-b30d-95a9a59cf83c.vmdk or device None with type sparse {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 727.021677] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-edc489be-ee25-4502-8880-6952423ba895 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.029409] env[62070]: DEBUG oslo_vmware.api [None req-22f6b4d0-98d5-439d-a2a8-8f5bb53172a4 tempest-ServerTagsTestJSON-107248279 tempest-ServerTagsTestJSON-107248279-project-member] Waiting for the task: (returnval){ [ 727.029409] env[62070]: value = "task-1121478" [ 727.029409] env[62070]: _type = "Task" [ 727.029409] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 727.039180] env[62070]: DEBUG oslo_vmware.api [None req-22f6b4d0-98d5-439d-a2a8-8f5bb53172a4 tempest-ServerTagsTestJSON-107248279 tempest-ServerTagsTestJSON-107248279-project-member] Task: {'id': task-1121478, 'name': Rename_Task} progress is 5%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 727.081305] env[62070]: DEBUG nova.policy [None req-0046a5be-b671-4771-8172-fdb6e585fc1d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a0162190099744eba0d646a05de23435', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3e9dab208bda46418b994df4359da404', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62070) authorize /opt/stack/nova/nova/policy.py:201}} [ 727.168427] env[62070]: DEBUG oslo_concurrency.lockutils [req-017d056d-3e7c-4f26-9db5-5e87f0a104a2 req-2d20dd55-766e-4fcc-ad93-ee81f1f3aaae service nova] Releasing lock "refresh_cache-71aead12-a182-40a7-b5a9-91c01271b800" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 727.226229] env[62070]: DEBUG oslo_vmware.api [None req-9903219f-0926-4eb2-b4c0-4bfda725a93d tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]5207441f-e733-c7cd-7aad-b3c3b39da65d, 'name': SearchDatastore_Task, 'duration_secs': 0.010971} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 727.227140] env[62070]: DEBUG oslo_concurrency.lockutils [None req-9903219f-0926-4eb2-b4c0-4bfda725a93d tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Releasing lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 727.227474] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-9903219f-0926-4eb2-b4c0-4bfda725a93d tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 67e99ada-a8e6-4034-b19b-5b2cb883b735] Processing image 43ea607c-7ece-4601-9b11-75c6a16aa7dd {{(pid=62070) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 727.228108] env[62070]: DEBUG oslo_concurrency.lockutils [None req-9903219f-0926-4eb2-b4c0-4bfda725a93d tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 727.228381] env[62070]: DEBUG oslo_concurrency.lockutils [None req-9903219f-0926-4eb2-b4c0-4bfda725a93d tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Acquired lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 727.228669] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-9903219f-0926-4eb2-b4c0-4bfda725a93d tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 727.229054] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-04b999fb-4d53-48b8-85f1-fcdab988f226 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.238877] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-9903219f-0926-4eb2-b4c0-4bfda725a93d tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 727.239173] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-9903219f-0926-4eb2-b4c0-4bfda725a93d tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62070) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 727.240264] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-570994c5-660d-45eb-baa7-7ce0d389f42f {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.246682] env[62070]: DEBUG oslo_vmware.api [None req-9903219f-0926-4eb2-b4c0-4bfda725a93d tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Waiting for the task: (returnval){ [ 727.246682] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]527de002-30e1-58c8-365e-faa5842d1fc4" [ 727.246682] env[62070]: _type = "Task" [ 727.246682] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 727.258529] env[62070]: DEBUG oslo_vmware.api [None req-9903219f-0926-4eb2-b4c0-4bfda725a93d tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]527de002-30e1-58c8-365e-faa5842d1fc4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 727.452125] env[62070]: DEBUG oslo_concurrency.lockutils [None req-3add793a-e278-4147-906b-0dddec8a7d40 tempest-ServerAddressesTestJSON-560794175 tempest-ServerAddressesTestJSON-560794175-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 727.452817] env[62070]: DEBUG nova.compute.manager [None req-0046a5be-b671-4771-8172-fdb6e585fc1d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] [instance: 1c1730e5-88af-4c7f-8bcc-d494db2cd723] Start building block device mappings for instance. {{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 727.483517] env[62070]: DEBUG nova.network.neutron [None req-87135f2c-55d6-41ab-9b21-58289e78f833 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] [instance: 963feecc-ff58-4cbb-8d6f-3f9035337087] Instance cache missing network info. {{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 727.541230] env[62070]: DEBUG oslo_vmware.api [None req-22f6b4d0-98d5-439d-a2a8-8f5bb53172a4 tempest-ServerTagsTestJSON-107248279 tempest-ServerTagsTestJSON-107248279-project-member] Task: {'id': task-1121478, 'name': Rename_Task, 'duration_secs': 0.158141} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 727.541687] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-22f6b4d0-98d5-439d-a2a8-8f5bb53172a4 tempest-ServerTagsTestJSON-107248279 tempest-ServerTagsTestJSON-107248279-project-member] [instance: 30d782e4-30c7-41f6-b30d-95a9a59cf83c] Powering on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 727.542055] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f6055e81-74dd-47e1-897c-917d37ef33aa {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.548259] env[62070]: DEBUG oslo_vmware.api [None req-22f6b4d0-98d5-439d-a2a8-8f5bb53172a4 tempest-ServerTagsTestJSON-107248279 tempest-ServerTagsTestJSON-107248279-project-member] Waiting for the task: (returnval){ [ 727.548259] env[62070]: value = "task-1121479" [ 727.548259] env[62070]: _type = "Task" [ 727.548259] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 727.558119] env[62070]: DEBUG nova.compute.manager [req-1acc62b5-a65e-432d-88ce-7980a2a187ec req-04e93e1c-be89-4cd9-a8eb-814d477e054e service nova] [instance: 963feecc-ff58-4cbb-8d6f-3f9035337087] Received event network-vif-plugged-eaa2c81a-41e9-4fe8-b2b0-9977156d1505 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 727.558647] env[62070]: DEBUG oslo_concurrency.lockutils [req-1acc62b5-a65e-432d-88ce-7980a2a187ec req-04e93e1c-be89-4cd9-a8eb-814d477e054e service nova] Acquiring lock "963feecc-ff58-4cbb-8d6f-3f9035337087-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 727.559049] env[62070]: DEBUG oslo_concurrency.lockutils [req-1acc62b5-a65e-432d-88ce-7980a2a187ec req-04e93e1c-be89-4cd9-a8eb-814d477e054e service nova] Lock "963feecc-ff58-4cbb-8d6f-3f9035337087-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 727.559265] env[62070]: DEBUG oslo_concurrency.lockutils [req-1acc62b5-a65e-432d-88ce-7980a2a187ec req-04e93e1c-be89-4cd9-a8eb-814d477e054e service nova] Lock "963feecc-ff58-4cbb-8d6f-3f9035337087-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 727.559480] env[62070]: DEBUG nova.compute.manager [req-1acc62b5-a65e-432d-88ce-7980a2a187ec req-04e93e1c-be89-4cd9-a8eb-814d477e054e service nova] [instance: 963feecc-ff58-4cbb-8d6f-3f9035337087] No waiting events found dispatching network-vif-plugged-eaa2c81a-41e9-4fe8-b2b0-9977156d1505 {{(pid=62070) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 727.559679] env[62070]: WARNING nova.compute.manager [req-1acc62b5-a65e-432d-88ce-7980a2a187ec req-04e93e1c-be89-4cd9-a8eb-814d477e054e service nova] [instance: 963feecc-ff58-4cbb-8d6f-3f9035337087] Received unexpected event network-vif-plugged-eaa2c81a-41e9-4fe8-b2b0-9977156d1505 for instance with vm_state building and task_state spawning. 
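Two patterns repeat throughout the entries above and are worth recognizing when reading this trace: the "Acquiring lock" / "acquired" / "released ... held N s" triples are emitted by oslo_concurrency.lockutils around critical sections such as the refresh_cache-<instance-uuid> network-info refresh or the <instance-uuid>-events queue, and the "Task: {...} progress is N%" lines come from the compute driver polling a vCenter task until it completes. The sketch below is a minimal, simplified illustration of that combination, not Nova or oslo.vmware code; refresh_network_cache, get_task_state and POLL_INTERVAL are hypothetical stand-ins.

```python
# Simplified illustration of the lock-guarded cache refresh and the task
# polling loop reflected in the log lines above. Only lockutils.lock() is a
# real API here; the callables passed in are hypothetical.
import time

from oslo_concurrency import lockutils

POLL_INTERVAL = 0.5  # seconds between progress checks (assumed value)


def refresh_cache(instance_uuid, refresh_network_cache):
    # Mirrors the "refresh_cache-<uuid>" lock names seen in the log entries.
    with lockutils.lock('refresh_cache-%s' % instance_uuid):
        return refresh_network_cache(instance_uuid)


def wait_for_task(task_id, get_task_state):
    # get_task_state(task_id) -> (state, progress), e.g. ('running', 45)
    while True:
        state, progress = get_task_state(task_id)
        if state == 'success':
            return
        if state == 'error':
            raise RuntimeError('task %s failed' % task_id)
        print('Task %s progress is %d%%.' % (task_id, progress))
        time.sleep(POLL_INTERVAL)
```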
[ 727.560137] env[62070]: DEBUG nova.compute.manager [req-1acc62b5-a65e-432d-88ce-7980a2a187ec req-04e93e1c-be89-4cd9-a8eb-814d477e054e service nova] [instance: 963feecc-ff58-4cbb-8d6f-3f9035337087] Received event network-changed-eaa2c81a-41e9-4fe8-b2b0-9977156d1505 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 727.560617] env[62070]: DEBUG nova.compute.manager [req-1acc62b5-a65e-432d-88ce-7980a2a187ec req-04e93e1c-be89-4cd9-a8eb-814d477e054e service nova] [instance: 963feecc-ff58-4cbb-8d6f-3f9035337087] Refreshing instance network info cache due to event network-changed-eaa2c81a-41e9-4fe8-b2b0-9977156d1505. {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 727.560835] env[62070]: DEBUG oslo_concurrency.lockutils [req-1acc62b5-a65e-432d-88ce-7980a2a187ec req-04e93e1c-be89-4cd9-a8eb-814d477e054e service nova] Acquiring lock "refresh_cache-963feecc-ff58-4cbb-8d6f-3f9035337087" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 727.565898] env[62070]: DEBUG oslo_vmware.api [None req-22f6b4d0-98d5-439d-a2a8-8f5bb53172a4 tempest-ServerTagsTestJSON-107248279 tempest-ServerTagsTestJSON-107248279-project-member] Task: {'id': task-1121479, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 727.765747] env[62070]: DEBUG oslo_vmware.api [None req-9903219f-0926-4eb2-b4c0-4bfda725a93d tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]527de002-30e1-58c8-365e-faa5842d1fc4, 'name': SearchDatastore_Task, 'duration_secs': 0.008319} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 727.773023] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7c10f85c-e584-4dd0-9095-e5dde2614fd1 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.775568] env[62070]: DEBUG oslo_vmware.api [None req-9903219f-0926-4eb2-b4c0-4bfda725a93d tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Waiting for the task: (returnval){ [ 727.775568] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]52f03279-0d28-1769-f532-1c08081799d4" [ 727.775568] env[62070]: _type = "Task" [ 727.775568] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 727.789525] env[62070]: DEBUG oslo_vmware.api [None req-9903219f-0926-4eb2-b4c0-4bfda725a93d tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52f03279-0d28-1769-f532-1c08081799d4, 'name': SearchDatastore_Task, 'duration_secs': 0.009555} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 727.789828] env[62070]: DEBUG oslo_concurrency.lockutils [None req-9903219f-0926-4eb2-b4c0-4bfda725a93d tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Releasing lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 727.790113] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-9903219f-0926-4eb2-b4c0-4bfda725a93d tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore1] 67e99ada-a8e6-4034-b19b-5b2cb883b735/67e99ada-a8e6-4034-b19b-5b2cb883b735.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 727.790449] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-fc741cdd-e131-464e-82ea-d97bda26c9ad {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.799695] env[62070]: DEBUG oslo_vmware.api [None req-9903219f-0926-4eb2-b4c0-4bfda725a93d tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Waiting for the task: (returnval){ [ 727.799695] env[62070]: value = "task-1121480" [ 727.799695] env[62070]: _type = "Task" [ 727.799695] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 727.810861] env[62070]: DEBUG oslo_vmware.api [None req-9903219f-0926-4eb2-b4c0-4bfda725a93d tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': task-1121480, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 727.847845] env[62070]: DEBUG nova.network.neutron [None req-87135f2c-55d6-41ab-9b21-58289e78f833 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] [instance: 963feecc-ff58-4cbb-8d6f-3f9035337087] Updating instance_info_cache with network_info: [{"id": "eaa2c81a-41e9-4fe8-b2b0-9977156d1505", "address": "fa:16:3e:eb:d9:37", "network": {"id": "08004b49-dbc2-4186-9e28-4268e947e8ee", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-2022236674-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "2ecbd5f22c024de8a6b1c45096cb79a7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69e41c97-4d75-4041-ae71-321e7e9d480b", "external-id": "nsx-vlan-transportzone-483", "segmentation_id": 483, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeaa2c81a-41", "ovs_interfaceid": "eaa2c81a-41e9-4fe8-b2b0-9977156d1505", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 727.862524] env[62070]: DEBUG nova.network.neutron [None req-0046a5be-b671-4771-8172-fdb6e585fc1d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] [instance: 1c1730e5-88af-4c7f-8bcc-d494db2cd723] Successfully created port: c8fbe566-a7d6-48e0-9e05-0bcb216b8111 {{(pid=62070) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 728.025887] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e85cf614-a864-4c29-9be2-6e72025f995f {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.036938] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f98bda6d-b272-4d0d-8686-e2898673eb4a {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.084500] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97bf3489-31fa-4fe5-a267-a84911e57197 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.094233] env[62070]: DEBUG oslo_vmware.api [None req-22f6b4d0-98d5-439d-a2a8-8f5bb53172a4 tempest-ServerTagsTestJSON-107248279 tempest-ServerTagsTestJSON-107248279-project-member] Task: {'id': task-1121479, 'name': PowerOnVM_Task, 'duration_secs': 0.492056} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 728.097160] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-22f6b4d0-98d5-439d-a2a8-8f5bb53172a4 tempest-ServerTagsTestJSON-107248279 tempest-ServerTagsTestJSON-107248279-project-member] [instance: 30d782e4-30c7-41f6-b30d-95a9a59cf83c] Powered on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 728.097476] env[62070]: INFO nova.compute.manager [None req-22f6b4d0-98d5-439d-a2a8-8f5bb53172a4 tempest-ServerTagsTestJSON-107248279 tempest-ServerTagsTestJSON-107248279-project-member] [instance: 30d782e4-30c7-41f6-b30d-95a9a59cf83c] Took 7.65 seconds to spawn the instance on the hypervisor. [ 728.097713] env[62070]: DEBUG nova.compute.manager [None req-22f6b4d0-98d5-439d-a2a8-8f5bb53172a4 tempest-ServerTagsTestJSON-107248279 tempest-ServerTagsTestJSON-107248279-project-member] [instance: 30d782e4-30c7-41f6-b30d-95a9a59cf83c] Checking state {{(pid=62070) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 728.099269] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3933e203-8531-4938-8933-15249bb29d51 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.104399] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de37b387-b872-459d-a73a-bf649347661f {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.126898] env[62070]: DEBUG nova.compute.provider_tree [None req-0046a5be-b671-4771-8172-fdb6e585fc1d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 728.309212] env[62070]: DEBUG oslo_vmware.api [None req-9903219f-0926-4eb2-b4c0-4bfda725a93d tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': task-1121480, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.479745} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 728.309499] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-9903219f-0926-4eb2-b4c0-4bfda725a93d tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore1] 67e99ada-a8e6-4034-b19b-5b2cb883b735/67e99ada-a8e6-4034-b19b-5b2cb883b735.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 728.309986] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-9903219f-0926-4eb2-b4c0-4bfda725a93d tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 67e99ada-a8e6-4034-b19b-5b2cb883b735] Extending root virtual disk to 1048576 {{(pid=62070) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 728.310296] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c530cc87-584e-49e4-afbb-f9b741271b10 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.316696] env[62070]: DEBUG oslo_vmware.api [None req-9903219f-0926-4eb2-b4c0-4bfda725a93d tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Waiting for the task: (returnval){ [ 728.316696] env[62070]: value = "task-1121481" [ 728.316696] env[62070]: _type = "Task" [ 728.316696] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 728.324185] env[62070]: DEBUG oslo_vmware.api [None req-9903219f-0926-4eb2-b4c0-4bfda725a93d tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': task-1121481, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 728.351012] env[62070]: DEBUG oslo_concurrency.lockutils [None req-87135f2c-55d6-41ab-9b21-58289e78f833 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Releasing lock "refresh_cache-963feecc-ff58-4cbb-8d6f-3f9035337087" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 728.351396] env[62070]: DEBUG nova.compute.manager [None req-87135f2c-55d6-41ab-9b21-58289e78f833 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] [instance: 963feecc-ff58-4cbb-8d6f-3f9035337087] Instance network_info: |[{"id": "eaa2c81a-41e9-4fe8-b2b0-9977156d1505", "address": "fa:16:3e:eb:d9:37", "network": {"id": "08004b49-dbc2-4186-9e28-4268e947e8ee", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-2022236674-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "2ecbd5f22c024de8a6b1c45096cb79a7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69e41c97-4d75-4041-ae71-321e7e9d480b", "external-id": "nsx-vlan-transportzone-483", "segmentation_id": 483, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeaa2c81a-41", "ovs_interfaceid": "eaa2c81a-41e9-4fe8-b2b0-9977156d1505", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 728.351702] env[62070]: DEBUG oslo_concurrency.lockutils [req-1acc62b5-a65e-432d-88ce-7980a2a187ec req-04e93e1c-be89-4cd9-a8eb-814d477e054e service nova] Acquired lock "refresh_cache-963feecc-ff58-4cbb-8d6f-3f9035337087" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 728.351905] env[62070]: DEBUG nova.network.neutron [req-1acc62b5-a65e-432d-88ce-7980a2a187ec req-04e93e1c-be89-4cd9-a8eb-814d477e054e service nova] [instance: 963feecc-ff58-4cbb-8d6f-3f9035337087] Refreshing network info cache for port eaa2c81a-41e9-4fe8-b2b0-9977156d1505 {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 728.353132] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-87135f2c-55d6-41ab-9b21-58289e78f833 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] [instance: 963feecc-ff58-4cbb-8d6f-3f9035337087] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:eb:d9:37', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '69e41c97-4d75-4041-ae71-321e7e9d480b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'eaa2c81a-41e9-4fe8-b2b0-9977156d1505', 'vif_model': 'vmxnet3'}] {{(pid=62070) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 728.360469] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-87135f2c-55d6-41ab-9b21-58289e78f833 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Creating folder: Project 
(2ecbd5f22c024de8a6b1c45096cb79a7). Parent ref: group-v245319. {{(pid=62070) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 728.363542] env[62070]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-59d9ac4e-6fdc-4075-9775-5ed4981580a5 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.374262] env[62070]: INFO nova.virt.vmwareapi.vm_util [None req-87135f2c-55d6-41ab-9b21-58289e78f833 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Created folder: Project (2ecbd5f22c024de8a6b1c45096cb79a7) in parent group-v245319. [ 728.374459] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-87135f2c-55d6-41ab-9b21-58289e78f833 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Creating folder: Instances. Parent ref: group-v245345. {{(pid=62070) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 728.374774] env[62070]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-793b8576-2b5f-40e6-acd2-465b746b58ee {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.384016] env[62070]: INFO nova.virt.vmwareapi.vm_util [None req-87135f2c-55d6-41ab-9b21-58289e78f833 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Created folder: Instances in parent group-v245345. [ 728.384517] env[62070]: DEBUG oslo.service.loopingcall [None req-87135f2c-55d6-41ab-9b21-58289e78f833 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 728.384517] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 963feecc-ff58-4cbb-8d6f-3f9035337087] Creating VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 728.384686] env[62070]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7f8b0513-33da-4b5e-a1c8-da123021cdf9 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.403510] env[62070]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 728.403510] env[62070]: value = "task-1121484" [ 728.403510] env[62070]: _type = "Task" [ 728.403510] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 728.411608] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1121484, 'name': CreateVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 728.472726] env[62070]: DEBUG nova.compute.manager [None req-0046a5be-b671-4771-8172-fdb6e585fc1d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] [instance: 1c1730e5-88af-4c7f-8bcc-d494db2cd723] Start spawning the instance on the hypervisor. 
{{(pid=62070) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 728.505601] env[62070]: DEBUG nova.virt.hardware [None req-0046a5be-b671-4771-8172-fdb6e585fc1d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T09:21:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T09:21:17Z,direct_url=,disk_format='vmdk',id=43ea607c-7ece-4601-9b11-75c6a16aa7dd,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9d42cb2bbadf40d6b35f237f71234611',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T09:21:18Z,virtual_size=,visibility=), allow threads: False {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 728.505848] env[62070]: DEBUG nova.virt.hardware [None req-0046a5be-b671-4771-8172-fdb6e585fc1d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Flavor limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 728.506026] env[62070]: DEBUG nova.virt.hardware [None req-0046a5be-b671-4771-8172-fdb6e585fc1d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Image limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 728.506230] env[62070]: DEBUG nova.virt.hardware [None req-0046a5be-b671-4771-8172-fdb6e585fc1d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Flavor pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 728.507013] env[62070]: DEBUG nova.virt.hardware [None req-0046a5be-b671-4771-8172-fdb6e585fc1d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Image pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 728.507306] env[62070]: DEBUG nova.virt.hardware [None req-0046a5be-b671-4771-8172-fdb6e585fc1d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 728.507430] env[62070]: DEBUG nova.virt.hardware [None req-0046a5be-b671-4771-8172-fdb6e585fc1d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 728.507601] env[62070]: DEBUG nova.virt.hardware [None req-0046a5be-b671-4771-8172-fdb6e585fc1d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 728.507773] env[62070]: DEBUG nova.virt.hardware [None 
req-0046a5be-b671-4771-8172-fdb6e585fc1d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Got 1 possible topologies {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 728.507932] env[62070]: DEBUG nova.virt.hardware [None req-0046a5be-b671-4771-8172-fdb6e585fc1d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 728.508121] env[62070]: DEBUG nova.virt.hardware [None req-0046a5be-b671-4771-8172-fdb6e585fc1d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 728.508990] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fce6171-529b-45a8-b4da-6c5cf2debecd {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.517036] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b2c1fe3-a77a-4b2e-b911-c95066bbe812 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.634108] env[62070]: DEBUG nova.scheduler.client.report [None req-0046a5be-b671-4771-8172-fdb6e585fc1d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 728.643133] env[62070]: INFO nova.compute.manager [None req-22f6b4d0-98d5-439d-a2a8-8f5bb53172a4 tempest-ServerTagsTestJSON-107248279 tempest-ServerTagsTestJSON-107248279-project-member] [instance: 30d782e4-30c7-41f6-b30d-95a9a59cf83c] Took 44.68 seconds to build instance. [ 728.658398] env[62070]: DEBUG nova.network.neutron [req-1acc62b5-a65e-432d-88ce-7980a2a187ec req-04e93e1c-be89-4cd9-a8eb-814d477e054e service nova] [instance: 963feecc-ff58-4cbb-8d6f-3f9035337087] Updated VIF entry in instance network info cache for port eaa2c81a-41e9-4fe8-b2b0-9977156d1505. 
{{(pid=62070) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 728.658523] env[62070]: DEBUG nova.network.neutron [req-1acc62b5-a65e-432d-88ce-7980a2a187ec req-04e93e1c-be89-4cd9-a8eb-814d477e054e service nova] [instance: 963feecc-ff58-4cbb-8d6f-3f9035337087] Updating instance_info_cache with network_info: [{"id": "eaa2c81a-41e9-4fe8-b2b0-9977156d1505", "address": "fa:16:3e:eb:d9:37", "network": {"id": "08004b49-dbc2-4186-9e28-4268e947e8ee", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-2022236674-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "2ecbd5f22c024de8a6b1c45096cb79a7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69e41c97-4d75-4041-ae71-321e7e9d480b", "external-id": "nsx-vlan-transportzone-483", "segmentation_id": 483, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeaa2c81a-41", "ovs_interfaceid": "eaa2c81a-41e9-4fe8-b2b0-9977156d1505", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 728.827177] env[62070]: DEBUG oslo_vmware.api [None req-9903219f-0926-4eb2-b4c0-4bfda725a93d tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': task-1121481, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066338} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 728.827464] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-9903219f-0926-4eb2-b4c0-4bfda725a93d tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 67e99ada-a8e6-4034-b19b-5b2cb883b735] Extended root virtual disk {{(pid=62070) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 728.828390] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e73f6998-9975-4d32-9ea8-26d48e65df37 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.851371] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-9903219f-0926-4eb2-b4c0-4bfda725a93d tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 67e99ada-a8e6-4034-b19b-5b2cb883b735] Reconfiguring VM instance instance-00000025 to attach disk [datastore1] 67e99ada-a8e6-4034-b19b-5b2cb883b735/67e99ada-a8e6-4034-b19b-5b2cb883b735.vmdk or device None with type sparse {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 728.851685] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ccbc6850-7dac-457d-81f9-c733548b23d4 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.872537] env[62070]: DEBUG oslo_vmware.api [None req-9903219f-0926-4eb2-b4c0-4bfda725a93d tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Waiting for the task: (returnval){ [ 728.872537] env[62070]: value = "task-1121485" [ 728.872537] env[62070]: _type = "Task" [ 728.872537] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 728.885473] env[62070]: DEBUG oslo_vmware.api [None req-9903219f-0926-4eb2-b4c0-4bfda725a93d tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': task-1121485, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 728.912234] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1121484, 'name': CreateVM_Task} progress is 99%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 729.141184] env[62070]: DEBUG oslo_concurrency.lockutils [None req-0046a5be-b671-4771-8172-fdb6e585fc1d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.700s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 729.141895] env[62070]: DEBUG nova.compute.manager [None req-0046a5be-b671-4771-8172-fdb6e585fc1d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] [instance: 076aed5b-4b08-4f3b-a940-d9cd95c32e57] Start building networks asynchronously for instance. 
{{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 729.145264] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.615s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 729.146811] env[62070]: INFO nova.compute.claims [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] [instance: fe378560-40b8-42c9-840d-b7d60de87c4d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 729.151104] env[62070]: DEBUG oslo_concurrency.lockutils [None req-22f6b4d0-98d5-439d-a2a8-8f5bb53172a4 tempest-ServerTagsTestJSON-107248279 tempest-ServerTagsTestJSON-107248279-project-member] Lock "30d782e4-30c7-41f6-b30d-95a9a59cf83c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 117.915s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 729.161127] env[62070]: DEBUG oslo_concurrency.lockutils [req-1acc62b5-a65e-432d-88ce-7980a2a187ec req-04e93e1c-be89-4cd9-a8eb-814d477e054e service nova] Releasing lock "refresh_cache-963feecc-ff58-4cbb-8d6f-3f9035337087" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 729.385727] env[62070]: DEBUG oslo_vmware.api [None req-9903219f-0926-4eb2-b4c0-4bfda725a93d tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': task-1121485, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 729.412270] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1121484, 'name': CreateVM_Task} progress is 99%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 729.648131] env[62070]: DEBUG nova.compute.manager [req-d3cbc77b-f2a7-4021-a67e-b69a77157aad req-09be7d28-e751-4b30-bceb-f2423a7350c0 service nova] [instance: d0914f90-200c-4715-aaab-54beacf339b9] Received event network-changed-e23bf645-d900-4495-8917-316b3ab16ce6 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 729.648474] env[62070]: DEBUG nova.compute.manager [req-d3cbc77b-f2a7-4021-a67e-b69a77157aad req-09be7d28-e751-4b30-bceb-f2423a7350c0 service nova] [instance: d0914f90-200c-4715-aaab-54beacf339b9] Refreshing instance network info cache due to event network-changed-e23bf645-d900-4495-8917-316b3ab16ce6. 
{{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 729.648657] env[62070]: DEBUG oslo_concurrency.lockutils [req-d3cbc77b-f2a7-4021-a67e-b69a77157aad req-09be7d28-e751-4b30-bceb-f2423a7350c0 service nova] Acquiring lock "refresh_cache-d0914f90-200c-4715-aaab-54beacf339b9" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 729.648827] env[62070]: DEBUG oslo_concurrency.lockutils [req-d3cbc77b-f2a7-4021-a67e-b69a77157aad req-09be7d28-e751-4b30-bceb-f2423a7350c0 service nova] Acquired lock "refresh_cache-d0914f90-200c-4715-aaab-54beacf339b9" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 729.649030] env[62070]: DEBUG nova.network.neutron [req-d3cbc77b-f2a7-4021-a67e-b69a77157aad req-09be7d28-e751-4b30-bceb-f2423a7350c0 service nova] [instance: d0914f90-200c-4715-aaab-54beacf339b9] Refreshing network info cache for port e23bf645-d900-4495-8917-316b3ab16ce6 {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 729.651610] env[62070]: DEBUG nova.compute.utils [None req-0046a5be-b671-4771-8172-fdb6e585fc1d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Using /dev/sd instead of None {{(pid=62070) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 729.654653] env[62070]: DEBUG nova.compute.manager [None req-0046a5be-b671-4771-8172-fdb6e585fc1d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] [instance: 076aed5b-4b08-4f3b-a940-d9cd95c32e57] Allocating IP information in the background. {{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 729.654812] env[62070]: DEBUG nova.network.neutron [None req-0046a5be-b671-4771-8172-fdb6e585fc1d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] [instance: 076aed5b-4b08-4f3b-a940-d9cd95c32e57] allocate_for_instance() {{(pid=62070) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 729.659805] env[62070]: DEBUG nova.compute.manager [None req-7c1b8e29-6c3f-402f-942e-08b08c8eac7d tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: a3c42653-9a4b-42d3-bc38-8d46d95c8f64] Starting instance... 
{{(pid=62070) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 729.853165] env[62070]: DEBUG nova.policy [None req-0046a5be-b671-4771-8172-fdb6e585fc1d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a0162190099744eba0d646a05de23435', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3e9dab208bda46418b994df4359da404', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62070) authorize /opt/stack/nova/nova/policy.py:201}} [ 729.876338] env[62070]: DEBUG nova.network.neutron [None req-0046a5be-b671-4771-8172-fdb6e585fc1d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] [instance: 1c1730e5-88af-4c7f-8bcc-d494db2cd723] Successfully updated port: c8fbe566-a7d6-48e0-9e05-0bcb216b8111 {{(pid=62070) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 729.892547] env[62070]: DEBUG oslo_vmware.api [None req-9903219f-0926-4eb2-b4c0-4bfda725a93d tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': task-1121485, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 729.917905] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1121484, 'name': CreateVM_Task, 'duration_secs': 1.253315} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 729.921412] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 963feecc-ff58-4cbb-8d6f-3f9035337087] Created VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 729.921509] env[62070]: DEBUG oslo_concurrency.lockutils [None req-87135f2c-55d6-41ab-9b21-58289e78f833 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 729.922815] env[62070]: DEBUG oslo_concurrency.lockutils [None req-87135f2c-55d6-41ab-9b21-58289e78f833 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Acquired lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 729.922815] env[62070]: DEBUG oslo_concurrency.lockutils [None req-87135f2c-55d6-41ab-9b21-58289e78f833 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 729.922815] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a1e77991-dc94-407b-af3c-72fc86296a4c {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.926970] 
env[62070]: DEBUG oslo_vmware.api [None req-87135f2c-55d6-41ab-9b21-58289e78f833 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Waiting for the task: (returnval){ [ 729.926970] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]527da015-9f53-70bc-e166-7d8bf2e50fdd" [ 729.926970] env[62070]: _type = "Task" [ 729.926970] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 729.934971] env[62070]: DEBUG oslo_vmware.api [None req-87135f2c-55d6-41ab-9b21-58289e78f833 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]527da015-9f53-70bc-e166-7d8bf2e50fdd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 730.155474] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9e52961-6ea4-4f1d-8ffa-eba6d6b75dc5 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.167407] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85b6db4a-2bad-4d34-a08b-a26336c34851 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.201022] env[62070]: DEBUG nova.compute.manager [None req-0046a5be-b671-4771-8172-fdb6e585fc1d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] [instance: 076aed5b-4b08-4f3b-a940-d9cd95c32e57] Start building block device mappings for instance. 
{{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 730.208782] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ae5fcfb-a33e-4cbc-aa20-b6433fc4e30b {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.214192] env[62070]: DEBUG oslo_concurrency.lockutils [None req-7c1b8e29-6c3f-402f-942e-08b08c8eac7d tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 730.221089] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d3c94ec-d591-4292-a633-ca4629b89abc {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.238451] env[62070]: DEBUG nova.compute.provider_tree [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 730.259572] env[62070]: DEBUG nova.network.neutron [None req-0046a5be-b671-4771-8172-fdb6e585fc1d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] [instance: 076aed5b-4b08-4f3b-a940-d9cd95c32e57] Successfully created port: d4fdb36b-1cfc-4dcd-86b8-98a8769b4224 {{(pid=62070) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 730.387556] env[62070]: DEBUG oslo_concurrency.lockutils [None req-0046a5be-b671-4771-8172-fdb6e585fc1d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Acquiring lock "refresh_cache-1c1730e5-88af-4c7f-8bcc-d494db2cd723" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 730.387556] env[62070]: DEBUG oslo_concurrency.lockutils [None req-0046a5be-b671-4771-8172-fdb6e585fc1d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Acquired lock "refresh_cache-1c1730e5-88af-4c7f-8bcc-d494db2cd723" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 730.387773] env[62070]: DEBUG nova.network.neutron [None req-0046a5be-b671-4771-8172-fdb6e585fc1d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] [instance: 1c1730e5-88af-4c7f-8bcc-d494db2cd723] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 730.389030] env[62070]: DEBUG oslo_vmware.api [None req-9903219f-0926-4eb2-b4c0-4bfda725a93d tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': task-1121485, 'name': ReconfigVM_Task, 'duration_secs': 1.030267} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 730.389313] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-9903219f-0926-4eb2-b4c0-4bfda725a93d tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 67e99ada-a8e6-4034-b19b-5b2cb883b735] Reconfigured VM instance instance-00000025 to attach disk [datastore1] 67e99ada-a8e6-4034-b19b-5b2cb883b735/67e99ada-a8e6-4034-b19b-5b2cb883b735.vmdk or device None with type sparse {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 730.389848] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-146dd717-3818-4ed6-ac6e-2cb4ee33ac59 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.399116] env[62070]: DEBUG oslo_vmware.api [None req-9903219f-0926-4eb2-b4c0-4bfda725a93d tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Waiting for the task: (returnval){ [ 730.399116] env[62070]: value = "task-1121486" [ 730.399116] env[62070]: _type = "Task" [ 730.399116] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 730.411852] env[62070]: DEBUG oslo_vmware.api [None req-9903219f-0926-4eb2-b4c0-4bfda725a93d tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': task-1121486, 'name': Rename_Task} progress is 6%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 730.435915] env[62070]: DEBUG oslo_vmware.api [None req-87135f2c-55d6-41ab-9b21-58289e78f833 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]527da015-9f53-70bc-e166-7d8bf2e50fdd, 'name': SearchDatastore_Task, 'duration_secs': 0.008931} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 730.436247] env[62070]: DEBUG oslo_concurrency.lockutils [None req-87135f2c-55d6-41ab-9b21-58289e78f833 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Releasing lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 730.436491] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-87135f2c-55d6-41ab-9b21-58289e78f833 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] [instance: 963feecc-ff58-4cbb-8d6f-3f9035337087] Processing image 43ea607c-7ece-4601-9b11-75c6a16aa7dd {{(pid=62070) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 730.436737] env[62070]: DEBUG oslo_concurrency.lockutils [None req-87135f2c-55d6-41ab-9b21-58289e78f833 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 730.436885] env[62070]: DEBUG oslo_concurrency.lockutils [None req-87135f2c-55d6-41ab-9b21-58289e78f833 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Acquired lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 730.437093] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-87135f2c-55d6-41ab-9b21-58289e78f833 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 730.437646] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-bc2019f4-d54c-4068-8821-c91b380c0d7f {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.445345] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-87135f2c-55d6-41ab-9b21-58289e78f833 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 730.445539] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-87135f2c-55d6-41ab-9b21-58289e78f833 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62070) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 730.446229] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f9f011b9-a391-49c3-86ee-87febd24f94b {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.450944] env[62070]: DEBUG oslo_vmware.api [None req-87135f2c-55d6-41ab-9b21-58289e78f833 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Waiting for the task: (returnval){ [ 730.450944] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]5208c6e2-ad8e-dff8-52a0-885a60c56470" [ 730.450944] env[62070]: _type = "Task" [ 730.450944] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 730.460146] env[62070]: DEBUG oslo_vmware.api [None req-87135f2c-55d6-41ab-9b21-58289e78f833 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]5208c6e2-ad8e-dff8-52a0-885a60c56470, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 730.644913] env[62070]: DEBUG oslo_concurrency.lockutils [None req-35085f63-b149-4a7f-b35a-5da7bb134d28 tempest-ServerTagsTestJSON-107248279 tempest-ServerTagsTestJSON-107248279-project-member] Acquiring lock "30d782e4-30c7-41f6-b30d-95a9a59cf83c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 730.645009] env[62070]: DEBUG oslo_concurrency.lockutils [None req-35085f63-b149-4a7f-b35a-5da7bb134d28 tempest-ServerTagsTestJSON-107248279 tempest-ServerTagsTestJSON-107248279-project-member] Lock "30d782e4-30c7-41f6-b30d-95a9a59cf83c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 730.645242] env[62070]: DEBUG oslo_concurrency.lockutils [None req-35085f63-b149-4a7f-b35a-5da7bb134d28 tempest-ServerTagsTestJSON-107248279 tempest-ServerTagsTestJSON-107248279-project-member] Acquiring lock "30d782e4-30c7-41f6-b30d-95a9a59cf83c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 730.645494] env[62070]: DEBUG oslo_concurrency.lockutils [None req-35085f63-b149-4a7f-b35a-5da7bb134d28 tempest-ServerTagsTestJSON-107248279 tempest-ServerTagsTestJSON-107248279-project-member] Lock "30d782e4-30c7-41f6-b30d-95a9a59cf83c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 730.645715] env[62070]: DEBUG oslo_concurrency.lockutils [None req-35085f63-b149-4a7f-b35a-5da7bb134d28 tempest-ServerTagsTestJSON-107248279 tempest-ServerTagsTestJSON-107248279-project-member] Lock "30d782e4-30c7-41f6-b30d-95a9a59cf83c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62070) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 730.648859] env[62070]: INFO nova.compute.manager [None req-35085f63-b149-4a7f-b35a-5da7bb134d28 tempest-ServerTagsTestJSON-107248279 tempest-ServerTagsTestJSON-107248279-project-member] [instance: 30d782e4-30c7-41f6-b30d-95a9a59cf83c] Terminating instance [ 730.651980] env[62070]: DEBUG nova.compute.manager [None req-35085f63-b149-4a7f-b35a-5da7bb134d28 tempest-ServerTagsTestJSON-107248279 tempest-ServerTagsTestJSON-107248279-project-member] [instance: 30d782e4-30c7-41f6-b30d-95a9a59cf83c] Start destroying the instance on the hypervisor. {{(pid=62070) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 730.652199] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-35085f63-b149-4a7f-b35a-5da7bb134d28 tempest-ServerTagsTestJSON-107248279 tempest-ServerTagsTestJSON-107248279-project-member] [instance: 30d782e4-30c7-41f6-b30d-95a9a59cf83c] Destroying instance {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 730.653043] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23831fac-55a0-4ff3-9130-e00d0a7b7673 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.663938] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-35085f63-b149-4a7f-b35a-5da7bb134d28 tempest-ServerTagsTestJSON-107248279 tempest-ServerTagsTestJSON-107248279-project-member] [instance: 30d782e4-30c7-41f6-b30d-95a9a59cf83c] Powering off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 730.664301] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c3dee781-b265-42e5-a12a-ab3242a31ad5 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.671364] env[62070]: DEBUG oslo_vmware.api [None req-35085f63-b149-4a7f-b35a-5da7bb134d28 tempest-ServerTagsTestJSON-107248279 tempest-ServerTagsTestJSON-107248279-project-member] Waiting for the task: (returnval){ [ 730.671364] env[62070]: value = "task-1121487" [ 730.671364] env[62070]: _type = "Task" [ 730.671364] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 730.681125] env[62070]: DEBUG oslo_vmware.api [None req-35085f63-b149-4a7f-b35a-5da7bb134d28 tempest-ServerTagsTestJSON-107248279 tempest-ServerTagsTestJSON-107248279-project-member] Task: {'id': task-1121487, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 730.682226] env[62070]: DEBUG nova.network.neutron [req-d3cbc77b-f2a7-4021-a67e-b69a77157aad req-09be7d28-e751-4b30-bceb-f2423a7350c0 service nova] [instance: d0914f90-200c-4715-aaab-54beacf339b9] Updated VIF entry in instance network info cache for port e23bf645-d900-4495-8917-316b3ab16ce6. 
{{(pid=62070) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 730.682666] env[62070]: DEBUG nova.network.neutron [req-d3cbc77b-f2a7-4021-a67e-b69a77157aad req-09be7d28-e751-4b30-bceb-f2423a7350c0 service nova] [instance: d0914f90-200c-4715-aaab-54beacf339b9] Updating instance_info_cache with network_info: [{"id": "e23bf645-d900-4495-8917-316b3ab16ce6", "address": "fa:16:3e:14:0c:da", "network": {"id": "0c106f8c-7353-4c61-9675-8e9c2ae5dddf", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-1747568824-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.187", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "49dd924c6f8e4a78bb8d57c805ab40f3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "415e68b4-3766-4359-afe2-f8563910d98c", "external-id": "nsx-vlan-transportzone-538", "segmentation_id": 538, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape23bf645-d9", "ovs_interfaceid": "e23bf645-d900-4495-8917-316b3ab16ce6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 730.744146] env[62070]: DEBUG nova.scheduler.client.report [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 730.910378] env[62070]: DEBUG oslo_vmware.api [None req-9903219f-0926-4eb2-b4c0-4bfda725a93d tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': task-1121486, 'name': Rename_Task, 'duration_secs': 0.138693} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 730.910378] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-9903219f-0926-4eb2-b4c0-4bfda725a93d tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 67e99ada-a8e6-4034-b19b-5b2cb883b735] Powering on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 730.910378] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-06a14067-45aa-47d8-85a7-4eb82f0c2b48 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.916125] env[62070]: DEBUG oslo_vmware.api [None req-9903219f-0926-4eb2-b4c0-4bfda725a93d tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Waiting for the task: (returnval){ [ 730.916125] env[62070]: value = "task-1121488" [ 730.916125] env[62070]: _type = "Task" [ 730.916125] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 730.923239] env[62070]: DEBUG oslo_vmware.api [None req-9903219f-0926-4eb2-b4c0-4bfda725a93d tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': task-1121488, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 730.924029] env[62070]: DEBUG nova.network.neutron [None req-0046a5be-b671-4771-8172-fdb6e585fc1d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] [instance: 1c1730e5-88af-4c7f-8bcc-d494db2cd723] Instance cache missing network info. {{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 730.961902] env[62070]: DEBUG oslo_vmware.api [None req-87135f2c-55d6-41ab-9b21-58289e78f833 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]5208c6e2-ad8e-dff8-52a0-885a60c56470, 'name': SearchDatastore_Task, 'duration_secs': 0.008447} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 730.962975] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-67b362f8-cba8-4f96-adbb-68715eb55454 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.967987] env[62070]: DEBUG oslo_vmware.api [None req-87135f2c-55d6-41ab-9b21-58289e78f833 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Waiting for the task: (returnval){ [ 730.967987] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]52e3ad46-b1ec-0298-8c2d-76c596c72511" [ 730.967987] env[62070]: _type = "Task" [ 730.967987] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 730.976516] env[62070]: DEBUG oslo_vmware.api [None req-87135f2c-55d6-41ab-9b21-58289e78f833 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52e3ad46-b1ec-0298-8c2d-76c596c72511, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 731.104833] env[62070]: DEBUG nova.network.neutron [None req-0046a5be-b671-4771-8172-fdb6e585fc1d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] [instance: 1c1730e5-88af-4c7f-8bcc-d494db2cd723] Updating instance_info_cache with network_info: [{"id": "c8fbe566-a7d6-48e0-9e05-0bcb216b8111", "address": "fa:16:3e:12:7e:12", "network": {"id": "b61ea502-bdfd-4ddf-8bb9-1f2f2f003f65", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-216003123-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3e9dab208bda46418b994df4359da404", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "099fe970-c61f-4480-bed4-ae4f485fd82a", "external-id": "nsx-vlan-transportzone-678", "segmentation_id": 678, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc8fbe566-a7", "ovs_interfaceid": "c8fbe566-a7d6-48e0-9e05-0bcb216b8111", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 731.182116] env[62070]: DEBUG oslo_vmware.api [None req-35085f63-b149-4a7f-b35a-5da7bb134d28 tempest-ServerTagsTestJSON-107248279 tempest-ServerTagsTestJSON-107248279-project-member] Task: {'id': task-1121487, 'name': PowerOffVM_Task, 'duration_secs': 0.186639} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 731.182116] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-35085f63-b149-4a7f-b35a-5da7bb134d28 tempest-ServerTagsTestJSON-107248279 tempest-ServerTagsTestJSON-107248279-project-member] [instance: 30d782e4-30c7-41f6-b30d-95a9a59cf83c] Powered off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 731.182116] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-35085f63-b149-4a7f-b35a-5da7bb134d28 tempest-ServerTagsTestJSON-107248279 tempest-ServerTagsTestJSON-107248279-project-member] [instance: 30d782e4-30c7-41f6-b30d-95a9a59cf83c] Unregistering the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 731.182116] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-762b8898-6a78-4d8f-8253-64081b2d1a3d {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.185368] env[62070]: DEBUG oslo_concurrency.lockutils [req-d3cbc77b-f2a7-4021-a67e-b69a77157aad req-09be7d28-e751-4b30-bceb-f2423a7350c0 service nova] Releasing lock "refresh_cache-d0914f90-200c-4715-aaab-54beacf339b9" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 731.185626] env[62070]: DEBUG nova.compute.manager [req-d3cbc77b-f2a7-4021-a67e-b69a77157aad req-09be7d28-e751-4b30-bceb-f2423a7350c0 service nova] [instance: 1c1730e5-88af-4c7f-8bcc-d494db2cd723] Received event network-vif-plugged-c8fbe566-a7d6-48e0-9e05-0bcb216b8111 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 731.185815] env[62070]: DEBUG oslo_concurrency.lockutils [req-d3cbc77b-f2a7-4021-a67e-b69a77157aad req-09be7d28-e751-4b30-bceb-f2423a7350c0 service nova] Acquiring lock "1c1730e5-88af-4c7f-8bcc-d494db2cd723-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 731.186065] env[62070]: DEBUG oslo_concurrency.lockutils [req-d3cbc77b-f2a7-4021-a67e-b69a77157aad req-09be7d28-e751-4b30-bceb-f2423a7350c0 service nova] Lock "1c1730e5-88af-4c7f-8bcc-d494db2cd723-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 731.186261] env[62070]: DEBUG oslo_concurrency.lockutils [req-d3cbc77b-f2a7-4021-a67e-b69a77157aad req-09be7d28-e751-4b30-bceb-f2423a7350c0 service nova] Lock "1c1730e5-88af-4c7f-8bcc-d494db2cd723-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 731.186442] env[62070]: DEBUG nova.compute.manager [req-d3cbc77b-f2a7-4021-a67e-b69a77157aad req-09be7d28-e751-4b30-bceb-f2423a7350c0 service nova] [instance: 1c1730e5-88af-4c7f-8bcc-d494db2cd723] No waiting events found dispatching network-vif-plugged-c8fbe566-a7d6-48e0-9e05-0bcb216b8111 {{(pid=62070) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 731.186626] env[62070]: WARNING nova.compute.manager [req-d3cbc77b-f2a7-4021-a67e-b69a77157aad req-09be7d28-e751-4b30-bceb-f2423a7350c0 service nova] [instance: 1c1730e5-88af-4c7f-8bcc-d494db2cd723] Received 
unexpected event network-vif-plugged-c8fbe566-a7d6-48e0-9e05-0bcb216b8111 for instance with vm_state building and task_state spawning. [ 731.213293] env[62070]: DEBUG nova.compute.manager [None req-0046a5be-b671-4771-8172-fdb6e585fc1d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] [instance: 076aed5b-4b08-4f3b-a940-d9cd95c32e57] Start spawning the instance on the hypervisor. {{(pid=62070) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 731.237950] env[62070]: DEBUG nova.virt.hardware [None req-0046a5be-b671-4771-8172-fdb6e585fc1d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T09:21:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T09:21:17Z,direct_url=,disk_format='vmdk',id=43ea607c-7ece-4601-9b11-75c6a16aa7dd,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9d42cb2bbadf40d6b35f237f71234611',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T09:21:18Z,virtual_size=,visibility=), allow threads: False {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 731.238192] env[62070]: DEBUG nova.virt.hardware [None req-0046a5be-b671-4771-8172-fdb6e585fc1d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Flavor limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 731.238456] env[62070]: DEBUG nova.virt.hardware [None req-0046a5be-b671-4771-8172-fdb6e585fc1d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Image limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 731.238795] env[62070]: DEBUG nova.virt.hardware [None req-0046a5be-b671-4771-8172-fdb6e585fc1d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Flavor pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 731.239028] env[62070]: DEBUG nova.virt.hardware [None req-0046a5be-b671-4771-8172-fdb6e585fc1d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Image pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 731.239290] env[62070]: DEBUG nova.virt.hardware [None req-0046a5be-b671-4771-8172-fdb6e585fc1d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 731.239630] env[62070]: DEBUG nova.virt.hardware [None req-0046a5be-b671-4771-8172-fdb6e585fc1d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62070) 
_get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 731.239895] env[62070]: DEBUG nova.virt.hardware [None req-0046a5be-b671-4771-8172-fdb6e585fc1d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 731.240179] env[62070]: DEBUG nova.virt.hardware [None req-0046a5be-b671-4771-8172-fdb6e585fc1d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Got 1 possible topologies {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 731.240466] env[62070]: DEBUG nova.virt.hardware [None req-0046a5be-b671-4771-8172-fdb6e585fc1d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 731.240786] env[62070]: DEBUG nova.virt.hardware [None req-0046a5be-b671-4771-8172-fdb6e585fc1d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 731.241827] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e652e5e-f751-406f-b6db-f87e01658ba7 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.247100] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.102s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 731.247593] env[62070]: DEBUG nova.compute.manager [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] [instance: fe378560-40b8-42c9-840d-b7d60de87c4d] Start building networks asynchronously for instance. 
{{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 731.253252] env[62070]: DEBUG oslo_concurrency.lockutils [None req-81043ab4-38b5-4edb-afd0-424b75f917e0 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 14.431s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 731.254982] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-35085f63-b149-4a7f-b35a-5da7bb134d28 tempest-ServerTagsTestJSON-107248279 tempest-ServerTagsTestJSON-107248279-project-member] [instance: 30d782e4-30c7-41f6-b30d-95a9a59cf83c] Unregistered the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 731.255196] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-35085f63-b149-4a7f-b35a-5da7bb134d28 tempest-ServerTagsTestJSON-107248279 tempest-ServerTagsTestJSON-107248279-project-member] [instance: 30d782e4-30c7-41f6-b30d-95a9a59cf83c] Deleting contents of the VM from datastore datastore2 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 731.255371] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-35085f63-b149-4a7f-b35a-5da7bb134d28 tempest-ServerTagsTestJSON-107248279 tempest-ServerTagsTestJSON-107248279-project-member] Deleting the datastore file [datastore2] 30d782e4-30c7-41f6-b30d-95a9a59cf83c {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 731.256943] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edb50bc3-8201-4f08-a78d-04be9a117a73 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.262015] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c25614d1-9799-489e-8a07-0ca123fce6a8 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.274570] env[62070]: DEBUG oslo_vmware.api [None req-35085f63-b149-4a7f-b35a-5da7bb134d28 tempest-ServerTagsTestJSON-107248279 tempest-ServerTagsTestJSON-107248279-project-member] Waiting for the task: (returnval){ [ 731.274570] env[62070]: value = "task-1121490" [ 731.274570] env[62070]: _type = "Task" [ 731.274570] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 731.284080] env[62070]: DEBUG oslo_vmware.api [None req-35085f63-b149-4a7f-b35a-5da7bb134d28 tempest-ServerTagsTestJSON-107248279 tempest-ServerTagsTestJSON-107248279-project-member] Task: {'id': task-1121490, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 731.426303] env[62070]: DEBUG oslo_vmware.api [None req-9903219f-0926-4eb2-b4c0-4bfda725a93d tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': task-1121488, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 731.477928] env[62070]: DEBUG oslo_vmware.api [None req-87135f2c-55d6-41ab-9b21-58289e78f833 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52e3ad46-b1ec-0298-8c2d-76c596c72511, 'name': SearchDatastore_Task, 'duration_secs': 0.010123} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 731.479881] env[62070]: DEBUG oslo_concurrency.lockutils [None req-87135f2c-55d6-41ab-9b21-58289e78f833 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Releasing lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 731.479881] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-87135f2c-55d6-41ab-9b21-58289e78f833 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore1] 963feecc-ff58-4cbb-8d6f-3f9035337087/963feecc-ff58-4cbb-8d6f-3f9035337087.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 731.479881] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-baee98d6-835f-40b0-803d-17f8e1e8cd08 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.485317] env[62070]: DEBUG oslo_vmware.api [None req-87135f2c-55d6-41ab-9b21-58289e78f833 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Waiting for the task: (returnval){ [ 731.485317] env[62070]: value = "task-1121491" [ 731.485317] env[62070]: _type = "Task" [ 731.485317] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 731.493606] env[62070]: DEBUG oslo_vmware.api [None req-87135f2c-55d6-41ab-9b21-58289e78f833 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Task: {'id': task-1121491, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 731.608712] env[62070]: DEBUG oslo_concurrency.lockutils [None req-0046a5be-b671-4771-8172-fdb6e585fc1d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Releasing lock "refresh_cache-1c1730e5-88af-4c7f-8bcc-d494db2cd723" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 731.609066] env[62070]: DEBUG nova.compute.manager [None req-0046a5be-b671-4771-8172-fdb6e585fc1d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] [instance: 1c1730e5-88af-4c7f-8bcc-d494db2cd723] Instance network_info: |[{"id": "c8fbe566-a7d6-48e0-9e05-0bcb216b8111", "address": "fa:16:3e:12:7e:12", "network": {"id": "b61ea502-bdfd-4ddf-8bb9-1f2f2f003f65", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-216003123-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3e9dab208bda46418b994df4359da404", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "099fe970-c61f-4480-bed4-ae4f485fd82a", "external-id": "nsx-vlan-transportzone-678", "segmentation_id": 678, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc8fbe566-a7", "ovs_interfaceid": "c8fbe566-a7d6-48e0-9e05-0bcb216b8111", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 731.609609] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-0046a5be-b671-4771-8172-fdb6e585fc1d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] [instance: 1c1730e5-88af-4c7f-8bcc-d494db2cd723] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:12:7e:12', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '099fe970-c61f-4480-bed4-ae4f485fd82a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c8fbe566-a7d6-48e0-9e05-0bcb216b8111', 'vif_model': 'vmxnet3'}] {{(pid=62070) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 731.617224] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-0046a5be-b671-4771-8172-fdb6e585fc1d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Creating folder: Project (3e9dab208bda46418b994df4359da404). Parent ref: group-v245319. 
{{(pid=62070) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 731.617493] env[62070]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-78534702-b73d-4b9b-93bf-c9642229abbd {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.628143] env[62070]: INFO nova.virt.vmwareapi.vm_util [None req-0046a5be-b671-4771-8172-fdb6e585fc1d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Created folder: Project (3e9dab208bda46418b994df4359da404) in parent group-v245319. [ 731.628326] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-0046a5be-b671-4771-8172-fdb6e585fc1d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Creating folder: Instances. Parent ref: group-v245348. {{(pid=62070) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 731.628544] env[62070]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6fd23eaa-c635-4b3e-9a8d-935e5ffbf490 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.637965] env[62070]: INFO nova.virt.vmwareapi.vm_util [None req-0046a5be-b671-4771-8172-fdb6e585fc1d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Created folder: Instances in parent group-v245348. [ 731.638212] env[62070]: DEBUG oslo.service.loopingcall [None req-0046a5be-b671-4771-8172-fdb6e585fc1d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 731.638398] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1c1730e5-88af-4c7f-8bcc-d494db2cd723] Creating VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 731.638598] env[62070]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ecf51854-f00b-48fd-b1ff-044abc8da123 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.657373] env[62070]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 731.657373] env[62070]: value = "task-1121494" [ 731.657373] env[62070]: _type = "Task" [ 731.657373] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 731.664717] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1121494, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 731.758045] env[62070]: DEBUG nova.compute.utils [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Using /dev/sd instead of None {{(pid=62070) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 731.761389] env[62070]: INFO nova.compute.claims [None req-81043ab4-38b5-4edb-afd0-424b75f917e0 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] [instance: d148d561-3211-4f1f-965a-f2b14cd60b11] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 731.764861] env[62070]: DEBUG nova.compute.manager [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] [instance: fe378560-40b8-42c9-840d-b7d60de87c4d] Allocating IP information in the background. {{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 731.765039] env[62070]: DEBUG nova.network.neutron [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] [instance: fe378560-40b8-42c9-840d-b7d60de87c4d] allocate_for_instance() {{(pid=62070) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 731.787159] env[62070]: DEBUG oslo_vmware.api [None req-35085f63-b149-4a7f-b35a-5da7bb134d28 tempest-ServerTagsTestJSON-107248279 tempest-ServerTagsTestJSON-107248279-project-member] Task: {'id': task-1121490, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.286033} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 731.787254] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-35085f63-b149-4a7f-b35a-5da7bb134d28 tempest-ServerTagsTestJSON-107248279 tempest-ServerTagsTestJSON-107248279-project-member] Deleted the datastore file {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 731.787359] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-35085f63-b149-4a7f-b35a-5da7bb134d28 tempest-ServerTagsTestJSON-107248279 tempest-ServerTagsTestJSON-107248279-project-member] [instance: 30d782e4-30c7-41f6-b30d-95a9a59cf83c] Deleted contents of the VM from datastore datastore2 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 731.787548] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-35085f63-b149-4a7f-b35a-5da7bb134d28 tempest-ServerTagsTestJSON-107248279 tempest-ServerTagsTestJSON-107248279-project-member] [instance: 30d782e4-30c7-41f6-b30d-95a9a59cf83c] Instance destroyed {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 731.787893] env[62070]: INFO nova.compute.manager [None req-35085f63-b149-4a7f-b35a-5da7bb134d28 tempest-ServerTagsTestJSON-107248279 tempest-ServerTagsTestJSON-107248279-project-member] [instance: 30d782e4-30c7-41f6-b30d-95a9a59cf83c] Took 1.14 seconds to destroy the instance on the hypervisor. [ 731.788732] env[62070]: DEBUG oslo.service.loopingcall [None req-35085f63-b149-4a7f-b35a-5da7bb134d28 tempest-ServerTagsTestJSON-107248279 tempest-ServerTagsTestJSON-107248279-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 731.788732] env[62070]: DEBUG nova.compute.manager [-] [instance: 30d782e4-30c7-41f6-b30d-95a9a59cf83c] Deallocating network for instance {{(pid=62070) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 731.788732] env[62070]: DEBUG nova.network.neutron [-] [instance: 30d782e4-30c7-41f6-b30d-95a9a59cf83c] deallocate_for_instance() {{(pid=62070) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 731.869101] env[62070]: DEBUG nova.policy [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'af61f8424f7445e1bb04628a7ce52dd0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a34c8ff9345b46c6976385bc455f7d5e', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62070) authorize /opt/stack/nova/nova/policy.py:201}} [ 731.929214] env[62070]: DEBUG nova.compute.manager [req-1a187239-9b9c-4163-91ed-de6743d4dd34 req-3064faf8-cb33-467e-a589-320e5ec9420f service nova] [instance: 1c1730e5-88af-4c7f-8bcc-d494db2cd723] Received event network-changed-c8fbe566-a7d6-48e0-9e05-0bcb216b8111 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 731.929214] env[62070]: DEBUG nova.compute.manager [req-1a187239-9b9c-4163-91ed-de6743d4dd34 req-3064faf8-cb33-467e-a589-320e5ec9420f service nova] [instance: 1c1730e5-88af-4c7f-8bcc-d494db2cd723] Refreshing instance network info cache due to event network-changed-c8fbe566-a7d6-48e0-9e05-0bcb216b8111. {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 731.929214] env[62070]: DEBUG oslo_concurrency.lockutils [req-1a187239-9b9c-4163-91ed-de6743d4dd34 req-3064faf8-cb33-467e-a589-320e5ec9420f service nova] Acquiring lock "refresh_cache-1c1730e5-88af-4c7f-8bcc-d494db2cd723" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 731.929214] env[62070]: DEBUG oslo_concurrency.lockutils [req-1a187239-9b9c-4163-91ed-de6743d4dd34 req-3064faf8-cb33-467e-a589-320e5ec9420f service nova] Acquired lock "refresh_cache-1c1730e5-88af-4c7f-8bcc-d494db2cd723" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 731.929214] env[62070]: DEBUG nova.network.neutron [req-1a187239-9b9c-4163-91ed-de6743d4dd34 req-3064faf8-cb33-467e-a589-320e5ec9420f service nova] [instance: 1c1730e5-88af-4c7f-8bcc-d494db2cd723] Refreshing network info cache for port c8fbe566-a7d6-48e0-9e05-0bcb216b8111 {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 731.941681] env[62070]: DEBUG oslo_vmware.api [None req-9903219f-0926-4eb2-b4c0-4bfda725a93d tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': task-1121488, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 732.000493] env[62070]: DEBUG oslo_vmware.api [None req-87135f2c-55d6-41ab-9b21-58289e78f833 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Task: {'id': task-1121491, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 732.170823] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1121494, 'name': CreateVM_Task} progress is 25%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 732.206871] env[62070]: DEBUG nova.compute.manager [req-6c925975-4cad-441b-bd6d-3da81816125e req-63bde7d3-f7ed-48f7-9868-001937d84c77 service nova] [instance: 30d782e4-30c7-41f6-b30d-95a9a59cf83c] Received event network-vif-deleted-02efa9f5-65b4-4a93-a5d1-a5a15f0d6da0 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 732.207369] env[62070]: INFO nova.compute.manager [req-6c925975-4cad-441b-bd6d-3da81816125e req-63bde7d3-f7ed-48f7-9868-001937d84c77 service nova] [instance: 30d782e4-30c7-41f6-b30d-95a9a59cf83c] Neutron deleted interface 02efa9f5-65b4-4a93-a5d1-a5a15f0d6da0; detaching it from the instance and deleting it from the info cache [ 732.207369] env[62070]: DEBUG nova.network.neutron [req-6c925975-4cad-441b-bd6d-3da81816125e req-63bde7d3-f7ed-48f7-9868-001937d84c77 service nova] [instance: 30d782e4-30c7-41f6-b30d-95a9a59cf83c] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 732.265447] env[62070]: DEBUG nova.compute.manager [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] [instance: fe378560-40b8-42c9-840d-b7d60de87c4d] Start building block device mappings for instance. {{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 732.269591] env[62070]: INFO nova.compute.resource_tracker [None req-81043ab4-38b5-4edb-afd0-424b75f917e0 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] [instance: d148d561-3211-4f1f-965a-f2b14cd60b11] Updating resource usage from migration fd5d7c3d-d0ee-488f-a0a2-a1f50cfb458f [ 732.428394] env[62070]: DEBUG oslo_vmware.api [None req-9903219f-0926-4eb2-b4c0-4bfda725a93d tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': task-1121488, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 732.445169] env[62070]: DEBUG nova.network.neutron [None req-0046a5be-b671-4771-8172-fdb6e585fc1d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] [instance: 076aed5b-4b08-4f3b-a940-d9cd95c32e57] Successfully updated port: d4fdb36b-1cfc-4dcd-86b8-98a8769b4224 {{(pid=62070) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 732.458154] env[62070]: DEBUG nova.network.neutron [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] [instance: fe378560-40b8-42c9-840d-b7d60de87c4d] Successfully created port: f3e119aa-83f9-4e1d-b333-01b5bf1a1873 {{(pid=62070) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 732.497935] env[62070]: DEBUG oslo_vmware.api [None req-87135f2c-55d6-41ab-9b21-58289e78f833 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Task: {'id': task-1121491, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.630923} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 732.500564] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-87135f2c-55d6-41ab-9b21-58289e78f833 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore1] 963feecc-ff58-4cbb-8d6f-3f9035337087/963feecc-ff58-4cbb-8d6f-3f9035337087.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 732.500779] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-87135f2c-55d6-41ab-9b21-58289e78f833 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] [instance: 963feecc-ff58-4cbb-8d6f-3f9035337087] Extending root virtual disk to 1048576 {{(pid=62070) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 732.501298] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-db51f304-e277-46e9-a02e-6691357844af {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.507836] env[62070]: DEBUG oslo_vmware.api [None req-87135f2c-55d6-41ab-9b21-58289e78f833 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Waiting for the task: (returnval){ [ 732.507836] env[62070]: value = "task-1121495" [ 732.507836] env[62070]: _type = "Task" [ 732.507836] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 732.515930] env[62070]: DEBUG oslo_vmware.api [None req-87135f2c-55d6-41ab-9b21-58289e78f833 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Task: {'id': task-1121495, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 732.608904] env[62070]: DEBUG nova.network.neutron [-] [instance: 30d782e4-30c7-41f6-b30d-95a9a59cf83c] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 732.668137] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1121494, 'name': CreateVM_Task, 'duration_secs': 0.589311} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 732.670738] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1c1730e5-88af-4c7f-8bcc-d494db2cd723] Created VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 732.671803] env[62070]: DEBUG oslo_concurrency.lockutils [None req-0046a5be-b671-4771-8172-fdb6e585fc1d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 732.671964] env[62070]: DEBUG oslo_concurrency.lockutils [None req-0046a5be-b671-4771-8172-fdb6e585fc1d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Acquired lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 732.672373] env[62070]: DEBUG oslo_concurrency.lockutils [None req-0046a5be-b671-4771-8172-fdb6e585fc1d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 732.672649] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-59927167-118a-4f23-8064-8a36b7416ba1 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.677811] env[62070]: DEBUG oslo_vmware.api [None req-0046a5be-b671-4771-8172-fdb6e585fc1d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Waiting for the task: (returnval){ [ 732.677811] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]5248de80-f066-941d-53ba-85c0df01340c" [ 732.677811] env[62070]: _type = "Task" [ 732.677811] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 732.691448] env[62070]: DEBUG oslo_vmware.api [None req-0046a5be-b671-4771-8172-fdb6e585fc1d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]5248de80-f066-941d-53ba-85c0df01340c, 'name': SearchDatastore_Task, 'duration_secs': 0.009702} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 732.691448] env[62070]: DEBUG oslo_concurrency.lockutils [None req-0046a5be-b671-4771-8172-fdb6e585fc1d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Releasing lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 732.691448] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-0046a5be-b671-4771-8172-fdb6e585fc1d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] [instance: 1c1730e5-88af-4c7f-8bcc-d494db2cd723] Processing image 43ea607c-7ece-4601-9b11-75c6a16aa7dd {{(pid=62070) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 732.691689] env[62070]: DEBUG oslo_concurrency.lockutils [None req-0046a5be-b671-4771-8172-fdb6e585fc1d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 732.691836] env[62070]: DEBUG oslo_concurrency.lockutils [None req-0046a5be-b671-4771-8172-fdb6e585fc1d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Acquired lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 732.692066] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-0046a5be-b671-4771-8172-fdb6e585fc1d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 732.692368] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e90386f3-0da2-4165-9f84-10e755049fa5 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.701037] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-0046a5be-b671-4771-8172-fdb6e585fc1d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 732.701962] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-0046a5be-b671-4771-8172-fdb6e585fc1d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62070) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 732.702212] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-54cdd62a-2072-4234-904d-aa86a48bb907 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.714080] env[62070]: DEBUG oslo_vmware.api [None req-0046a5be-b671-4771-8172-fdb6e585fc1d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Waiting for the task: (returnval){ [ 732.714080] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]52ac5fbe-874f-a06d-d7ca-337c3cf8e8f9" [ 732.714080] env[62070]: _type = "Task" [ 732.714080] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 732.714301] env[62070]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f95c4af7-d1d8-48b9-b067-a6a1abeb1ef3 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.727783] env[62070]: DEBUG oslo_vmware.api [None req-0046a5be-b671-4771-8172-fdb6e585fc1d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52ac5fbe-874f-a06d-d7ca-337c3cf8e8f9, 'name': SearchDatastore_Task, 'duration_secs': 0.0081} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 732.734306] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbed37f3-704b-4822-8d2c-3c43d7ad31ff {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.746504] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-028d7b64-50e0-428f-9cb2-b4a9926d973d {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.752139] env[62070]: DEBUG oslo_vmware.api [None req-0046a5be-b671-4771-8172-fdb6e585fc1d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Waiting for the task: (returnval){ [ 732.752139] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]521b0a0f-eac0-06ba-56cb-75188eb671ac" [ 732.752139] env[62070]: _type = "Task" [ 732.752139] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 732.756149] env[62070]: DEBUG nova.network.neutron [req-1a187239-9b9c-4163-91ed-de6743d4dd34 req-3064faf8-cb33-467e-a589-320e5ec9420f service nova] [instance: 1c1730e5-88af-4c7f-8bcc-d494db2cd723] Updated VIF entry in instance network info cache for port c8fbe566-a7d6-48e0-9e05-0bcb216b8111. 
{{(pid=62070) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 732.756476] env[62070]: DEBUG nova.network.neutron [req-1a187239-9b9c-4163-91ed-de6743d4dd34 req-3064faf8-cb33-467e-a589-320e5ec9420f service nova] [instance: 1c1730e5-88af-4c7f-8bcc-d494db2cd723] Updating instance_info_cache with network_info: [{"id": "c8fbe566-a7d6-48e0-9e05-0bcb216b8111", "address": "fa:16:3e:12:7e:12", "network": {"id": "b61ea502-bdfd-4ddf-8bb9-1f2f2f003f65", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-216003123-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3e9dab208bda46418b994df4359da404", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "099fe970-c61f-4480-bed4-ae4f485fd82a", "external-id": "nsx-vlan-transportzone-678", "segmentation_id": 678, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc8fbe566-a7", "ovs_interfaceid": "c8fbe566-a7d6-48e0-9e05-0bcb216b8111", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 732.765698] env[62070]: DEBUG nova.compute.manager [req-6c925975-4cad-441b-bd6d-3da81816125e req-63bde7d3-f7ed-48f7-9868-001937d84c77 service nova] [instance: 30d782e4-30c7-41f6-b30d-95a9a59cf83c] Detach interface failed, port_id=02efa9f5-65b4-4a93-a5d1-a5a15f0d6da0, reason: Instance 30d782e4-30c7-41f6-b30d-95a9a59cf83c could not be found. {{(pid=62070) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 732.773036] env[62070]: DEBUG oslo_vmware.api [None req-0046a5be-b671-4771-8172-fdb6e585fc1d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]521b0a0f-eac0-06ba-56cb-75188eb671ac, 'name': SearchDatastore_Task, 'duration_secs': 0.008832} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 732.776916] env[62070]: DEBUG oslo_concurrency.lockutils [None req-0046a5be-b671-4771-8172-fdb6e585fc1d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Releasing lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 732.777645] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-0046a5be-b671-4771-8172-fdb6e585fc1d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore1] 1c1730e5-88af-4c7f-8bcc-d494db2cd723/1c1730e5-88af-4c7f-8bcc-d494db2cd723.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 732.777795] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-611a172f-3958-4b90-974d-489d4fc1fb47 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.786487] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddbcf0cd-4fb9-422e-8426-67cebc32c831 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.791205] env[62070]: DEBUG oslo_vmware.api [None req-0046a5be-b671-4771-8172-fdb6e585fc1d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Waiting for the task: (returnval){ [ 732.791205] env[62070]: value = "task-1121496" [ 732.791205] env[62070]: _type = "Task" [ 732.791205] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 732.799089] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e3a8d53-c53e-4a7d-aac9-17afb6bd644e {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.805864] env[62070]: DEBUG oslo_vmware.api [None req-0046a5be-b671-4771-8172-fdb6e585fc1d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Task: {'id': task-1121496, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 732.834514] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fc2b16f-1abd-486c-bfb8-2055cc58f14d {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.846881] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6015a30c-0c82-4e06-84b0-92df894de665 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.860842] env[62070]: DEBUG nova.compute.provider_tree [None req-81043ab4-38b5-4edb-afd0-424b75f917e0 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 732.928289] env[62070]: DEBUG oslo_vmware.api [None req-9903219f-0926-4eb2-b4c0-4bfda725a93d tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': task-1121488, 'name': PowerOnVM_Task, 'duration_secs': 1.941573} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 732.929068] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-9903219f-0926-4eb2-b4c0-4bfda725a93d tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 67e99ada-a8e6-4034-b19b-5b2cb883b735] Powered on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 732.929068] env[62070]: INFO nova.compute.manager [None req-9903219f-0926-4eb2-b4c0-4bfda725a93d tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 67e99ada-a8e6-4034-b19b-5b2cb883b735] Took 9.90 seconds to spawn the instance on the hypervisor. 
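Every vCenter operation in the spawn sequence above (SearchDatastore_Task, CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task, PowerOnVM_Task) follows the same lifecycle in these logs: wait_for_task (oslo_vmware/api.py:397) logs "Waiting for the task ... to complete", _poll_task logs "progress is N%" while the task runs (api.py:434), and logs "completed successfully" with its duration once it finishes (api.py:444). The snippet below is a minimal, self-contained sketch of that polling pattern for readers following the log; the names used here (wait_for_task, get_task_info, TaskFailed) are illustrative assumptions, not the actual oslo.vmware API, which implements the same control flow behind VMwareAPISession.wait_for_task()/_poll_task using a looping call rather than a bare sleep loop.

    # Simplified illustration of the task-polling pattern visible in the log
    # ("Waiting for the task ... to complete", "progress is N%",
    #  "... completed successfully").  Names are illustrative, not oslo.vmware's API.
    import time

    class TaskFailed(Exception):
        """Raised when a vCenter-style task finishes in an error state."""

    def wait_for_task(get_task_info, poll_interval=0.5, timeout=300):
        """Poll a task until it succeeds, fails, or times out.

        ``get_task_info`` is any callable returning an object with
        ``state`` ('queued', 'running', 'success', 'error'),
        ``progress`` (0-100) and ``error`` attributes.
        """
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            info = get_task_info()
            if info.state == "success":
                # Corresponds to the "... completed successfully" log lines.
                return info
            if info.state == "error":
                raise TaskFailed(info.error)
            # Corresponds to the "... progress is N%" log lines from _poll_task.
            print("Task progress is %s%%" % (info.progress or 0))
            time.sleep(poll_interval)
        raise TimeoutError("task did not complete within %s seconds" % timeout)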
[ 732.929068] env[62070]: DEBUG nova.compute.manager [None req-9903219f-0926-4eb2-b4c0-4bfda725a93d tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 67e99ada-a8e6-4034-b19b-5b2cb883b735] Checking state {{(pid=62070) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 732.929683] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5038792-8a90-42ac-a1e6-d0ff038de12e {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.949750] env[62070]: DEBUG oslo_concurrency.lockutils [None req-0046a5be-b671-4771-8172-fdb6e585fc1d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Acquiring lock "refresh_cache-076aed5b-4b08-4f3b-a940-d9cd95c32e57" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 732.949951] env[62070]: DEBUG oslo_concurrency.lockutils [None req-0046a5be-b671-4771-8172-fdb6e585fc1d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Acquired lock "refresh_cache-076aed5b-4b08-4f3b-a940-d9cd95c32e57" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 732.950084] env[62070]: DEBUG nova.network.neutron [None req-0046a5be-b671-4771-8172-fdb6e585fc1d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] [instance: 076aed5b-4b08-4f3b-a940-d9cd95c32e57] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 733.022428] env[62070]: DEBUG oslo_vmware.api [None req-87135f2c-55d6-41ab-9b21-58289e78f833 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Task: {'id': task-1121495, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066943} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 733.022428] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-87135f2c-55d6-41ab-9b21-58289e78f833 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] [instance: 963feecc-ff58-4cbb-8d6f-3f9035337087] Extended root virtual disk {{(pid=62070) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 733.022428] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dee3bee5-f105-4dc3-b97b-4fcff0171314 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.052404] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-87135f2c-55d6-41ab-9b21-58289e78f833 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] [instance: 963feecc-ff58-4cbb-8d6f-3f9035337087] Reconfiguring VM instance instance-00000026 to attach disk [datastore1] 963feecc-ff58-4cbb-8d6f-3f9035337087/963feecc-ff58-4cbb-8d6f-3f9035337087.vmdk or device None with type sparse {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 733.052404] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cd4eb3ba-b809-4c54-bdae-e60ffb771e64 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.075287] env[62070]: DEBUG oslo_vmware.api [None req-87135f2c-55d6-41ab-9b21-58289e78f833 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Waiting for the task: (returnval){ [ 733.075287] env[62070]: value = "task-1121497" [ 733.075287] env[62070]: _type = "Task" [ 733.075287] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 733.086631] env[62070]: DEBUG oslo_vmware.api [None req-87135f2c-55d6-41ab-9b21-58289e78f833 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Task: {'id': task-1121497, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 733.114372] env[62070]: INFO nova.compute.manager [-] [instance: 30d782e4-30c7-41f6-b30d-95a9a59cf83c] Took 1.32 seconds to deallocate network for instance. [ 733.269794] env[62070]: DEBUG oslo_concurrency.lockutils [req-1a187239-9b9c-4163-91ed-de6743d4dd34 req-3064faf8-cb33-467e-a589-320e5ec9420f service nova] Releasing lock "refresh_cache-1c1730e5-88af-4c7f-8bcc-d494db2cd723" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 733.282072] env[62070]: DEBUG nova.compute.manager [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] [instance: fe378560-40b8-42c9-840d-b7d60de87c4d] Start spawning the instance on the hypervisor. {{(pid=62070) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 733.301756] env[62070]: DEBUG oslo_vmware.api [None req-0046a5be-b671-4771-8172-fdb6e585fc1d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Task: {'id': task-1121496, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.479777} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 733.302021] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-0046a5be-b671-4771-8172-fdb6e585fc1d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore1] 1c1730e5-88af-4c7f-8bcc-d494db2cd723/1c1730e5-88af-4c7f-8bcc-d494db2cd723.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 733.302235] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-0046a5be-b671-4771-8172-fdb6e585fc1d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] [instance: 1c1730e5-88af-4c7f-8bcc-d494db2cd723] Extending root virtual disk to 1048576 {{(pid=62070) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 733.304217] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9f1e9e20-582d-4aa5-b9ce-4e6e75caab96 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.312359] env[62070]: DEBUG nova.virt.hardware [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T09:21:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T09:21:17Z,direct_url=,disk_format='vmdk',id=43ea607c-7ece-4601-9b11-75c6a16aa7dd,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9d42cb2bbadf40d6b35f237f71234611',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T09:21:18Z,virtual_size=,visibility=), allow threads: False {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 733.312583] env[62070]: DEBUG nova.virt.hardware [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Flavor limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 733.312738] env[62070]: DEBUG nova.virt.hardware [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Image limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 733.312915] env[62070]: DEBUG nova.virt.hardware [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Flavor pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 733.313069] env[62070]: DEBUG nova.virt.hardware [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Image pref 0:0:0 {{(pid=62070) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 733.313216] env[62070]: DEBUG nova.virt.hardware [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 733.313438] env[62070]: DEBUG nova.virt.hardware [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 733.313609] env[62070]: DEBUG nova.virt.hardware [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 733.313777] env[62070]: DEBUG nova.virt.hardware [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Got 1 possible topologies {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 733.313937] env[62070]: DEBUG nova.virt.hardware [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 733.314118] env[62070]: DEBUG nova.virt.hardware [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 733.314910] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87358d40-fd0f-463d-bdfd-901ad5ba3b8a {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.318777] env[62070]: DEBUG oslo_vmware.api [None req-0046a5be-b671-4771-8172-fdb6e585fc1d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Waiting for the task: (returnval){ [ 733.318777] env[62070]: value = "task-1121498" [ 733.318777] env[62070]: _type = "Task" [ 733.318777] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 733.327275] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ca52645-31fb-4d57-9e69-3bb96575547a {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.333605] env[62070]: DEBUG oslo_vmware.api [None req-0046a5be-b671-4771-8172-fdb6e585fc1d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Task: {'id': task-1121498, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 733.364103] env[62070]: DEBUG nova.scheduler.client.report [None req-81043ab4-38b5-4edb-afd0-424b75f917e0 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 733.446541] env[62070]: INFO nova.compute.manager [None req-9903219f-0926-4eb2-b4c0-4bfda725a93d tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 67e99ada-a8e6-4034-b19b-5b2cb883b735] Took 47.48 seconds to build instance. [ 733.482250] env[62070]: DEBUG nova.network.neutron [None req-0046a5be-b671-4771-8172-fdb6e585fc1d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] [instance: 076aed5b-4b08-4f3b-a940-d9cd95c32e57] Instance cache missing network info. {{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 733.585684] env[62070]: DEBUG oslo_vmware.api [None req-87135f2c-55d6-41ab-9b21-58289e78f833 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Task: {'id': task-1121497, 'name': ReconfigVM_Task, 'duration_secs': 0.440815} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 733.585929] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-87135f2c-55d6-41ab-9b21-58289e78f833 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] [instance: 963feecc-ff58-4cbb-8d6f-3f9035337087] Reconfigured VM instance instance-00000026 to attach disk [datastore1] 963feecc-ff58-4cbb-8d6f-3f9035337087/963feecc-ff58-4cbb-8d6f-3f9035337087.vmdk or device None with type sparse {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 733.586772] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-98349992-7f18-4b53-8b0e-922a3baec802 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.593659] env[62070]: DEBUG oslo_vmware.api [None req-87135f2c-55d6-41ab-9b21-58289e78f833 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Waiting for the task: (returnval){ [ 733.593659] env[62070]: value = "task-1121499" [ 733.593659] env[62070]: _type = "Task" [ 733.593659] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 733.602905] env[62070]: DEBUG oslo_vmware.api [None req-87135f2c-55d6-41ab-9b21-58289e78f833 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Task: {'id': task-1121499, 'name': Rename_Task} progress is 5%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 733.620218] env[62070]: DEBUG oslo_concurrency.lockutils [None req-35085f63-b149-4a7f-b35a-5da7bb134d28 tempest-ServerTagsTestJSON-107248279 tempest-ServerTagsTestJSON-107248279-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 733.659249] env[62070]: DEBUG nova.network.neutron [None req-0046a5be-b671-4771-8172-fdb6e585fc1d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] [instance: 076aed5b-4b08-4f3b-a940-d9cd95c32e57] Updating instance_info_cache with network_info: [{"id": "d4fdb36b-1cfc-4dcd-86b8-98a8769b4224", "address": "fa:16:3e:83:18:a2", "network": {"id": "b61ea502-bdfd-4ddf-8bb9-1f2f2f003f65", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-216003123-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3e9dab208bda46418b994df4359da404", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "099fe970-c61f-4480-bed4-ae4f485fd82a", "external-id": "nsx-vlan-transportzone-678", "segmentation_id": 678, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd4fdb36b-1c", "ovs_interfaceid": "d4fdb36b-1cfc-4dcd-86b8-98a8769b4224", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 733.828966] env[62070]: DEBUG oslo_vmware.api [None req-0046a5be-b671-4771-8172-fdb6e585fc1d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Task: {'id': task-1121498, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.080304} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 733.829324] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-0046a5be-b671-4771-8172-fdb6e585fc1d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] [instance: 1c1730e5-88af-4c7f-8bcc-d494db2cd723] Extended root virtual disk {{(pid=62070) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 733.830026] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47256f2d-982f-4840-ace8-c0c812d28de9 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.852317] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-0046a5be-b671-4771-8172-fdb6e585fc1d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] [instance: 1c1730e5-88af-4c7f-8bcc-d494db2cd723] Reconfiguring VM instance instance-00000027 to attach disk [datastore1] 1c1730e5-88af-4c7f-8bcc-d494db2cd723/1c1730e5-88af-4c7f-8bcc-d494db2cd723.vmdk or device None with type sparse {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 733.852586] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1716c46b-e5a5-42c8-bfe1-a3bcda06b92c {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.869507] env[62070]: DEBUG oslo_concurrency.lockutils [None req-81043ab4-38b5-4edb-afd0-424b75f917e0 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.616s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 733.869716] env[62070]: INFO nova.compute.manager [None req-81043ab4-38b5-4edb-afd0-424b75f917e0 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] [instance: d148d561-3211-4f1f-965a-f2b14cd60b11] Migrating [ 733.869947] env[62070]: DEBUG oslo_concurrency.lockutils [None req-81043ab4-38b5-4edb-afd0-424b75f917e0 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Acquiring lock "compute-rpcapi-router" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 733.870364] env[62070]: DEBUG oslo_concurrency.lockutils [None req-81043ab4-38b5-4edb-afd0-424b75f917e0 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Acquired lock "compute-rpcapi-router" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 733.872626] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.616s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 733.874240] env[62070]: INFO nova.compute.claims [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] [instance: 
bcafa04d-904b-4eab-aba1-35180c2d4b22] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 733.877790] env[62070]: DEBUG oslo_vmware.api [None req-0046a5be-b671-4771-8172-fdb6e585fc1d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Waiting for the task: (returnval){ [ 733.877790] env[62070]: value = "task-1121500" [ 733.877790] env[62070]: _type = "Task" [ 733.877790] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 733.886057] env[62070]: DEBUG oslo_vmware.api [None req-0046a5be-b671-4771-8172-fdb6e585fc1d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Task: {'id': task-1121500, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 733.949705] env[62070]: DEBUG oslo_concurrency.lockutils [None req-9903219f-0926-4eb2-b4c0-4bfda725a93d tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Lock "67e99ada-a8e6-4034-b19b-5b2cb883b735" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 122.230s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 733.955137] env[62070]: DEBUG nova.compute.manager [req-1108d440-2007-4f6b-8819-60ac34d713a0 req-50038d26-3eb0-48cf-9d18-501e4f895811 service nova] [instance: 076aed5b-4b08-4f3b-a940-d9cd95c32e57] Received event network-vif-plugged-d4fdb36b-1cfc-4dcd-86b8-98a8769b4224 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 733.955389] env[62070]: DEBUG oslo_concurrency.lockutils [req-1108d440-2007-4f6b-8819-60ac34d713a0 req-50038d26-3eb0-48cf-9d18-501e4f895811 service nova] Acquiring lock "076aed5b-4b08-4f3b-a940-d9cd95c32e57-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 733.955713] env[62070]: DEBUG oslo_concurrency.lockutils [req-1108d440-2007-4f6b-8819-60ac34d713a0 req-50038d26-3eb0-48cf-9d18-501e4f895811 service nova] Lock "076aed5b-4b08-4f3b-a940-d9cd95c32e57-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 733.955931] env[62070]: DEBUG oslo_concurrency.lockutils [req-1108d440-2007-4f6b-8819-60ac34d713a0 req-50038d26-3eb0-48cf-9d18-501e4f895811 service nova] Lock "076aed5b-4b08-4f3b-a940-d9cd95c32e57-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 733.956189] env[62070]: DEBUG nova.compute.manager [req-1108d440-2007-4f6b-8819-60ac34d713a0 req-50038d26-3eb0-48cf-9d18-501e4f895811 service nova] [instance: 076aed5b-4b08-4f3b-a940-d9cd95c32e57] No waiting events found dispatching network-vif-plugged-d4fdb36b-1cfc-4dcd-86b8-98a8769b4224 {{(pid=62070) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 733.956429] env[62070]: WARNING nova.compute.manager [req-1108d440-2007-4f6b-8819-60ac34d713a0 req-50038d26-3eb0-48cf-9d18-501e4f895811 service nova] [instance: 
076aed5b-4b08-4f3b-a940-d9cd95c32e57] Received unexpected event network-vif-plugged-d4fdb36b-1cfc-4dcd-86b8-98a8769b4224 for instance with vm_state building and task_state spawning. [ 733.956652] env[62070]: DEBUG nova.compute.manager [req-1108d440-2007-4f6b-8819-60ac34d713a0 req-50038d26-3eb0-48cf-9d18-501e4f895811 service nova] [instance: 076aed5b-4b08-4f3b-a940-d9cd95c32e57] Received event network-changed-d4fdb36b-1cfc-4dcd-86b8-98a8769b4224 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 733.956842] env[62070]: DEBUG nova.compute.manager [req-1108d440-2007-4f6b-8819-60ac34d713a0 req-50038d26-3eb0-48cf-9d18-501e4f895811 service nova] [instance: 076aed5b-4b08-4f3b-a940-d9cd95c32e57] Refreshing instance network info cache due to event network-changed-d4fdb36b-1cfc-4dcd-86b8-98a8769b4224. {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 733.957067] env[62070]: DEBUG oslo_concurrency.lockutils [req-1108d440-2007-4f6b-8819-60ac34d713a0 req-50038d26-3eb0-48cf-9d18-501e4f895811 service nova] Acquiring lock "refresh_cache-076aed5b-4b08-4f3b-a940-d9cd95c32e57" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 734.103815] env[62070]: DEBUG oslo_vmware.api [None req-87135f2c-55d6-41ab-9b21-58289e78f833 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Task: {'id': task-1121499, 'name': Rename_Task, 'duration_secs': 0.147836} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 734.104429] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-87135f2c-55d6-41ab-9b21-58289e78f833 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] [instance: 963feecc-ff58-4cbb-8d6f-3f9035337087] Powering on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 734.104839] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-51f4f30a-64f4-40f8-915d-779cdd9b9a27 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.111499] env[62070]: DEBUG oslo_vmware.api [None req-87135f2c-55d6-41ab-9b21-58289e78f833 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Waiting for the task: (returnval){ [ 734.111499] env[62070]: value = "task-1121501" [ 734.111499] env[62070]: _type = "Task" [ 734.111499] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 734.120028] env[62070]: DEBUG oslo_vmware.api [None req-87135f2c-55d6-41ab-9b21-58289e78f833 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Task: {'id': task-1121501, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 734.162734] env[62070]: DEBUG nova.network.neutron [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] [instance: fe378560-40b8-42c9-840d-b7d60de87c4d] Successfully updated port: f3e119aa-83f9-4e1d-b333-01b5bf1a1873 {{(pid=62070) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 734.164123] env[62070]: DEBUG oslo_concurrency.lockutils [None req-0046a5be-b671-4771-8172-fdb6e585fc1d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Releasing lock "refresh_cache-076aed5b-4b08-4f3b-a940-d9cd95c32e57" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 734.164398] env[62070]: DEBUG nova.compute.manager [None req-0046a5be-b671-4771-8172-fdb6e585fc1d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] [instance: 076aed5b-4b08-4f3b-a940-d9cd95c32e57] Instance network_info: |[{"id": "d4fdb36b-1cfc-4dcd-86b8-98a8769b4224", "address": "fa:16:3e:83:18:a2", "network": {"id": "b61ea502-bdfd-4ddf-8bb9-1f2f2f003f65", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-216003123-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3e9dab208bda46418b994df4359da404", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "099fe970-c61f-4480-bed4-ae4f485fd82a", "external-id": "nsx-vlan-transportzone-678", "segmentation_id": 678, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd4fdb36b-1c", "ovs_interfaceid": "d4fdb36b-1cfc-4dcd-86b8-98a8769b4224", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 734.164863] env[62070]: DEBUG oslo_concurrency.lockutils [req-1108d440-2007-4f6b-8819-60ac34d713a0 req-50038d26-3eb0-48cf-9d18-501e4f895811 service nova] Acquired lock "refresh_cache-076aed5b-4b08-4f3b-a940-d9cd95c32e57" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 734.165774] env[62070]: DEBUG nova.network.neutron [req-1108d440-2007-4f6b-8819-60ac34d713a0 req-50038d26-3eb0-48cf-9d18-501e4f895811 service nova] [instance: 076aed5b-4b08-4f3b-a940-d9cd95c32e57] Refreshing network info cache for port d4fdb36b-1cfc-4dcd-86b8-98a8769b4224 {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 734.166922] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-0046a5be-b671-4771-8172-fdb6e585fc1d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] [instance: 076aed5b-4b08-4f3b-a940-d9cd95c32e57] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:83:18:a2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '099fe970-c61f-4480-bed4-ae4f485fd82a', 'network-type': 
'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd4fdb36b-1cfc-4dcd-86b8-98a8769b4224', 'vif_model': 'vmxnet3'}] {{(pid=62070) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 734.176048] env[62070]: DEBUG oslo.service.loopingcall [None req-0046a5be-b671-4771-8172-fdb6e585fc1d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 734.177125] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 076aed5b-4b08-4f3b-a940-d9cd95c32e57] Creating VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 734.177389] env[62070]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c4c60d78-f7ee-4974-bf98-2a80bc59052e {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.199319] env[62070]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 734.199319] env[62070]: value = "task-1121502" [ 734.199319] env[62070]: _type = "Task" [ 734.199319] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 734.207860] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1121502, 'name': CreateVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 734.378776] env[62070]: INFO nova.compute.rpcapi [None req-81043ab4-38b5-4edb-afd0-424b75f917e0 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Automatically selected compute RPC version 6.3 from minimum service version 67 [ 734.379423] env[62070]: DEBUG oslo_concurrency.lockutils [None req-81043ab4-38b5-4edb-afd0-424b75f917e0 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Releasing lock "compute-rpcapi-router" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 734.407615] env[62070]: DEBUG oslo_vmware.api [None req-0046a5be-b671-4771-8172-fdb6e585fc1d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Task: {'id': task-1121500, 'name': ReconfigVM_Task, 'duration_secs': 0.269757} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 734.409046] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-0046a5be-b671-4771-8172-fdb6e585fc1d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] [instance: 1c1730e5-88af-4c7f-8bcc-d494db2cd723] Reconfigured VM instance instance-00000027 to attach disk [datastore1] 1c1730e5-88af-4c7f-8bcc-d494db2cd723/1c1730e5-88af-4c7f-8bcc-d494db2cd723.vmdk or device None with type sparse {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 734.409046] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-093d0417-6842-4405-85c1-15e937bf3171 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.415556] env[62070]: DEBUG oslo_vmware.api [None req-0046a5be-b671-4771-8172-fdb6e585fc1d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Waiting for the task: (returnval){ [ 734.415556] env[62070]: value = "task-1121503" [ 734.415556] env[62070]: _type = "Task" [ 734.415556] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 734.425068] env[62070]: DEBUG oslo_vmware.api [None req-0046a5be-b671-4771-8172-fdb6e585fc1d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Task: {'id': task-1121503, 'name': Rename_Task} progress is 5%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 734.451865] env[62070]: DEBUG nova.compute.manager [None req-884b25a6-886e-4a2f-85f7-4f7a1048c76f tempest-ServersTestBootFromVolume-622206804 tempest-ServersTestBootFromVolume-622206804-project-member] [instance: dd5d90e8-964a-4e1c-a98a-bcba37a1d79e] Starting instance... {{(pid=62070) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 734.622851] env[62070]: DEBUG oslo_vmware.api [None req-87135f2c-55d6-41ab-9b21-58289e78f833 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Task: {'id': task-1121501, 'name': PowerOnVM_Task} progress is 94%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 734.667626] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Acquiring lock "refresh_cache-fe378560-40b8-42c9-840d-b7d60de87c4d" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 734.667626] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Acquired lock "refresh_cache-fe378560-40b8-42c9-840d-b7d60de87c4d" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 734.667626] env[62070]: DEBUG nova.network.neutron [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] [instance: fe378560-40b8-42c9-840d-b7d60de87c4d] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 734.710446] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1121502, 'name': CreateVM_Task} progress is 25%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 734.771650] env[62070]: DEBUG nova.compute.manager [req-2f7403fc-59bc-41d8-b46c-3d81e18aa406 req-75fcace9-d832-41cf-b3da-55e3b3c60d42 service nova] [instance: 67e99ada-a8e6-4034-b19b-5b2cb883b735] Received event network-changed-e01eb485-1347-4afb-b881-62797a5b84af {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 734.771650] env[62070]: DEBUG nova.compute.manager [req-2f7403fc-59bc-41d8-b46c-3d81e18aa406 req-75fcace9-d832-41cf-b3da-55e3b3c60d42 service nova] [instance: 67e99ada-a8e6-4034-b19b-5b2cb883b735] Refreshing instance network info cache due to event network-changed-e01eb485-1347-4afb-b881-62797a5b84af. 
{{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 734.772278] env[62070]: DEBUG oslo_concurrency.lockutils [req-2f7403fc-59bc-41d8-b46c-3d81e18aa406 req-75fcace9-d832-41cf-b3da-55e3b3c60d42 service nova] Acquiring lock "refresh_cache-67e99ada-a8e6-4034-b19b-5b2cb883b735" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 734.772278] env[62070]: DEBUG oslo_concurrency.lockutils [req-2f7403fc-59bc-41d8-b46c-3d81e18aa406 req-75fcace9-d832-41cf-b3da-55e3b3c60d42 service nova] Acquired lock "refresh_cache-67e99ada-a8e6-4034-b19b-5b2cb883b735" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 734.772278] env[62070]: DEBUG nova.network.neutron [req-2f7403fc-59bc-41d8-b46c-3d81e18aa406 req-75fcace9-d832-41cf-b3da-55e3b3c60d42 service nova] [instance: 67e99ada-a8e6-4034-b19b-5b2cb883b735] Refreshing network info cache for port e01eb485-1347-4afb-b881-62797a5b84af {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 734.909188] env[62070]: DEBUG oslo_concurrency.lockutils [None req-81043ab4-38b5-4edb-afd0-424b75f917e0 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Acquiring lock "refresh_cache-d148d561-3211-4f1f-965a-f2b14cd60b11" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 734.909457] env[62070]: DEBUG oslo_concurrency.lockutils [None req-81043ab4-38b5-4edb-afd0-424b75f917e0 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Acquired lock "refresh_cache-d148d561-3211-4f1f-965a-f2b14cd60b11" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 734.909555] env[62070]: DEBUG nova.network.neutron [None req-81043ab4-38b5-4edb-afd0-424b75f917e0 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] [instance: d148d561-3211-4f1f-965a-f2b14cd60b11] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 734.931591] env[62070]: DEBUG oslo_vmware.api [None req-0046a5be-b671-4771-8172-fdb6e585fc1d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Task: {'id': task-1121503, 'name': Rename_Task, 'duration_secs': 0.158386} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 734.934300] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-0046a5be-b671-4771-8172-fdb6e585fc1d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] [instance: 1c1730e5-88af-4c7f-8bcc-d494db2cd723] Powering on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 734.934959] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6aabbc93-17f4-4df9-9b5e-07a6b3b682d8 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.945742] env[62070]: DEBUG oslo_vmware.api [None req-0046a5be-b671-4771-8172-fdb6e585fc1d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Waiting for the task: (returnval){ [ 734.945742] env[62070]: value = "task-1121504" [ 734.945742] env[62070]: _type = "Task" [ 734.945742] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 734.960494] env[62070]: DEBUG oslo_vmware.api [None req-0046a5be-b671-4771-8172-fdb6e585fc1d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Task: {'id': task-1121504, 'name': PowerOnVM_Task} progress is 33%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 734.976030] env[62070]: DEBUG oslo_concurrency.lockutils [None req-884b25a6-886e-4a2f-85f7-4f7a1048c76f tempest-ServersTestBootFromVolume-622206804 tempest-ServersTestBootFromVolume-622206804-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 735.006328] env[62070]: DEBUG nova.network.neutron [req-1108d440-2007-4f6b-8819-60ac34d713a0 req-50038d26-3eb0-48cf-9d18-501e4f895811 service nova] [instance: 076aed5b-4b08-4f3b-a940-d9cd95c32e57] Updated VIF entry in instance network info cache for port d4fdb36b-1cfc-4dcd-86b8-98a8769b4224. 
{{(pid=62070) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 735.007562] env[62070]: DEBUG nova.network.neutron [req-1108d440-2007-4f6b-8819-60ac34d713a0 req-50038d26-3eb0-48cf-9d18-501e4f895811 service nova] [instance: 076aed5b-4b08-4f3b-a940-d9cd95c32e57] Updating instance_info_cache with network_info: [{"id": "d4fdb36b-1cfc-4dcd-86b8-98a8769b4224", "address": "fa:16:3e:83:18:a2", "network": {"id": "b61ea502-bdfd-4ddf-8bb9-1f2f2f003f65", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-216003123-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3e9dab208bda46418b994df4359da404", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "099fe970-c61f-4480-bed4-ae4f485fd82a", "external-id": "nsx-vlan-transportzone-678", "segmentation_id": 678, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd4fdb36b-1c", "ovs_interfaceid": "d4fdb36b-1cfc-4dcd-86b8-98a8769b4224", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 735.127685] env[62070]: DEBUG oslo_vmware.api [None req-87135f2c-55d6-41ab-9b21-58289e78f833 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Task: {'id': task-1121501, 'name': PowerOnVM_Task, 'duration_secs': 0.584561} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 735.128109] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-87135f2c-55d6-41ab-9b21-58289e78f833 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] [instance: 963feecc-ff58-4cbb-8d6f-3f9035337087] Powered on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 735.128443] env[62070]: INFO nova.compute.manager [None req-87135f2c-55d6-41ab-9b21-58289e78f833 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] [instance: 963feecc-ff58-4cbb-8d6f-3f9035337087] Took 9.32 seconds to spawn the instance on the hypervisor. [ 735.128863] env[62070]: DEBUG nova.compute.manager [None req-87135f2c-55d6-41ab-9b21-58289e78f833 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] [instance: 963feecc-ff58-4cbb-8d6f-3f9035337087] Checking state {{(pid=62070) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 735.130088] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9d4152f-7183-422e-b413-2c49e9347db8 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.211946] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1121502, 'name': CreateVM_Task, 'duration_secs': 0.760133} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 735.212138] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 076aed5b-4b08-4f3b-a940-d9cd95c32e57] Created VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 735.212894] env[62070]: DEBUG oslo_concurrency.lockutils [None req-0046a5be-b671-4771-8172-fdb6e585fc1d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 735.213069] env[62070]: DEBUG oslo_concurrency.lockutils [None req-0046a5be-b671-4771-8172-fdb6e585fc1d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Acquired lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 735.213615] env[62070]: DEBUG oslo_concurrency.lockutils [None req-0046a5be-b671-4771-8172-fdb6e585fc1d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 735.213871] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-18534951-db2b-4e4b-97d4-70c552c184a5 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.221010] env[62070]: DEBUG oslo_vmware.api [None req-0046a5be-b671-4771-8172-fdb6e585fc1d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Waiting for the task: (returnval){ [ 735.221010] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]525bb7c7-7f57-ad10-bb93-4aa74785d36b" [ 735.221010] env[62070]: _type = "Task" [ 735.221010] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 735.226937] env[62070]: DEBUG nova.network.neutron [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] [instance: fe378560-40b8-42c9-840d-b7d60de87c4d] Instance cache missing network info. {{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 735.235978] env[62070]: DEBUG oslo_vmware.api [None req-0046a5be-b671-4771-8172-fdb6e585fc1d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]525bb7c7-7f57-ad10-bb93-4aa74785d36b, 'name': SearchDatastore_Task, 'duration_secs': 0.009394} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 735.235978] env[62070]: DEBUG oslo_concurrency.lockutils [None req-0046a5be-b671-4771-8172-fdb6e585fc1d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Releasing lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 735.236111] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-0046a5be-b671-4771-8172-fdb6e585fc1d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] [instance: 076aed5b-4b08-4f3b-a940-d9cd95c32e57] Processing image 43ea607c-7ece-4601-9b11-75c6a16aa7dd {{(pid=62070) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 735.236318] env[62070]: DEBUG oslo_concurrency.lockutils [None req-0046a5be-b671-4771-8172-fdb6e585fc1d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 735.236444] env[62070]: DEBUG oslo_concurrency.lockutils [None req-0046a5be-b671-4771-8172-fdb6e585fc1d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Acquired lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 735.236642] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-0046a5be-b671-4771-8172-fdb6e585fc1d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 735.237199] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-68054b0a-a0bb-4913-b7c0-d098eabede3d {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.246346] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-0046a5be-b671-4771-8172-fdb6e585fc1d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 735.246556] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-0046a5be-b671-4771-8172-fdb6e585fc1d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62070) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 735.249114] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5f07c982-7cb5-4bd2-bddd-13839607cf94 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.254799] env[62070]: DEBUG oslo_vmware.api [None req-0046a5be-b671-4771-8172-fdb6e585fc1d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Waiting for the task: (returnval){ [ 735.254799] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]521188ec-6c74-ed3b-c305-c629ba489590" [ 735.254799] env[62070]: _type = "Task" [ 735.254799] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 735.264879] env[62070]: DEBUG oslo_vmware.api [None req-0046a5be-b671-4771-8172-fdb6e585fc1d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]521188ec-6c74-ed3b-c305-c629ba489590, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 735.387886] env[62070]: DEBUG nova.network.neutron [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] [instance: fe378560-40b8-42c9-840d-b7d60de87c4d] Updating instance_info_cache with network_info: [{"id": "f3e119aa-83f9-4e1d-b333-01b5bf1a1873", "address": "fa:16:3e:c2:5d:9d", "network": {"id": "c73b94bb-f960-4081-9afd-eb785d4bcf76", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1999445580-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a34c8ff9345b46c6976385bc455f7d5e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b8b5b5e2-866d-4ab5-b74d-4a47de0c4877", "external-id": "nsx-vlan-transportzone-74", "segmentation_id": 74, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf3e119aa-83", "ovs_interfaceid": "f3e119aa-83f9-4e1d-b333-01b5bf1a1873", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 735.393282] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1d7fc6f-2b04-4f8d-9109-9e626c081f04 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.403821] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b950141-e6f8-42c4-baa5-b205b0a9e366 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.444083] env[62070]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1204b2a-c4ad-4a6e-8162-18a3d672b8a8 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.461302] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-484d3aa5-7d81-4b67-ab2d-e57930a29ae8 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.465676] env[62070]: DEBUG oslo_vmware.api [None req-0046a5be-b671-4771-8172-fdb6e585fc1d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Task: {'id': task-1121504, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 735.476768] env[62070]: DEBUG nova.compute.provider_tree [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 735.509946] env[62070]: DEBUG oslo_concurrency.lockutils [req-1108d440-2007-4f6b-8819-60ac34d713a0 req-50038d26-3eb0-48cf-9d18-501e4f895811 service nova] Releasing lock "refresh_cache-076aed5b-4b08-4f3b-a940-d9cd95c32e57" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 735.600404] env[62070]: DEBUG nova.network.neutron [req-2f7403fc-59bc-41d8-b46c-3d81e18aa406 req-75fcace9-d832-41cf-b3da-55e3b3c60d42 service nova] [instance: 67e99ada-a8e6-4034-b19b-5b2cb883b735] Updated VIF entry in instance network info cache for port e01eb485-1347-4afb-b881-62797a5b84af. 
{{(pid=62070) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 735.600808] env[62070]: DEBUG nova.network.neutron [req-2f7403fc-59bc-41d8-b46c-3d81e18aa406 req-75fcace9-d832-41cf-b3da-55e3b3c60d42 service nova] [instance: 67e99ada-a8e6-4034-b19b-5b2cb883b735] Updating instance_info_cache with network_info: [{"id": "e01eb485-1347-4afb-b881-62797a5b84af", "address": "fa:16:3e:d1:62:ff", "network": {"id": "0d81bd04-b549-4e1f-97a2-0a0b9391dd3f", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-108214409-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.248", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c91e5eeeeb1742f499b2edaf76a93a3b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cf5bfbae-a882-4d34-be33-b31e274b3077", "external-id": "nsx-vlan-transportzone-556", "segmentation_id": 556, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape01eb485-13", "ovs_interfaceid": "e01eb485-1347-4afb-b881-62797a5b84af", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 735.656609] env[62070]: INFO nova.compute.manager [None req-87135f2c-55d6-41ab-9b21-58289e78f833 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] [instance: 963feecc-ff58-4cbb-8d6f-3f9035337087] Took 47.55 seconds to build instance. 
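
(Annotation, not part of the captured log.) The PowerOnVM_Task, CreateVM_Task and SearchDatastore_Task entries above all follow the same invoke-and-poll cycle that oslo.vmware wraps around the vSphere SOAP API: a task is started, then polled until vCenter reports success. The sketch below is illustrative only; the host, credentials and vm_ref are placeholders rather than values from this run, and it assumes oslo.vmware's public VMwareAPISession interface (invoke_api / wait_for_task).

from oslo_vmware import api

def power_on(session, vm_ref):
    # Start the asynchronous vSphere task; this is what produces the
    # "Invoking VirtualMachine.PowerOnVM_Task with opID=..." request_handler line.
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    # Poll until the task reaches SUCCESS (or raise on error). Each poll shows up
    # as a "_poll_task ... progress is N%" line, and completion as
    # "Task: {...} completed successfully."
    session.wait_for_task(task)

# Placeholder connection details -- not taken from this environment.
# vm_ref would come from a PropertyCollector / managed-object lookup (not shown).
session = api.VMwareAPISession('vc.example.test', 'user', 'secret',
                               api_retry_count=10, task_poll_interval=0.5)
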
[ 735.691671] env[62070]: DEBUG nova.network.neutron [None req-81043ab4-38b5-4edb-afd0-424b75f917e0 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] [instance: d148d561-3211-4f1f-965a-f2b14cd60b11] Updating instance_info_cache with network_info: [{"id": "c5e6098a-ebbb-4eee-ba72-4ddaad679830", "address": "fa:16:3e:7b:ab:3c", "network": {"id": "df33a08d-88db-4a22-846f-5b414705fc65", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.22", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "9d42cb2bbadf40d6b35f237f71234611", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4728adca-2846-416a-91a3-deb898faf1f3", "external-id": "nsx-vlan-transportzone-823", "segmentation_id": 823, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc5e6098a-eb", "ovs_interfaceid": "c5e6098a-ebbb-4eee-ba72-4ddaad679830", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 735.766899] env[62070]: DEBUG oslo_vmware.api [None req-0046a5be-b671-4771-8172-fdb6e585fc1d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]521188ec-6c74-ed3b-c305-c629ba489590, 'name': SearchDatastore_Task, 'duration_secs': 0.009334} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 735.767729] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-52bfb34f-afa8-433c-acf2-1637059e0ad6 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.775405] env[62070]: DEBUG oslo_vmware.api [None req-0046a5be-b671-4771-8172-fdb6e585fc1d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Waiting for the task: (returnval){ [ 735.775405] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]52498499-73bb-f8e2-ee90-d80122b982ba" [ 735.775405] env[62070]: _type = "Task" [ 735.775405] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 735.781312] env[62070]: DEBUG oslo_vmware.api [None req-0046a5be-b671-4771-8172-fdb6e585fc1d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52498499-73bb-f8e2-ee90-d80122b982ba, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 735.891731] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Releasing lock "refresh_cache-fe378560-40b8-42c9-840d-b7d60de87c4d" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 735.892099] env[62070]: DEBUG nova.compute.manager [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] [instance: fe378560-40b8-42c9-840d-b7d60de87c4d] Instance network_info: |[{"id": "f3e119aa-83f9-4e1d-b333-01b5bf1a1873", "address": "fa:16:3e:c2:5d:9d", "network": {"id": "c73b94bb-f960-4081-9afd-eb785d4bcf76", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1999445580-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a34c8ff9345b46c6976385bc455f7d5e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b8b5b5e2-866d-4ab5-b74d-4a47de0c4877", "external-id": "nsx-vlan-transportzone-74", "segmentation_id": 74, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf3e119aa-83", "ovs_interfaceid": "f3e119aa-83f9-4e1d-b333-01b5bf1a1873", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 735.892592] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] [instance: fe378560-40b8-42c9-840d-b7d60de87c4d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c2:5d:9d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b8b5b5e2-866d-4ab5-b74d-4a47de0c4877', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f3e119aa-83f9-4e1d-b333-01b5bf1a1873', 'vif_model': 'vmxnet3'}] {{(pid=62070) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 735.900760] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Creating folder: Project (a34c8ff9345b46c6976385bc455f7d5e). Parent ref: group-v245319. 
{{(pid=62070) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 735.901052] env[62070]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-960dec76-1f31-4fa6-9c16-b24eece4ba85 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.912480] env[62070]: INFO nova.virt.vmwareapi.vm_util [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Created folder: Project (a34c8ff9345b46c6976385bc455f7d5e) in parent group-v245319. [ 735.912685] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Creating folder: Instances. Parent ref: group-v245352. {{(pid=62070) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 735.912891] env[62070]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a70cf6cb-3ac5-4a2e-abe0-c0ef5d2b5d24 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.922733] env[62070]: INFO nova.virt.vmwareapi.vm_util [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Created folder: Instances in parent group-v245352. [ 735.922962] env[62070]: DEBUG oslo.service.loopingcall [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 735.923170] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fe378560-40b8-42c9-840d-b7d60de87c4d] Creating VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 735.923370] env[62070]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6e1c7e42-8acb-4394-9f68-2d3346117b0c {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.943085] env[62070]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 735.943085] env[62070]: value = "task-1121507" [ 735.943085] env[62070]: _type = "Task" [ 735.943085] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 735.958337] env[62070]: DEBUG oslo_vmware.api [None req-0046a5be-b671-4771-8172-fdb6e585fc1d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Task: {'id': task-1121504, 'name': PowerOnVM_Task, 'duration_secs': 0.555702} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 735.958599] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-0046a5be-b671-4771-8172-fdb6e585fc1d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] [instance: 1c1730e5-88af-4c7f-8bcc-d494db2cd723] Powered on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 735.958776] env[62070]: INFO nova.compute.manager [None req-0046a5be-b671-4771-8172-fdb6e585fc1d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] [instance: 1c1730e5-88af-4c7f-8bcc-d494db2cd723] Took 7.49 seconds to spawn the instance on the hypervisor. [ 735.958950] env[62070]: DEBUG nova.compute.manager [None req-0046a5be-b671-4771-8172-fdb6e585fc1d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] [instance: 1c1730e5-88af-4c7f-8bcc-d494db2cd723] Checking state {{(pid=62070) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 735.959693] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67c12624-9f1b-4d67-b020-7bcd69ebc219 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.980197] env[62070]: DEBUG nova.scheduler.client.report [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 735.986184] env[62070]: DEBUG nova.compute.manager [req-61b087db-73f3-473c-b663-23b00c2aee84 req-7ebe8c2a-4105-4a67-a07a-37aa6106020e service nova] [instance: fe378560-40b8-42c9-840d-b7d60de87c4d] Received event network-vif-plugged-f3e119aa-83f9-4e1d-b333-01b5bf1a1873 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 735.986406] env[62070]: DEBUG oslo_concurrency.lockutils [req-61b087db-73f3-473c-b663-23b00c2aee84 req-7ebe8c2a-4105-4a67-a07a-37aa6106020e service nova] Acquiring lock "fe378560-40b8-42c9-840d-b7d60de87c4d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 735.986927] env[62070]: DEBUG oslo_concurrency.lockutils [req-61b087db-73f3-473c-b663-23b00c2aee84 req-7ebe8c2a-4105-4a67-a07a-37aa6106020e service nova] Lock "fe378560-40b8-42c9-840d-b7d60de87c4d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 735.986927] env[62070]: DEBUG oslo_concurrency.lockutils [req-61b087db-73f3-473c-b663-23b00c2aee84 req-7ebe8c2a-4105-4a67-a07a-37aa6106020e service nova] Lock "fe378560-40b8-42c9-840d-b7d60de87c4d-events" "released" by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 735.987155] env[62070]: DEBUG nova.compute.manager [req-61b087db-73f3-473c-b663-23b00c2aee84 req-7ebe8c2a-4105-4a67-a07a-37aa6106020e service nova] [instance: fe378560-40b8-42c9-840d-b7d60de87c4d] No waiting events found dispatching network-vif-plugged-f3e119aa-83f9-4e1d-b333-01b5bf1a1873 {{(pid=62070) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 735.987195] env[62070]: WARNING nova.compute.manager [req-61b087db-73f3-473c-b663-23b00c2aee84 req-7ebe8c2a-4105-4a67-a07a-37aa6106020e service nova] [instance: fe378560-40b8-42c9-840d-b7d60de87c4d] Received unexpected event network-vif-plugged-f3e119aa-83f9-4e1d-b333-01b5bf1a1873 for instance with vm_state building and task_state spawning. [ 735.987351] env[62070]: DEBUG nova.compute.manager [req-61b087db-73f3-473c-b663-23b00c2aee84 req-7ebe8c2a-4105-4a67-a07a-37aa6106020e service nova] [instance: fe378560-40b8-42c9-840d-b7d60de87c4d] Received event network-changed-f3e119aa-83f9-4e1d-b333-01b5bf1a1873 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 735.987503] env[62070]: DEBUG nova.compute.manager [req-61b087db-73f3-473c-b663-23b00c2aee84 req-7ebe8c2a-4105-4a67-a07a-37aa6106020e service nova] [instance: fe378560-40b8-42c9-840d-b7d60de87c4d] Refreshing instance network info cache due to event network-changed-f3e119aa-83f9-4e1d-b333-01b5bf1a1873. {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 735.987904] env[62070]: DEBUG oslo_concurrency.lockutils [req-61b087db-73f3-473c-b663-23b00c2aee84 req-7ebe8c2a-4105-4a67-a07a-37aa6106020e service nova] Acquiring lock "refresh_cache-fe378560-40b8-42c9-840d-b7d60de87c4d" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 735.987904] env[62070]: DEBUG oslo_concurrency.lockutils [req-61b087db-73f3-473c-b663-23b00c2aee84 req-7ebe8c2a-4105-4a67-a07a-37aa6106020e service nova] Acquired lock "refresh_cache-fe378560-40b8-42c9-840d-b7d60de87c4d" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 735.988063] env[62070]: DEBUG nova.network.neutron [req-61b087db-73f3-473c-b663-23b00c2aee84 req-7ebe8c2a-4105-4a67-a07a-37aa6106020e service nova] [instance: fe378560-40b8-42c9-840d-b7d60de87c4d] Refreshing network info cache for port f3e119aa-83f9-4e1d-b333-01b5bf1a1873 {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 736.107027] env[62070]: DEBUG oslo_concurrency.lockutils [req-2f7403fc-59bc-41d8-b46c-3d81e18aa406 req-75fcace9-d832-41cf-b3da-55e3b3c60d42 service nova] Releasing lock "refresh_cache-67e99ada-a8e6-4034-b19b-5b2cb883b735" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 736.159168] env[62070]: DEBUG oslo_concurrency.lockutils [None req-87135f2c-55d6-41ab-9b21-58289e78f833 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Lock "963feecc-ff58-4cbb-8d6f-3f9035337087" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 121.913s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 736.194645] env[62070]: DEBUG oslo_concurrency.lockutils [None 
req-81043ab4-38b5-4edb-afd0-424b75f917e0 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Releasing lock "refresh_cache-d148d561-3211-4f1f-965a-f2b14cd60b11" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 736.283739] env[62070]: DEBUG oslo_vmware.api [None req-0046a5be-b671-4771-8172-fdb6e585fc1d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52498499-73bb-f8e2-ee90-d80122b982ba, 'name': SearchDatastore_Task, 'duration_secs': 0.029183} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 736.284031] env[62070]: DEBUG oslo_concurrency.lockutils [None req-0046a5be-b671-4771-8172-fdb6e585fc1d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Releasing lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 736.284298] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-0046a5be-b671-4771-8172-fdb6e585fc1d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore2] 076aed5b-4b08-4f3b-a940-d9cd95c32e57/076aed5b-4b08-4f3b-a940-d9cd95c32e57.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 736.284548] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5ef1b257-3456-4007-9b1b-345768acb6ec {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.290619] env[62070]: DEBUG oslo_vmware.api [None req-0046a5be-b671-4771-8172-fdb6e585fc1d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Waiting for the task: (returnval){ [ 736.290619] env[62070]: value = "task-1121508" [ 736.290619] env[62070]: _type = "Task" [ 736.290619] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 736.298451] env[62070]: DEBUG oslo_vmware.api [None req-0046a5be-b671-4771-8172-fdb6e585fc1d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Task: {'id': task-1121508, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 736.453235] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1121507, 'name': CreateVM_Task} progress is 99%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 736.477814] env[62070]: INFO nova.compute.manager [None req-0046a5be-b671-4771-8172-fdb6e585fc1d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] [instance: 1c1730e5-88af-4c7f-8bcc-d494db2cd723] Took 46.43 seconds to build instance. 
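
(Annotation, not part of the captured log.) The lock traffic around the refresh_cache-* and compute_resources locks is emitted by oslo.concurrency's lockutils helpers themselves: the 'Acquiring lock / Acquired lock / Releasing lock' triples come from the lockutils.lock() context manager, while the 'acquired by "..." :: waited Ns' / '"released" by "..." :: held Ns' pairs come from the synchronized() decorator. A minimal sketch of both forms, using made-up lock names rather than anything from this run:

from oslo_concurrency import lockutils

# Decorator form: serializes callers on the same lock name and logs the
# 'acquired by ... :: waited Ns' / '"released" by ... :: held Ns' pair.
@lockutils.synchronized('refresh_cache-example', lock_file_prefix='nova-')
def refresh_cache_example():
    pass  # critical section: rebuild the instance network info cache here

# Context-manager form: logs 'Acquiring lock ...' / 'Acquired lock ...' /
# 'Releasing lock ...' around an ad-hoc critical section.
def claim_resources_example():
    with lockutils.lock('compute_resources-example', lock_file_prefix='nova-'):
        pass  # critical section: update the resource tracker here
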
[ 736.484536] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.612s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 736.485131] env[62070]: DEBUG nova.compute.manager [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] [instance: bcafa04d-904b-4eab-aba1-35180c2d4b22] Start building networks asynchronously for instance. {{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 736.487596] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.787s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 736.489029] env[62070]: INFO nova.compute.claims [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] [instance: 5a146d8f-6921-4b3e-8696-d2804fb855ba] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 736.661896] env[62070]: DEBUG nova.compute.manager [None req-682a69a1-3ef5-481f-9200-81623493ef57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 13e3576e-4f4c-4541-a637-daa124cbf8dd] Starting instance... 
{{(pid=62070) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 736.683308] env[62070]: INFO nova.compute.manager [None req-ef1f59bf-97b7-4c08-92d0-c5b1ec61ff75 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] [instance: 963feecc-ff58-4cbb-8d6f-3f9035337087] Rescuing [ 736.683596] env[62070]: DEBUG oslo_concurrency.lockutils [None req-ef1f59bf-97b7-4c08-92d0-c5b1ec61ff75 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Acquiring lock "refresh_cache-963feecc-ff58-4cbb-8d6f-3f9035337087" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 736.683756] env[62070]: DEBUG oslo_concurrency.lockutils [None req-ef1f59bf-97b7-4c08-92d0-c5b1ec61ff75 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Acquired lock "refresh_cache-963feecc-ff58-4cbb-8d6f-3f9035337087" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 736.683916] env[62070]: DEBUG nova.network.neutron [None req-ef1f59bf-97b7-4c08-92d0-c5b1ec61ff75 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] [instance: 963feecc-ff58-4cbb-8d6f-3f9035337087] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 736.727177] env[62070]: DEBUG nova.network.neutron [req-61b087db-73f3-473c-b663-23b00c2aee84 req-7ebe8c2a-4105-4a67-a07a-37aa6106020e service nova] [instance: fe378560-40b8-42c9-840d-b7d60de87c4d] Updated VIF entry in instance network info cache for port f3e119aa-83f9-4e1d-b333-01b5bf1a1873. {{(pid=62070) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 736.727314] env[62070]: DEBUG nova.network.neutron [req-61b087db-73f3-473c-b663-23b00c2aee84 req-7ebe8c2a-4105-4a67-a07a-37aa6106020e service nova] [instance: fe378560-40b8-42c9-840d-b7d60de87c4d] Updating instance_info_cache with network_info: [{"id": "f3e119aa-83f9-4e1d-b333-01b5bf1a1873", "address": "fa:16:3e:c2:5d:9d", "network": {"id": "c73b94bb-f960-4081-9afd-eb785d4bcf76", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1999445580-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a34c8ff9345b46c6976385bc455f7d5e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b8b5b5e2-866d-4ab5-b74d-4a47de0c4877", "external-id": "nsx-vlan-transportzone-74", "segmentation_id": 74, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf3e119aa-83", "ovs_interfaceid": "f3e119aa-83f9-4e1d-b333-01b5bf1a1873", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 736.801706] env[62070]: DEBUG oslo_vmware.api [None req-0046a5be-b671-4771-8172-fdb6e585fc1d tempest-MultipleCreateTestJSON-2024650253 
tempest-MultipleCreateTestJSON-2024650253-project-member] Task: {'id': task-1121508, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 736.953868] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1121507, 'name': CreateVM_Task, 'duration_secs': 0.547612} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 736.954154] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fe378560-40b8-42c9-840d-b7d60de87c4d] Created VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 736.954741] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 736.954903] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Acquired lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 736.955278] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 736.955581] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6261f9e2-9da3-4935-9010-4012f060dc0e {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.960413] env[62070]: DEBUG oslo_vmware.api [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Waiting for the task: (returnval){ [ 736.960413] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]5280d087-776a-14e3-de80-869dbd827b12" [ 736.960413] env[62070]: _type = "Task" [ 736.960413] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 736.968442] env[62070]: DEBUG oslo_vmware.api [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]5280d087-776a-14e3-de80-869dbd827b12, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 736.980338] env[62070]: DEBUG oslo_concurrency.lockutils [None req-0046a5be-b671-4771-8172-fdb6e585fc1d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Lock "1c1730e5-88af-4c7f-8bcc-d494db2cd723" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 106.809s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 736.993693] env[62070]: DEBUG nova.compute.utils [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Using /dev/sd instead of None {{(pid=62070) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 736.995327] env[62070]: DEBUG nova.compute.manager [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] [instance: bcafa04d-904b-4eab-aba1-35180c2d4b22] Allocating IP information in the background. {{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 736.995639] env[62070]: DEBUG nova.network.neutron [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] [instance: bcafa04d-904b-4eab-aba1-35180c2d4b22] allocate_for_instance() {{(pid=62070) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 737.049665] env[62070]: DEBUG nova.policy [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'af61f8424f7445e1bb04628a7ce52dd0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a34c8ff9345b46c6976385bc455f7d5e', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62070) authorize /opt/stack/nova/nova/policy.py:201}} [ 737.185628] env[62070]: DEBUG oslo_concurrency.lockutils [None req-682a69a1-3ef5-481f-9200-81623493ef57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 737.229522] env[62070]: DEBUG oslo_concurrency.lockutils [req-61b087db-73f3-473c-b663-23b00c2aee84 req-7ebe8c2a-4105-4a67-a07a-37aa6106020e service nova] Releasing lock "refresh_cache-fe378560-40b8-42c9-840d-b7d60de87c4d" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 737.306716] env[62070]: DEBUG oslo_vmware.api [None req-0046a5be-b671-4771-8172-fdb6e585fc1d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Task: {'id': task-1121508, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 737.316806] env[62070]: DEBUG nova.network.neutron [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] [instance: bcafa04d-904b-4eab-aba1-35180c2d4b22] Successfully created port: 1989edae-bc69-457e-ab09-93742636d663 {{(pid=62070) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 737.453372] env[62070]: DEBUG nova.network.neutron [None req-ef1f59bf-97b7-4c08-92d0-c5b1ec61ff75 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] [instance: 963feecc-ff58-4cbb-8d6f-3f9035337087] Updating instance_info_cache with network_info: [{"id": "eaa2c81a-41e9-4fe8-b2b0-9977156d1505", "address": "fa:16:3e:eb:d9:37", "network": {"id": "08004b49-dbc2-4186-9e28-4268e947e8ee", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-2022236674-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "2ecbd5f22c024de8a6b1c45096cb79a7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69e41c97-4d75-4041-ae71-321e7e9d480b", "external-id": "nsx-vlan-transportzone-483", "segmentation_id": 483, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeaa2c81a-41", "ovs_interfaceid": "eaa2c81a-41e9-4fe8-b2b0-9977156d1505", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 737.471769] env[62070]: DEBUG oslo_vmware.api [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]5280d087-776a-14e3-de80-869dbd827b12, 'name': SearchDatastore_Task, 'duration_secs': 0.013126} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 737.472742] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Releasing lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 737.472995] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] [instance: fe378560-40b8-42c9-840d-b7d60de87c4d] Processing image 43ea607c-7ece-4601-9b11-75c6a16aa7dd {{(pid=62070) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 737.473427] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 737.473614] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Acquired lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 737.473802] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 737.475034] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-10828084-c74a-471a-85f8-b96e035380fc {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.484488] env[62070]: DEBUG nova.compute.manager [None req-c8619bac-3476-49c3-92b7-dcaac00d2762 tempest-ServersTestFqdnHostnames-1065104165 tempest-ServersTestFqdnHostnames-1065104165-project-member] [instance: 748c94c7-1233-44f4-a71a-176b26518399] Starting instance... {{(pid=62070) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 737.489528] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 737.489663] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62070) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 737.494026] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-770a5312-2598-4b69-997e-20f85e5d0e5a {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.496490] env[62070]: DEBUG oslo_vmware.api [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Waiting for the task: (returnval){ [ 737.496490] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]52fe60bf-3a62-68f9-1d16-b71815ddffbd" [ 737.496490] env[62070]: _type = "Task" [ 737.496490] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 737.500375] env[62070]: DEBUG nova.compute.manager [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] [instance: bcafa04d-904b-4eab-aba1-35180c2d4b22] Start building block device mappings for instance. {{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 737.512030] env[62070]: DEBUG oslo_vmware.api [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52fe60bf-3a62-68f9-1d16-b71815ddffbd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 737.709754] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a99dbf1-6496-48ef-80a5-5b4a34a3929e {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.729448] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-81043ab4-38b5-4edb-afd0-424b75f917e0 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] [instance: d148d561-3211-4f1f-965a-f2b14cd60b11] Updating instance 'd148d561-3211-4f1f-965a-f2b14cd60b11' progress to 0 {{(pid=62070) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 737.803224] env[62070]: DEBUG oslo_vmware.api [None req-0046a5be-b671-4771-8172-fdb6e585fc1d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Task: {'id': task-1121508, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.320025} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 737.805808] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-0046a5be-b671-4771-8172-fdb6e585fc1d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore2] 076aed5b-4b08-4f3b-a940-d9cd95c32e57/076aed5b-4b08-4f3b-a940-d9cd95c32e57.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 737.806049] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-0046a5be-b671-4771-8172-fdb6e585fc1d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] [instance: 076aed5b-4b08-4f3b-a940-d9cd95c32e57] Extending root virtual disk to 1048576 {{(pid=62070) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 737.807180] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3e53e38f-0385-4681-8d8e-0b4c350af6c8 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.812627] env[62070]: DEBUG oslo_vmware.api [None req-0046a5be-b671-4771-8172-fdb6e585fc1d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Waiting for the task: (returnval){ [ 737.812627] env[62070]: value = "task-1121509" [ 737.812627] env[62070]: _type = "Task" [ 737.812627] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 737.822797] env[62070]: DEBUG oslo_vmware.api [None req-0046a5be-b671-4771-8172-fdb6e585fc1d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Task: {'id': task-1121509, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 737.957515] env[62070]: DEBUG oslo_concurrency.lockutils [None req-ef1f59bf-97b7-4c08-92d0-c5b1ec61ff75 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Releasing lock "refresh_cache-963feecc-ff58-4cbb-8d6f-3f9035337087" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 737.972820] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59c7f8a3-ec26-48fe-88c8-b0d462692c96 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.987276] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebaef64c-e784-46bf-8423-1a5fb443dd84 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.028810] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-889a322c-168a-4c28-9af0-99923afd1cf4 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.034055] env[62070]: DEBUG oslo_concurrency.lockutils [None req-c8619bac-3476-49c3-92b7-dcaac00d2762 tempest-ServersTestFqdnHostnames-1065104165 tempest-ServersTestFqdnHostnames-1065104165-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 738.039816] env[62070]: DEBUG oslo_vmware.api [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52fe60bf-3a62-68f9-1d16-b71815ddffbd, 'name': SearchDatastore_Task, 'duration_secs': 0.048091} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 738.042615] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-55301b5c-097f-469b-bcfb-6d750be0295b {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.045590] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f07d447-c632-4a7b-9140-e8f978b5312b {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.052352] env[62070]: DEBUG oslo_vmware.api [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Waiting for the task: (returnval){ [ 738.052352] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]52bd3f66-a138-dd9f-1404-970e7d2b420d" [ 738.052352] env[62070]: _type = "Task" [ 738.052352] env[62070]: } to complete. 
{{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 738.061302] env[62070]: DEBUG nova.compute.provider_tree [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 738.071028] env[62070]: DEBUG oslo_vmware.api [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52bd3f66-a138-dd9f-1404-970e7d2b420d, 'name': SearchDatastore_Task, 'duration_secs': 0.011134} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 738.071801] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Releasing lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 738.072122] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore2] fe378560-40b8-42c9-840d-b7d60de87c4d/fe378560-40b8-42c9-840d-b7d60de87c4d.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 738.072409] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-58644bf8-945a-4dc3-b3a0-c4106d7e2381 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.079280] env[62070]: DEBUG oslo_vmware.api [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Waiting for the task: (returnval){ [ 738.079280] env[62070]: value = "task-1121510" [ 738.079280] env[62070]: _type = "Task" [ 738.079280] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 738.087494] env[62070]: DEBUG oslo_vmware.api [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Task: {'id': task-1121510, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 738.237479] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-81043ab4-38b5-4edb-afd0-424b75f917e0 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] [instance: d148d561-3211-4f1f-965a-f2b14cd60b11] Powering off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 738.237840] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-566726b9-d19f-446e-8208-643f8c8e8dab {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.244309] env[62070]: DEBUG oslo_vmware.api [None req-81043ab4-38b5-4edb-afd0-424b75f917e0 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Waiting for the task: (returnval){ [ 738.244309] env[62070]: value = "task-1121511" [ 738.244309] env[62070]: _type = "Task" [ 738.244309] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 738.252406] env[62070]: DEBUG oslo_vmware.api [None req-81043ab4-38b5-4edb-afd0-424b75f917e0 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Task: {'id': task-1121511, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 738.322520] env[62070]: DEBUG oslo_vmware.api [None req-0046a5be-b671-4771-8172-fdb6e585fc1d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Task: {'id': task-1121509, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.17946} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 738.323866] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-0046a5be-b671-4771-8172-fdb6e585fc1d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] [instance: 076aed5b-4b08-4f3b-a940-d9cd95c32e57] Extended root virtual disk {{(pid=62070) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 738.323988] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eec29113-a625-473f-bf8b-b69e0dafd98e {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.347261] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-0046a5be-b671-4771-8172-fdb6e585fc1d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] [instance: 076aed5b-4b08-4f3b-a940-d9cd95c32e57] Reconfiguring VM instance instance-00000028 to attach disk [datastore2] 076aed5b-4b08-4f3b-a940-d9cd95c32e57/076aed5b-4b08-4f3b-a940-d9cd95c32e57.vmdk or device None with type sparse {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 738.347543] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2d0ec022-d1b5-485c-93c7-bf378d958159 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.367424] env[62070]: DEBUG oslo_vmware.api [None req-0046a5be-b671-4771-8172-fdb6e585fc1d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Waiting for the task: (returnval){ [ 738.367424] env[62070]: value = "task-1121512" [ 738.367424] env[62070]: _type = "Task" [ 738.367424] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 738.376653] env[62070]: DEBUG oslo_vmware.api [None req-0046a5be-b671-4771-8172-fdb6e585fc1d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Task: {'id': task-1121512, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 738.495021] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-ef1f59bf-97b7-4c08-92d0-c5b1ec61ff75 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] [instance: 963feecc-ff58-4cbb-8d6f-3f9035337087] Powering off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 738.495324] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e81e3ae0-9636-4ffd-9d01-ae12a5bb0305 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.503923] env[62070]: DEBUG oslo_vmware.api [None req-ef1f59bf-97b7-4c08-92d0-c5b1ec61ff75 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Waiting for the task: (returnval){ [ 738.503923] env[62070]: value = "task-1121513" [ 738.503923] env[62070]: _type = "Task" [ 738.503923] env[62070]: } to complete. 
{{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 738.514029] env[62070]: DEBUG oslo_vmware.api [None req-ef1f59bf-97b7-4c08-92d0-c5b1ec61ff75 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Task: {'id': task-1121513, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 738.535416] env[62070]: DEBUG nova.compute.manager [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] [instance: bcafa04d-904b-4eab-aba1-35180c2d4b22] Start spawning the instance on the hypervisor. {{(pid=62070) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 738.563411] env[62070]: DEBUG nova.virt.hardware [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T09:21:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T09:21:17Z,direct_url=,disk_format='vmdk',id=43ea607c-7ece-4601-9b11-75c6a16aa7dd,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9d42cb2bbadf40d6b35f237f71234611',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T09:21:18Z,virtual_size=,visibility=), allow threads: False {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 738.563913] env[62070]: DEBUG nova.virt.hardware [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Flavor limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 738.564191] env[62070]: DEBUG nova.virt.hardware [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Image limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 738.564508] env[62070]: DEBUG nova.virt.hardware [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Flavor pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 738.564766] env[62070]: DEBUG nova.virt.hardware [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Image pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 738.565041] env[62070]: DEBUG nova.virt.hardware [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62070) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:430}} [ 738.565368] env[62070]: DEBUG nova.virt.hardware [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 738.565638] env[62070]: DEBUG nova.virt.hardware [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 738.565846] env[62070]: DEBUG nova.virt.hardware [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Got 1 possible topologies {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 738.566170] env[62070]: DEBUG nova.virt.hardware [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 738.566426] env[62070]: DEBUG nova.virt.hardware [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 738.567634] env[62070]: DEBUG nova.scheduler.client.report [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 738.574360] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79547afb-0a8a-4c42-aedb-6a591b23570c {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.592623] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-490c6dcb-5644-4b86-95b1-c68e64fb7de9 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.601424] env[62070]: DEBUG oslo_vmware.api [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Task: {'id': task-1121510, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 738.754746] env[62070]: DEBUG oslo_vmware.api [None req-81043ab4-38b5-4edb-afd0-424b75f917e0 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Task: {'id': task-1121511, 'name': PowerOffVM_Task, 'duration_secs': 0.379251} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 738.754924] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-81043ab4-38b5-4edb-afd0-424b75f917e0 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] [instance: d148d561-3211-4f1f-965a-f2b14cd60b11] Powered off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 738.755028] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-81043ab4-38b5-4edb-afd0-424b75f917e0 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] [instance: d148d561-3211-4f1f-965a-f2b14cd60b11] Updating instance 'd148d561-3211-4f1f-965a-f2b14cd60b11' progress to 17 {{(pid=62070) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 738.876611] env[62070]: DEBUG nova.compute.manager [req-abdc89e0-b884-42a5-af30-ee880393d440 req-d33ecb46-f8f7-4de9-b39e-d909a076fc81 service nova] [instance: bcafa04d-904b-4eab-aba1-35180c2d4b22] Received event network-vif-plugged-1989edae-bc69-457e-ab09-93742636d663 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 738.876865] env[62070]: DEBUG oslo_concurrency.lockutils [req-abdc89e0-b884-42a5-af30-ee880393d440 req-d33ecb46-f8f7-4de9-b39e-d909a076fc81 service nova] Acquiring lock "bcafa04d-904b-4eab-aba1-35180c2d4b22-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 738.877122] env[62070]: DEBUG oslo_concurrency.lockutils [req-abdc89e0-b884-42a5-af30-ee880393d440 req-d33ecb46-f8f7-4de9-b39e-d909a076fc81 service nova] Lock "bcafa04d-904b-4eab-aba1-35180c2d4b22-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 738.877306] env[62070]: DEBUG oslo_concurrency.lockutils [req-abdc89e0-b884-42a5-af30-ee880393d440 req-d33ecb46-f8f7-4de9-b39e-d909a076fc81 service nova] Lock "bcafa04d-904b-4eab-aba1-35180c2d4b22-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 738.877495] env[62070]: DEBUG nova.compute.manager [req-abdc89e0-b884-42a5-af30-ee880393d440 req-d33ecb46-f8f7-4de9-b39e-d909a076fc81 service nova] [instance: bcafa04d-904b-4eab-aba1-35180c2d4b22] No waiting events found dispatching network-vif-plugged-1989edae-bc69-457e-ab09-93742636d663 {{(pid=62070) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 738.877673] env[62070]: WARNING nova.compute.manager [req-abdc89e0-b884-42a5-af30-ee880393d440 req-d33ecb46-f8f7-4de9-b39e-d909a076fc81 service nova] [instance: bcafa04d-904b-4eab-aba1-35180c2d4b22] Received unexpected event network-vif-plugged-1989edae-bc69-457e-ab09-93742636d663 for instance with vm_state building and task_state spawning. 
[ 738.884546] env[62070]: DEBUG oslo_vmware.api [None req-0046a5be-b671-4771-8172-fdb6e585fc1d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Task: {'id': task-1121512, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 738.994482] env[62070]: DEBUG nova.network.neutron [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] [instance: bcafa04d-904b-4eab-aba1-35180c2d4b22] Successfully updated port: 1989edae-bc69-457e-ab09-93742636d663 {{(pid=62070) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 739.015800] env[62070]: DEBUG oslo_vmware.api [None req-ef1f59bf-97b7-4c08-92d0-c5b1ec61ff75 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Task: {'id': task-1121513, 'name': PowerOffVM_Task, 'duration_secs': 0.436773} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 739.016102] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-ef1f59bf-97b7-4c08-92d0-c5b1ec61ff75 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] [instance: 963feecc-ff58-4cbb-8d6f-3f9035337087] Powered off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 739.017221] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5282902c-b248-44d2-a199-e1b3d57b301d {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.036259] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87561351-5ea5-439c-b392-62363d9081d1 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.069385] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-ef1f59bf-97b7-4c08-92d0-c5b1ec61ff75 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] [instance: 963feecc-ff58-4cbb-8d6f-3f9035337087] Powering off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 739.069678] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1df34c7e-d2d9-42f8-a26f-3b6d05e4b28f {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.076334] env[62070]: DEBUG oslo_vmware.api [None req-ef1f59bf-97b7-4c08-92d0-c5b1ec61ff75 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Waiting for the task: (returnval){ [ 739.076334] env[62070]: value = "task-1121514" [ 739.076334] env[62070]: _type = "Task" [ 739.076334] env[62070]: } to complete. 
{{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 739.079985] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.592s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 739.080479] env[62070]: DEBUG nova.compute.manager [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] [instance: 5a146d8f-6921-4b3e-8696-d2804fb855ba] Start building networks asynchronously for instance. {{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 739.083115] env[62070]: DEBUG oslo_concurrency.lockutils [None req-b307d371-7291-4b27-bac8-b0cebaeb9acc tempest-ServerDiagnosticsNegativeTest-1306946091 tempest-ServerDiagnosticsNegativeTest-1306946091-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 12.793s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 739.084921] env[62070]: INFO nova.compute.claims [None req-b307d371-7291-4b27-bac8-b0cebaeb9acc tempest-ServerDiagnosticsNegativeTest-1306946091 tempest-ServerDiagnosticsNegativeTest-1306946091-project-member] [instance: 42a5c5d8-5c3a-4568-b212-d87f2951a334] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 739.096060] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-ef1f59bf-97b7-4c08-92d0-c5b1ec61ff75 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] [instance: 963feecc-ff58-4cbb-8d6f-3f9035337087] VM already powered off {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1509}} [ 739.096060] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-ef1f59bf-97b7-4c08-92d0-c5b1ec61ff75 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] [instance: 963feecc-ff58-4cbb-8d6f-3f9035337087] Processing image 43ea607c-7ece-4601-9b11-75c6a16aa7dd {{(pid=62070) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 739.096060] env[62070]: DEBUG oslo_concurrency.lockutils [None req-ef1f59bf-97b7-4c08-92d0-c5b1ec61ff75 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 739.096060] env[62070]: DEBUG oslo_concurrency.lockutils [None req-ef1f59bf-97b7-4c08-92d0-c5b1ec61ff75 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Acquired lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 739.096318] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-ef1f59bf-97b7-4c08-92d0-c5b1ec61ff75 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Creating directory 
with path [datastore1] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 739.096414] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-36012233-3dd0-49ac-aef2-f35714d14708 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.101731] env[62070]: DEBUG oslo_vmware.api [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Task: {'id': task-1121510, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.607678} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 739.101939] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore2] fe378560-40b8-42c9-840d-b7d60de87c4d/fe378560-40b8-42c9-840d-b7d60de87c4d.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 739.102167] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] [instance: fe378560-40b8-42c9-840d-b7d60de87c4d] Extending root virtual disk to 1048576 {{(pid=62070) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 739.102409] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d8c8b1c0-7ed2-4913-9606-0012bba8e86c {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.107783] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-ef1f59bf-97b7-4c08-92d0-c5b1ec61ff75 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 739.108013] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-ef1f59bf-97b7-4c08-92d0-c5b1ec61ff75 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62070) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 739.109999] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-417f0e3f-e2b5-4410-8a39-acde7f85175a {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.112909] env[62070]: DEBUG oslo_vmware.api [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Waiting for the task: (returnval){ [ 739.112909] env[62070]: value = "task-1121515" [ 739.112909] env[62070]: _type = "Task" [ 739.112909] env[62070]: } to complete. 
{{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 739.118328] env[62070]: DEBUG oslo_vmware.api [None req-ef1f59bf-97b7-4c08-92d0-c5b1ec61ff75 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Waiting for the task: (returnval){ [ 739.118328] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]52007acd-c527-73f4-f453-3db0d70a2aed" [ 739.118328] env[62070]: _type = "Task" [ 739.118328] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 739.125988] env[62070]: DEBUG oslo_vmware.api [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Task: {'id': task-1121515, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 739.130764] env[62070]: DEBUG oslo_vmware.api [None req-ef1f59bf-97b7-4c08-92d0-c5b1ec61ff75 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52007acd-c527-73f4-f453-3db0d70a2aed, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 739.263124] env[62070]: DEBUG nova.virt.hardware [None req-81043ab4-38b5-4edb-afd0-424b75f917e0 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T09:21:37Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=43ea607c-7ece-4601-9b11-75c6a16aa7dd,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 739.263428] env[62070]: DEBUG nova.virt.hardware [None req-81043ab4-38b5-4edb-afd0-424b75f917e0 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Flavor limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 739.263545] env[62070]: DEBUG nova.virt.hardware [None req-81043ab4-38b5-4edb-afd0-424b75f917e0 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Image limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 739.263757] env[62070]: DEBUG nova.virt.hardware [None req-81043ab4-38b5-4edb-afd0-424b75f917e0 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Flavor pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 739.263905] env[62070]: DEBUG nova.virt.hardware [None req-81043ab4-38b5-4edb-afd0-424b75f917e0 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Image pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 739.265014] 
env[62070]: DEBUG nova.virt.hardware [None req-81043ab4-38b5-4edb-afd0-424b75f917e0 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 739.265884] env[62070]: DEBUG nova.virt.hardware [None req-81043ab4-38b5-4edb-afd0-424b75f917e0 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 739.266936] env[62070]: DEBUG nova.virt.hardware [None req-81043ab4-38b5-4edb-afd0-424b75f917e0 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 739.268271] env[62070]: DEBUG nova.virt.hardware [None req-81043ab4-38b5-4edb-afd0-424b75f917e0 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Got 1 possible topologies {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 739.268471] env[62070]: DEBUG nova.virt.hardware [None req-81043ab4-38b5-4edb-afd0-424b75f917e0 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 739.268781] env[62070]: DEBUG nova.virt.hardware [None req-81043ab4-38b5-4edb-afd0-424b75f917e0 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 739.278962] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f0bd73fa-ea00-4e52-be65-89f63f32d77c {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.307643] env[62070]: DEBUG oslo_vmware.api [None req-81043ab4-38b5-4edb-afd0-424b75f917e0 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Waiting for the task: (returnval){ [ 739.307643] env[62070]: value = "task-1121516" [ 739.307643] env[62070]: _type = "Task" [ 739.307643] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 739.318179] env[62070]: DEBUG oslo_vmware.api [None req-81043ab4-38b5-4edb-afd0-424b75f917e0 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Task: {'id': task-1121516, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 739.380277] env[62070]: DEBUG oslo_vmware.api [None req-0046a5be-b671-4771-8172-fdb6e585fc1d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Task: {'id': task-1121512, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 739.497571] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Acquiring lock "refresh_cache-bcafa04d-904b-4eab-aba1-35180c2d4b22" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 739.497879] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Acquired lock "refresh_cache-bcafa04d-904b-4eab-aba1-35180c2d4b22" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 739.498094] env[62070]: DEBUG nova.network.neutron [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] [instance: bcafa04d-904b-4eab-aba1-35180c2d4b22] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 739.590795] env[62070]: DEBUG nova.compute.utils [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Using /dev/sd instead of None {{(pid=62070) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 739.595173] env[62070]: DEBUG nova.compute.manager [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] [instance: 5a146d8f-6921-4b3e-8696-d2804fb855ba] Allocating IP information in the background. {{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 739.595356] env[62070]: DEBUG nova.network.neutron [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] [instance: 5a146d8f-6921-4b3e-8696-d2804fb855ba] allocate_for_instance() {{(pid=62070) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 739.625698] env[62070]: DEBUG oslo_vmware.api [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Task: {'id': task-1121515, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.222556} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 739.626475] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] [instance: fe378560-40b8-42c9-840d-b7d60de87c4d] Extended root virtual disk {{(pid=62070) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 739.627245] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28fa619f-311f-422b-83d9-8fb1942599a2 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.636040] env[62070]: DEBUG oslo_vmware.api [None req-ef1f59bf-97b7-4c08-92d0-c5b1ec61ff75 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52007acd-c527-73f4-f453-3db0d70a2aed, 'name': SearchDatastore_Task, 'duration_secs': 0.033384} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 739.636040] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bdcc98a4-6ec7-45c4-99d8-0dc4c12de094 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.656626] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] [instance: fe378560-40b8-42c9-840d-b7d60de87c4d] Reconfiguring VM instance instance-00000029 to attach disk [datastore2] fe378560-40b8-42c9-840d-b7d60de87c4d/fe378560-40b8-42c9-840d-b7d60de87c4d.vmdk or device None with type sparse {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 739.657475] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6980ec73-55ac-4067-be20-b97b48ab6927 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.673514] env[62070]: DEBUG nova.policy [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'af61f8424f7445e1bb04628a7ce52dd0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a34c8ff9345b46c6976385bc455f7d5e', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62070) authorize /opt/stack/nova/nova/policy.py:201}} [ 739.677402] env[62070]: DEBUG oslo_vmware.api [None req-ef1f59bf-97b7-4c08-92d0-c5b1ec61ff75 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Waiting for the task: (returnval){ [ 739.677402] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]5279935f-0af2-1f92-a871-27d7161ec93d" [ 739.677402] env[62070]: _type = "Task" [ 739.677402] env[62070]: } to complete. 
{{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 739.682754] env[62070]: DEBUG oslo_vmware.api [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Waiting for the task: (returnval){ [ 739.682754] env[62070]: value = "task-1121517" [ 739.682754] env[62070]: _type = "Task" [ 739.682754] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 739.690558] env[62070]: DEBUG oslo_vmware.api [None req-ef1f59bf-97b7-4c08-92d0-c5b1ec61ff75 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]5279935f-0af2-1f92-a871-27d7161ec93d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 739.693772] env[62070]: DEBUG oslo_vmware.api [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Task: {'id': task-1121517, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 739.821163] env[62070]: DEBUG oslo_vmware.api [None req-81043ab4-38b5-4edb-afd0-424b75f917e0 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Task: {'id': task-1121516, 'name': ReconfigVM_Task, 'duration_secs': 0.487225} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 739.821628] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-81043ab4-38b5-4edb-afd0-424b75f917e0 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] [instance: d148d561-3211-4f1f-965a-f2b14cd60b11] Updating instance 'd148d561-3211-4f1f-965a-f2b14cd60b11' progress to 33 {{(pid=62070) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 739.882599] env[62070]: DEBUG oslo_vmware.api [None req-0046a5be-b671-4771-8172-fdb6e585fc1d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Task: {'id': task-1121512, 'name': ReconfigVM_Task, 'duration_secs': 1.106335} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 739.882898] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-0046a5be-b671-4771-8172-fdb6e585fc1d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] [instance: 076aed5b-4b08-4f3b-a940-d9cd95c32e57] Reconfigured VM instance instance-00000028 to attach disk [datastore2] 076aed5b-4b08-4f3b-a940-d9cd95c32e57/076aed5b-4b08-4f3b-a940-d9cd95c32e57.vmdk or device None with type sparse {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 739.883894] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-628e2c0d-5589-4c7f-a82c-c795b148bc65 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.890253] env[62070]: DEBUG oslo_vmware.api [None req-0046a5be-b671-4771-8172-fdb6e585fc1d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Waiting for the task: (returnval){ [ 739.890253] env[62070]: value = "task-1121518" [ 739.890253] env[62070]: _type = "Task" [ 739.890253] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 739.899361] env[62070]: DEBUG oslo_vmware.api [None req-0046a5be-b671-4771-8172-fdb6e585fc1d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Task: {'id': task-1121518, 'name': Rename_Task} progress is 5%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 740.000356] env[62070]: DEBUG nova.network.neutron [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] [instance: 5a146d8f-6921-4b3e-8696-d2804fb855ba] Successfully created port: af767f4c-7d63-455c-b3df-198b16341c00 {{(pid=62070) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 740.034888] env[62070]: DEBUG nova.network.neutron [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] [instance: bcafa04d-904b-4eab-aba1-35180c2d4b22] Instance cache missing network info. {{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 740.099106] env[62070]: DEBUG nova.compute.manager [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] [instance: 5a146d8f-6921-4b3e-8696-d2804fb855ba] Start building block device mappings for instance. {{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 740.198373] env[62070]: DEBUG oslo_vmware.api [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Task: {'id': task-1121517, 'name': ReconfigVM_Task, 'duration_secs': 0.392127} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 740.198495] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] [instance: fe378560-40b8-42c9-840d-b7d60de87c4d] Reconfigured VM instance instance-00000029 to attach disk [datastore2] fe378560-40b8-42c9-840d-b7d60de87c4d/fe378560-40b8-42c9-840d-b7d60de87c4d.vmdk or device None with type sparse {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 740.199206] env[62070]: DEBUG oslo_vmware.api [None req-ef1f59bf-97b7-4c08-92d0-c5b1ec61ff75 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]5279935f-0af2-1f92-a871-27d7161ec93d, 'name': SearchDatastore_Task, 'duration_secs': 0.039707} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 740.199457] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f6cbe025-be68-4245-a58d-45a581cf3dca {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.200990] env[62070]: DEBUG oslo_concurrency.lockutils [None req-ef1f59bf-97b7-4c08-92d0-c5b1ec61ff75 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Releasing lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 740.201244] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-ef1f59bf-97b7-4c08-92d0-c5b1ec61ff75 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore1] 963feecc-ff58-4cbb-8d6f-3f9035337087/43ea607c-7ece-4601-9b11-75c6a16aa7dd-rescue.vmdk. {{(pid=62070) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 740.201477] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-82944043-d948-4b4f-997b-f4ac9a4c88dd {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.211789] env[62070]: DEBUG oslo_vmware.api [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Waiting for the task: (returnval){ [ 740.211789] env[62070]: value = "task-1121519" [ 740.211789] env[62070]: _type = "Task" [ 740.211789] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 740.218150] env[62070]: DEBUG oslo_vmware.api [None req-ef1f59bf-97b7-4c08-92d0-c5b1ec61ff75 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Waiting for the task: (returnval){ [ 740.218150] env[62070]: value = "task-1121520" [ 740.218150] env[62070]: _type = "Task" [ 740.218150] env[62070]: } to complete. 
{{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 740.222901] env[62070]: DEBUG nova.network.neutron [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] [instance: bcafa04d-904b-4eab-aba1-35180c2d4b22] Updating instance_info_cache with network_info: [{"id": "1989edae-bc69-457e-ab09-93742636d663", "address": "fa:16:3e:3e:dd:c5", "network": {"id": "c73b94bb-f960-4081-9afd-eb785d4bcf76", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1999445580-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a34c8ff9345b46c6976385bc455f7d5e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b8b5b5e2-866d-4ab5-b74d-4a47de0c4877", "external-id": "nsx-vlan-transportzone-74", "segmentation_id": 74, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1989edae-bc", "ovs_interfaceid": "1989edae-bc69-457e-ab09-93742636d663", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 740.227670] env[62070]: DEBUG oslo_vmware.api [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Task: {'id': task-1121519, 'name': Rename_Task} progress is 6%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 740.237242] env[62070]: DEBUG oslo_vmware.api [None req-ef1f59bf-97b7-4c08-92d0-c5b1ec61ff75 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Task: {'id': task-1121520, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 740.329463] env[62070]: DEBUG nova.virt.hardware [None req-81043ab4-38b5-4edb-afd0-424b75f917e0 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T09:23:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='56c7fbac-8f4f-47f8-9a34-b39636f74e40',id=36,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-884828689',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=43ea607c-7ece-4601-9b11-75c6a16aa7dd,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 740.329560] env[62070]: DEBUG nova.virt.hardware [None req-81043ab4-38b5-4edb-afd0-424b75f917e0 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Flavor limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 740.329695] env[62070]: DEBUG nova.virt.hardware [None req-81043ab4-38b5-4edb-afd0-424b75f917e0 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Image limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 740.329904] env[62070]: DEBUG nova.virt.hardware [None req-81043ab4-38b5-4edb-afd0-424b75f917e0 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Flavor pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 740.330058] env[62070]: DEBUG nova.virt.hardware [None req-81043ab4-38b5-4edb-afd0-424b75f917e0 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Image pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 740.330199] env[62070]: DEBUG nova.virt.hardware [None req-81043ab4-38b5-4edb-afd0-424b75f917e0 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 740.330393] env[62070]: DEBUG nova.virt.hardware [None req-81043ab4-38b5-4edb-afd0-424b75f917e0 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 740.330542] env[62070]: DEBUG nova.virt.hardware [None req-81043ab4-38b5-4edb-afd0-424b75f917e0 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 740.330696] env[62070]: DEBUG nova.virt.hardware [None req-81043ab4-38b5-4edb-afd0-424b75f917e0 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] 
Got 1 possible topologies {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 740.330848] env[62070]: DEBUG nova.virt.hardware [None req-81043ab4-38b5-4edb-afd0-424b75f917e0 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 740.331017] env[62070]: DEBUG nova.virt.hardware [None req-81043ab4-38b5-4edb-afd0-424b75f917e0 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 740.337743] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-81043ab4-38b5-4edb-afd0-424b75f917e0 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] [instance: d148d561-3211-4f1f-965a-f2b14cd60b11] Reconfiguring VM instance instance-00000020 to detach disk 2000 {{(pid=62070) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 740.337743] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-109b419d-1ffd-4c52-8df6-008fbfd48e30 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.358448] env[62070]: DEBUG oslo_vmware.api [None req-81043ab4-38b5-4edb-afd0-424b75f917e0 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Waiting for the task: (returnval){ [ 740.358448] env[62070]: value = "task-1121521" [ 740.358448] env[62070]: _type = "Task" [ 740.358448] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 740.370892] env[62070]: DEBUG oslo_vmware.api [None req-81043ab4-38b5-4edb-afd0-424b75f917e0 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Task: {'id': task-1121521, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 740.400458] env[62070]: DEBUG oslo_vmware.api [None req-0046a5be-b671-4771-8172-fdb6e585fc1d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Task: {'id': task-1121518, 'name': Rename_Task, 'duration_secs': 0.141021} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 740.402948] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-0046a5be-b671-4771-8172-fdb6e585fc1d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] [instance: 076aed5b-4b08-4f3b-a940-d9cd95c32e57] Powering on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 740.403459] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ce56fc2f-a6ed-49d2-b3ee-8a03e50966fe {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.409162] env[62070]: DEBUG oslo_vmware.api [None req-0046a5be-b671-4771-8172-fdb6e585fc1d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Waiting for the task: (returnval){ [ 740.409162] env[62070]: value = "task-1121522" [ 740.409162] env[62070]: _type = "Task" [ 740.409162] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 740.420055] env[62070]: DEBUG oslo_vmware.api [None req-0046a5be-b671-4771-8172-fdb6e585fc1d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Task: {'id': task-1121522, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 740.587978] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbfcb2e7-cb3d-40af-8da5-8a12edbe99fb {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.595333] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11a76f33-5e6d-4302-9ce5-d9cb255fd87b {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.627393] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b8639c7-473e-4849-bfa4-50c2bc2565f8 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.634896] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-693fb33b-d298-4379-8b68-5f0e1819de88 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.650650] env[62070]: DEBUG nova.compute.provider_tree [None req-b307d371-7291-4b27-bac8-b0cebaeb9acc tempest-ServerDiagnosticsNegativeTest-1306946091 tempest-ServerDiagnosticsNegativeTest-1306946091-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 740.722662] env[62070]: DEBUG oslo_vmware.api [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Task: {'id': task-1121519, 'name': Rename_Task, 'duration_secs': 0.18174} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 740.725855] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] [instance: fe378560-40b8-42c9-840d-b7d60de87c4d] Powering on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 740.726107] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8f75addc-8085-44be-92e2-4e3237c99c69 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.732813] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Releasing lock "refresh_cache-bcafa04d-904b-4eab-aba1-35180c2d4b22" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 740.733191] env[62070]: DEBUG nova.compute.manager [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] [instance: bcafa04d-904b-4eab-aba1-35180c2d4b22] Instance network_info: |[{"id": "1989edae-bc69-457e-ab09-93742636d663", "address": "fa:16:3e:3e:dd:c5", "network": {"id": "c73b94bb-f960-4081-9afd-eb785d4bcf76", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1999445580-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a34c8ff9345b46c6976385bc455f7d5e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b8b5b5e2-866d-4ab5-b74d-4a47de0c4877", "external-id": "nsx-vlan-transportzone-74", "segmentation_id": 74, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1989edae-bc", "ovs_interfaceid": "1989edae-bc69-457e-ab09-93742636d663", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 740.733489] env[62070]: DEBUG oslo_vmware.api [None req-ef1f59bf-97b7-4c08-92d0-c5b1ec61ff75 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Task: {'id': task-1121520, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 740.734825] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] [instance: bcafa04d-904b-4eab-aba1-35180c2d4b22] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:3e:dd:c5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b8b5b5e2-866d-4ab5-b74d-4a47de0c4877', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1989edae-bc69-457e-ab09-93742636d663', 'vif_model': 'vmxnet3'}] {{(pid=62070) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 740.742576] env[62070]: DEBUG oslo.service.loopingcall [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 740.742825] env[62070]: DEBUG oslo_vmware.api [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Waiting for the task: (returnval){ [ 740.742825] env[62070]: value = "task-1121523" [ 740.742825] env[62070]: _type = "Task" [ 740.742825] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 740.743040] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bcafa04d-904b-4eab-aba1-35180c2d4b22] Creating VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 740.743304] env[62070]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d76a4a1a-3a07-4625-a50d-db81299ae1ca {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.767351] env[62070]: DEBUG oslo_vmware.api [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Task: {'id': task-1121523, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 740.768720] env[62070]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 740.768720] env[62070]: value = "task-1121524" [ 740.768720] env[62070]: _type = "Task" [ 740.768720] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 740.778167] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1121524, 'name': CreateVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 740.870231] env[62070]: DEBUG oslo_vmware.api [None req-81043ab4-38b5-4edb-afd0-424b75f917e0 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Task: {'id': task-1121521, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 740.919309] env[62070]: DEBUG nova.compute.manager [req-538e5f08-f2dd-495d-b61a-f787f82c5dcf req-e6616e34-516c-4e85-85df-eda72252abf0 service nova] [instance: bcafa04d-904b-4eab-aba1-35180c2d4b22] Received event network-changed-1989edae-bc69-457e-ab09-93742636d663 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 740.919309] env[62070]: DEBUG nova.compute.manager [req-538e5f08-f2dd-495d-b61a-f787f82c5dcf req-e6616e34-516c-4e85-85df-eda72252abf0 service nova] [instance: bcafa04d-904b-4eab-aba1-35180c2d4b22] Refreshing instance network info cache due to event network-changed-1989edae-bc69-457e-ab09-93742636d663. {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 740.919622] env[62070]: DEBUG oslo_concurrency.lockutils [req-538e5f08-f2dd-495d-b61a-f787f82c5dcf req-e6616e34-516c-4e85-85df-eda72252abf0 service nova] Acquiring lock "refresh_cache-bcafa04d-904b-4eab-aba1-35180c2d4b22" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 740.920829] env[62070]: DEBUG oslo_concurrency.lockutils [req-538e5f08-f2dd-495d-b61a-f787f82c5dcf req-e6616e34-516c-4e85-85df-eda72252abf0 service nova] Acquired lock "refresh_cache-bcafa04d-904b-4eab-aba1-35180c2d4b22" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 740.920829] env[62070]: DEBUG nova.network.neutron [req-538e5f08-f2dd-495d-b61a-f787f82c5dcf req-e6616e34-516c-4e85-85df-eda72252abf0 service nova] [instance: bcafa04d-904b-4eab-aba1-35180c2d4b22] Refreshing network info cache for port 1989edae-bc69-457e-ab09-93742636d663 {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 740.930638] env[62070]: DEBUG oslo_vmware.api [None req-0046a5be-b671-4771-8172-fdb6e585fc1d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Task: {'id': task-1121522, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 741.133092] env[62070]: DEBUG nova.compute.manager [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] [instance: 5a146d8f-6921-4b3e-8696-d2804fb855ba] Start spawning the instance on the hypervisor. 
{{(pid=62070) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 741.154484] env[62070]: DEBUG nova.scheduler.client.report [None req-b307d371-7291-4b27-bac8-b0cebaeb9acc tempest-ServerDiagnosticsNegativeTest-1306946091 tempest-ServerDiagnosticsNegativeTest-1306946091-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 741.167582] env[62070]: DEBUG nova.virt.hardware [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T09:21:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T09:21:17Z,direct_url=,disk_format='vmdk',id=43ea607c-7ece-4601-9b11-75c6a16aa7dd,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9d42cb2bbadf40d6b35f237f71234611',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T09:21:18Z,virtual_size=,visibility=), allow threads: False {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 741.167931] env[62070]: DEBUG nova.virt.hardware [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Flavor limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 741.168150] env[62070]: DEBUG nova.virt.hardware [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Image limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 741.168440] env[62070]: DEBUG nova.virt.hardware [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Flavor pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 741.168662] env[62070]: DEBUG nova.virt.hardware [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Image pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 741.168901] env[62070]: DEBUG nova.virt.hardware [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62070) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:430}} [ 741.169246] env[62070]: DEBUG nova.virt.hardware [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 741.169529] env[62070]: DEBUG nova.virt.hardware [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 741.169770] env[62070]: DEBUG nova.virt.hardware [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Got 1 possible topologies {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 741.170022] env[62070]: DEBUG nova.virt.hardware [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 741.170261] env[62070]: DEBUG nova.virt.hardware [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 741.171333] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-059792cb-c249-44ca-8c6d-674a999ac5d9 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.182737] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-443bd304-aa60-4e41-9ac8-1c43d0d5f089 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.232616] env[62070]: DEBUG oslo_vmware.api [None req-ef1f59bf-97b7-4c08-92d0-c5b1ec61ff75 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Task: {'id': task-1121520, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.983352} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 741.232616] env[62070]: INFO nova.virt.vmwareapi.ds_util [None req-ef1f59bf-97b7-4c08-92d0-c5b1ec61ff75 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore1] 963feecc-ff58-4cbb-8d6f-3f9035337087/43ea607c-7ece-4601-9b11-75c6a16aa7dd-rescue.vmdk. 
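The nova.virt.hardware records above walk through CPU-topology selection for the 1-vCPU m1.nano flavor: flavor and image preferences are 0:0:0, the limits are 65536 sockets/cores/threads, and the only factorisation of a single vCPU is 1 socket x 1 core x 1 thread, hence "Got 1 possible topologies". The sketch below is a simplified illustration of that enumeration step, not Nova's actual _get_possible_cpu_topologies:

```python
def possible_cpu_topologies(vcpus, max_sockets, max_cores, max_threads):
    """Enumerate (sockets, cores, threads) triples whose product equals vcpus,
    subject to per-dimension limits -- the step the log reports as
    "Build topologies for N vcpu(s)" / "Got M possible topologies"."""
    found = []
    for sockets in range(1, min(max_sockets, vcpus) + 1):
        if vcpus % sockets:
            continue
        per_socket = vcpus // sockets
        for cores in range(1, min(max_cores, per_socket) + 1):
            if per_socket % cores:
                continue
            threads = per_socket // cores
            if threads <= max_threads:
                found.append((sockets, cores, threads))
    return found

# For the 1-vCPU flavor this yields exactly one candidate, matching the log:
print(possible_cpu_topologies(1, 65536, 65536, 65536))   # [(1, 1, 1)]
```

Nova then sorts the candidates against the preferred topology before picking one, which is what the "Sorted desired topologies" record reflects.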
[ 741.233176] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4873d6f-7312-4387-acd3-0963a1aa5733 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.260905] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-ef1f59bf-97b7-4c08-92d0-c5b1ec61ff75 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] [instance: 963feecc-ff58-4cbb-8d6f-3f9035337087] Reconfiguring VM instance instance-00000026 to attach disk [datastore1] 963feecc-ff58-4cbb-8d6f-3f9035337087/43ea607c-7ece-4601-9b11-75c6a16aa7dd-rescue.vmdk or device None with type thin {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 741.264104] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b6285c8d-686a-48f8-a091-0de835661837 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.284377] env[62070]: DEBUG oslo_vmware.api [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Task: {'id': task-1121523, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 741.286269] env[62070]: DEBUG oslo_vmware.api [None req-ef1f59bf-97b7-4c08-92d0-c5b1ec61ff75 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Waiting for the task: (returnval){ [ 741.286269] env[62070]: value = "task-1121525" [ 741.286269] env[62070]: _type = "Task" [ 741.286269] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 741.292568] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1121524, 'name': CreateVM_Task, 'duration_secs': 0.495623} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 741.293142] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bcafa04d-904b-4eab-aba1-35180c2d4b22] Created VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 741.293867] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 741.294054] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Acquired lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 741.294368] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 741.297621] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7b57711c-312c-49af-b5bc-1863e06d37f4 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.299383] env[62070]: DEBUG oslo_vmware.api [None req-ef1f59bf-97b7-4c08-92d0-c5b1ec61ff75 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Task: {'id': task-1121525, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 741.302695] env[62070]: DEBUG oslo_vmware.api [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Waiting for the task: (returnval){ [ 741.302695] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]52a36b86-35db-528b-7b7b-efc803d27b24" [ 741.302695] env[62070]: _type = "Task" [ 741.302695] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 741.312930] env[62070]: DEBUG oslo_vmware.api [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52a36b86-35db-528b-7b7b-efc803d27b24, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 741.368591] env[62070]: DEBUG oslo_vmware.api [None req-81043ab4-38b5-4edb-afd0-424b75f917e0 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Task: {'id': task-1121521, 'name': ReconfigVM_Task, 'duration_secs': 0.576676} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 741.368862] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-81043ab4-38b5-4edb-afd0-424b75f917e0 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] [instance: d148d561-3211-4f1f-965a-f2b14cd60b11] Reconfigured VM instance instance-00000020 to detach disk 2000 {{(pid=62070) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 741.369627] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75b2c516-78fe-4235-b4b5-4f94363671f6 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.390608] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-81043ab4-38b5-4edb-afd0-424b75f917e0 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] [instance: d148d561-3211-4f1f-965a-f2b14cd60b11] Reconfiguring VM instance instance-00000020 to attach disk [datastore1] d148d561-3211-4f1f-965a-f2b14cd60b11/d148d561-3211-4f1f-965a-f2b14cd60b11.vmdk or device None with type thin {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 741.390862] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8a7734ba-bb4f-44c5-a1bc-147f0a7b4ad8 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.408866] env[62070]: DEBUG oslo_vmware.api [None req-81043ab4-38b5-4edb-afd0-424b75f917e0 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Waiting for the task: (returnval){ [ 741.408866] env[62070]: value = "task-1121526" [ 741.408866] env[62070]: _type = "Task" [ 741.408866] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 741.419128] env[62070]: DEBUG oslo_vmware.api [None req-81043ab4-38b5-4edb-afd0-424b75f917e0 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Task: {'id': task-1121526, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 741.421999] env[62070]: DEBUG oslo_vmware.api [None req-0046a5be-b671-4771-8172-fdb6e585fc1d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Task: {'id': task-1121522, 'name': PowerOnVM_Task, 'duration_secs': 0.580826} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 741.422252] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-0046a5be-b671-4771-8172-fdb6e585fc1d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] [instance: 076aed5b-4b08-4f3b-a940-d9cd95c32e57] Powered on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 741.422449] env[62070]: INFO nova.compute.manager [None req-0046a5be-b671-4771-8172-fdb6e585fc1d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] [instance: 076aed5b-4b08-4f3b-a940-d9cd95c32e57] Took 10.21 seconds to spawn the instance on the hypervisor. 
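Each ReconfigVM_Task / PowerOnVM_Task invocation above returns a task reference that the driver then polls: a "Waiting for the task" record, repeated "progress is N%" records, and finally "completed successfully" with a duration. In the driver this is handled by oslo.vmware's session wait_for_task/_poll_task (the api.py:397/434/444 locations in the records). The sketch below shows only the generic polling pattern; the poll() callable is a hypothetical stand-in for the PropertyCollector read of Task.info:

```python
import time
from dataclasses import dataclass
from typing import Callable, Optional


@dataclass
class TaskInfo:
    state: str                     # 'queued' | 'running' | 'success' | 'error'
    progress: Optional[int] = None
    result: object = None
    error: Optional[str] = None


def wait_for_task(poll: Callable[[], TaskInfo], interval: float = 0.5):
    """Poll until the task leaves the queued/running states, mirroring the
    "Waiting for the task ... progress is N% ... completed successfully"
    sequence in the log."""
    while True:
        info = poll()
        if info.state in ('queued', 'running'):
            print(f"progress is {info.progress or 0}%")
            time.sleep(interval)
            continue
        if info.state == 'success':
            print("completed successfully")
            return info.result
        raise RuntimeError(info.error or "task failed")
```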
[ 741.422632] env[62070]: DEBUG nova.compute.manager [None req-0046a5be-b671-4771-8172-fdb6e585fc1d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] [instance: 076aed5b-4b08-4f3b-a940-d9cd95c32e57] Checking state {{(pid=62070) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 741.423381] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04ebc452-9ed8-4670-ad50-92c411b651d2 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.624269] env[62070]: DEBUG nova.network.neutron [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] [instance: 5a146d8f-6921-4b3e-8696-d2804fb855ba] Successfully updated port: af767f4c-7d63-455c-b3df-198b16341c00 {{(pid=62070) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 741.656171] env[62070]: DEBUG nova.network.neutron [req-538e5f08-f2dd-495d-b61a-f787f82c5dcf req-e6616e34-516c-4e85-85df-eda72252abf0 service nova] [instance: bcafa04d-904b-4eab-aba1-35180c2d4b22] Updated VIF entry in instance network info cache for port 1989edae-bc69-457e-ab09-93742636d663. {{(pid=62070) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 741.656528] env[62070]: DEBUG nova.network.neutron [req-538e5f08-f2dd-495d-b61a-f787f82c5dcf req-e6616e34-516c-4e85-85df-eda72252abf0 service nova] [instance: bcafa04d-904b-4eab-aba1-35180c2d4b22] Updating instance_info_cache with network_info: [{"id": "1989edae-bc69-457e-ab09-93742636d663", "address": "fa:16:3e:3e:dd:c5", "network": {"id": "c73b94bb-f960-4081-9afd-eb785d4bcf76", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1999445580-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a34c8ff9345b46c6976385bc455f7d5e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b8b5b5e2-866d-4ab5-b74d-4a47de0c4877", "external-id": "nsx-vlan-transportzone-74", "segmentation_id": 74, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1989edae-bc", "ovs_interfaceid": "1989edae-bc69-457e-ab09-93742636d663", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 741.660661] env[62070]: DEBUG oslo_concurrency.lockutils [None req-b307d371-7291-4b27-bac8-b0cebaeb9acc tempest-ServerDiagnosticsNegativeTest-1306946091 tempest-ServerDiagnosticsNegativeTest-1306946091-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.578s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 741.661146] env[62070]: DEBUG nova.compute.manager [None req-b307d371-7291-4b27-bac8-b0cebaeb9acc tempest-ServerDiagnosticsNegativeTest-1306946091 
tempest-ServerDiagnosticsNegativeTest-1306946091-project-member] [instance: 42a5c5d8-5c3a-4568-b212-d87f2951a334] Start building networks asynchronously for instance. {{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 741.663423] env[62070]: DEBUG oslo_concurrency.lockutils [None req-3add793a-e278-4147-906b-0dddec8a7d40 tempest-ServerAddressesTestJSON-560794175 tempest-ServerAddressesTestJSON-560794175-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.211s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 741.664849] env[62070]: INFO nova.compute.claims [None req-3add793a-e278-4147-906b-0dddec8a7d40 tempest-ServerAddressesTestJSON-560794175 tempest-ServerAddressesTestJSON-560794175-project-member] [instance: a3fcb849-b015-43aa-8f95-0d4a87e2cecc] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 741.763045] env[62070]: DEBUG oslo_vmware.api [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Task: {'id': task-1121523, 'name': PowerOnVM_Task, 'duration_secs': 0.599871} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 741.763045] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] [instance: fe378560-40b8-42c9-840d-b7d60de87c4d] Powered on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 741.763263] env[62070]: INFO nova.compute.manager [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] [instance: fe378560-40b8-42c9-840d-b7d60de87c4d] Took 8.48 seconds to spawn the instance on the hypervisor. [ 741.763355] env[62070]: DEBUG nova.compute.manager [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] [instance: fe378560-40b8-42c9-840d-b7d60de87c4d] Checking state {{(pid=62070) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 741.764202] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9880eb57-fae5-4112-b073-182d24b7b3f4 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.795669] env[62070]: DEBUG oslo_vmware.api [None req-ef1f59bf-97b7-4c08-92d0-c5b1ec61ff75 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Task: {'id': task-1121525, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 741.811738] env[62070]: DEBUG oslo_vmware.api [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52a36b86-35db-528b-7b7b-efc803d27b24, 'name': SearchDatastore_Task, 'duration_secs': 0.012247} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 741.812841] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Releasing lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 741.812841] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] [instance: bcafa04d-904b-4eab-aba1-35180c2d4b22] Processing image 43ea607c-7ece-4601-9b11-75c6a16aa7dd {{(pid=62070) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 741.812841] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 741.812841] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Acquired lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 741.813054] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 741.813054] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1bbed43a-a26d-45b1-9e48-b8a3c16de519 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.826602] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 741.826666] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62070) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 741.827407] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3e80ccf3-1594-4b12-bfb9-6042ad31c496 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.833055] env[62070]: DEBUG oslo_vmware.api [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Waiting for the task: (returnval){ [ 741.833055] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]52a9d0b3-6900-f8e5-1f8e-596e78812f92" [ 741.833055] env[62070]: _type = "Task" [ 741.833055] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 741.841681] env[62070]: DEBUG oslo_vmware.api [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52a9d0b3-6900-f8e5-1f8e-596e78812f92, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 741.918946] env[62070]: DEBUG oslo_vmware.api [None req-81043ab4-38b5-4edb-afd0-424b75f917e0 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Task: {'id': task-1121526, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 741.945115] env[62070]: INFO nova.compute.manager [None req-0046a5be-b671-4771-8172-fdb6e585fc1d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] [instance: 076aed5b-4b08-4f3b-a940-d9cd95c32e57] Took 28.51 seconds to build instance. 
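The lockutils records around the datastore image cache ("Acquiring lock \"[datastore2] devstack-image-cache_base/...\"", "Acquired lock", "Releasing lock") serialize the check-then-copy of the shared base image so concurrent spawns do not fetch it twice. A minimal sketch of that pattern, using oslo.concurrency's lockutils.lock context manager (the helper these DEBUG records come from); cached_exists() and fetch_image() are hypothetical stand-ins for the SearchDatastore_Task probe and the copy step:

```python
from oslo_concurrency import lockutils

# Lock name mirrors the cache path seen in the log records above.
CACHE_LOCK = "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd"


def ensure_cached_image(cached_exists, fetch_image):
    """Check for the cached base image and fetch it only if missing,
    holding the cache lock for the whole check-then-fetch sequence."""
    # Entering and leaving this block produces the same
    # "Acquiring lock ... / Acquired lock ... / Releasing lock" DEBUG records.
    with lockutils.lock(CACHE_LOCK):
        if not cached_exists():
            fetch_image()
```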
[ 742.127419] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Acquiring lock "refresh_cache-5a146d8f-6921-4b3e-8696-d2804fb855ba" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 742.127552] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Acquired lock "refresh_cache-5a146d8f-6921-4b3e-8696-d2804fb855ba" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 742.127673] env[62070]: DEBUG nova.network.neutron [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] [instance: 5a146d8f-6921-4b3e-8696-d2804fb855ba] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 742.159264] env[62070]: DEBUG oslo_concurrency.lockutils [req-538e5f08-f2dd-495d-b61a-f787f82c5dcf req-e6616e34-516c-4e85-85df-eda72252abf0 service nova] Releasing lock "refresh_cache-bcafa04d-904b-4eab-aba1-35180c2d4b22" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 742.168856] env[62070]: DEBUG nova.compute.utils [None req-b307d371-7291-4b27-bac8-b0cebaeb9acc tempest-ServerDiagnosticsNegativeTest-1306946091 tempest-ServerDiagnosticsNegativeTest-1306946091-project-member] Using /dev/sd instead of None {{(pid=62070) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 742.170299] env[62070]: DEBUG nova.compute.manager [None req-b307d371-7291-4b27-bac8-b0cebaeb9acc tempest-ServerDiagnosticsNegativeTest-1306946091 tempest-ServerDiagnosticsNegativeTest-1306946091-project-member] [instance: 42a5c5d8-5c3a-4568-b212-d87f2951a334] Allocating IP information in the background. 
{{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 742.170437] env[62070]: DEBUG nova.network.neutron [None req-b307d371-7291-4b27-bac8-b0cebaeb9acc tempest-ServerDiagnosticsNegativeTest-1306946091 tempest-ServerDiagnosticsNegativeTest-1306946091-project-member] [instance: 42a5c5d8-5c3a-4568-b212-d87f2951a334] allocate_for_instance() {{(pid=62070) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 742.257461] env[62070]: DEBUG nova.policy [None req-b307d371-7291-4b27-bac8-b0cebaeb9acc tempest-ServerDiagnosticsNegativeTest-1306946091 tempest-ServerDiagnosticsNegativeTest-1306946091-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9e9e1b387c0f464cbb71bd1a51c6adb0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'dd860ef9c03045979ef05776693c9776', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62070) authorize /opt/stack/nova/nova/policy.py:201}} [ 742.282907] env[62070]: INFO nova.compute.manager [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] [instance: fe378560-40b8-42c9-840d-b7d60de87c4d] Took 26.77 seconds to build instance. [ 742.301690] env[62070]: DEBUG oslo_vmware.api [None req-ef1f59bf-97b7-4c08-92d0-c5b1ec61ff75 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Task: {'id': task-1121525, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 742.343555] env[62070]: DEBUG oslo_vmware.api [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52a9d0b3-6900-f8e5-1f8e-596e78812f92, 'name': SearchDatastore_Task, 'duration_secs': 0.03529} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 742.344366] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0a6c563f-c1dd-4525-b2cf-ccad48591f7e {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.349771] env[62070]: DEBUG oslo_vmware.api [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Waiting for the task: (returnval){ [ 742.349771] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]52aa5228-b8cd-70b7-aa9f-542549ca1c5b" [ 742.349771] env[62070]: _type = "Task" [ 742.349771] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 742.357747] env[62070]: DEBUG oslo_vmware.api [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52aa5228-b8cd-70b7-aa9f-542549ca1c5b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 742.419492] env[62070]: DEBUG oslo_vmware.api [None req-81043ab4-38b5-4edb-afd0-424b75f917e0 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Task: {'id': task-1121526, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 742.446878] env[62070]: DEBUG oslo_concurrency.lockutils [None req-0046a5be-b671-4771-8172-fdb6e585fc1d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Lock "076aed5b-4b08-4f3b-a940-d9cd95c32e57" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 112.232s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 742.548506] env[62070]: DEBUG oslo_concurrency.lockutils [None req-5a8984d4-9dbc-4d3d-b314-9d28f25f6fb9 tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Acquiring lock "1c1730e5-88af-4c7f-8bcc-d494db2cd723" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 742.548735] env[62070]: DEBUG oslo_concurrency.lockutils [None req-5a8984d4-9dbc-4d3d-b314-9d28f25f6fb9 tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Lock "1c1730e5-88af-4c7f-8bcc-d494db2cd723" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 742.548947] env[62070]: DEBUG oslo_concurrency.lockutils [None req-5a8984d4-9dbc-4d3d-b314-9d28f25f6fb9 tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Acquiring lock "1c1730e5-88af-4c7f-8bcc-d494db2cd723-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 742.549148] env[62070]: DEBUG oslo_concurrency.lockutils [None req-5a8984d4-9dbc-4d3d-b314-9d28f25f6fb9 tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Lock "1c1730e5-88af-4c7f-8bcc-d494db2cd723-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 742.549317] env[62070]: DEBUG oslo_concurrency.lockutils [None req-5a8984d4-9dbc-4d3d-b314-9d28f25f6fb9 tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Lock "1c1730e5-88af-4c7f-8bcc-d494db2cd723-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 742.551409] env[62070]: INFO nova.compute.manager [None req-5a8984d4-9dbc-4d3d-b314-9d28f25f6fb9 tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] [instance: 1c1730e5-88af-4c7f-8bcc-d494db2cd723] Terminating instance [ 742.553262] env[62070]: DEBUG 
nova.compute.manager [None req-5a8984d4-9dbc-4d3d-b314-9d28f25f6fb9 tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] [instance: 1c1730e5-88af-4c7f-8bcc-d494db2cd723] Start destroying the instance on the hypervisor. {{(pid=62070) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 742.553455] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-5a8984d4-9dbc-4d3d-b314-9d28f25f6fb9 tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] [instance: 1c1730e5-88af-4c7f-8bcc-d494db2cd723] Destroying instance {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 742.554263] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7669e3a-72e9-434f-9137-5215fbeebe0f {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.565511] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-5a8984d4-9dbc-4d3d-b314-9d28f25f6fb9 tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] [instance: 1c1730e5-88af-4c7f-8bcc-d494db2cd723] Powering off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 742.565836] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-47c64ea9-7df9-475c-8bc7-8a165c6f0ee8 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.572622] env[62070]: DEBUG oslo_vmware.api [None req-5a8984d4-9dbc-4d3d-b314-9d28f25f6fb9 tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Waiting for the task: (returnval){ [ 742.572622] env[62070]: value = "task-1121527" [ 742.572622] env[62070]: _type = "Task" [ 742.572622] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 742.582120] env[62070]: DEBUG oslo_vmware.api [None req-5a8984d4-9dbc-4d3d-b314-9d28f25f6fb9 tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Task: {'id': task-1121527, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 742.611418] env[62070]: DEBUG oslo_concurrency.lockutils [None req-5160861d-89ab-415d-b303-ef069e25196d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Acquiring lock "076aed5b-4b08-4f3b-a940-d9cd95c32e57" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 742.611730] env[62070]: DEBUG oslo_concurrency.lockutils [None req-5160861d-89ab-415d-b303-ef069e25196d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Lock "076aed5b-4b08-4f3b-a940-d9cd95c32e57" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 742.611974] env[62070]: DEBUG oslo_concurrency.lockutils [None req-5160861d-89ab-415d-b303-ef069e25196d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Acquiring lock "076aed5b-4b08-4f3b-a940-d9cd95c32e57-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 742.612289] env[62070]: DEBUG oslo_concurrency.lockutils [None req-5160861d-89ab-415d-b303-ef069e25196d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Lock "076aed5b-4b08-4f3b-a940-d9cd95c32e57-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 742.612571] env[62070]: DEBUG oslo_concurrency.lockutils [None req-5160861d-89ab-415d-b303-ef069e25196d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Lock "076aed5b-4b08-4f3b-a940-d9cd95c32e57-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 742.615442] env[62070]: INFO nova.compute.manager [None req-5160861d-89ab-415d-b303-ef069e25196d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] [instance: 076aed5b-4b08-4f3b-a940-d9cd95c32e57] Terminating instance [ 742.618051] env[62070]: DEBUG nova.compute.manager [None req-5160861d-89ab-415d-b303-ef069e25196d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] [instance: 076aed5b-4b08-4f3b-a940-d9cd95c32e57] Start destroying the instance on the hypervisor. 
{{(pid=62070) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 742.618310] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-5160861d-89ab-415d-b303-ef069e25196d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] [instance: 076aed5b-4b08-4f3b-a940-d9cd95c32e57] Destroying instance {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 742.619351] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dedd072e-bc8b-4ca0-8349-b5126da64646 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.627483] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-5160861d-89ab-415d-b303-ef069e25196d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] [instance: 076aed5b-4b08-4f3b-a940-d9cd95c32e57] Powering off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 742.627601] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-edd3f241-d480-4fa0-943e-7e8649e55b58 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.636447] env[62070]: DEBUG oslo_vmware.api [None req-5160861d-89ab-415d-b303-ef069e25196d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Waiting for the task: (returnval){ [ 742.636447] env[62070]: value = "task-1121528" [ 742.636447] env[62070]: _type = "Task" [ 742.636447] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 742.647654] env[62070]: DEBUG oslo_vmware.api [None req-5160861d-89ab-415d-b303-ef069e25196d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Task: {'id': task-1121528, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 742.670465] env[62070]: DEBUG nova.network.neutron [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] [instance: 5a146d8f-6921-4b3e-8696-d2804fb855ba] Instance cache missing network info. {{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 742.673853] env[62070]: DEBUG nova.compute.manager [None req-b307d371-7291-4b27-bac8-b0cebaeb9acc tempest-ServerDiagnosticsNegativeTest-1306946091 tempest-ServerDiagnosticsNegativeTest-1306946091-project-member] [instance: 42a5c5d8-5c3a-4568-b212-d87f2951a334] Start building block device mappings for instance. 
{{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 742.715162] env[62070]: DEBUG nova.network.neutron [None req-b307d371-7291-4b27-bac8-b0cebaeb9acc tempest-ServerDiagnosticsNegativeTest-1306946091 tempest-ServerDiagnosticsNegativeTest-1306946091-project-member] [instance: 42a5c5d8-5c3a-4568-b212-d87f2951a334] Successfully created port: c29b4364-dedb-48eb-b91d-45b2a60f1bdd {{(pid=62070) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 742.783981] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Lock "fe378560-40b8-42c9-840d-b7d60de87c4d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 108.456s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 742.806013] env[62070]: DEBUG oslo_vmware.api [None req-ef1f59bf-97b7-4c08-92d0-c5b1ec61ff75 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Task: {'id': task-1121525, 'name': ReconfigVM_Task, 'duration_secs': 1.224522} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 742.809211] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-ef1f59bf-97b7-4c08-92d0-c5b1ec61ff75 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] [instance: 963feecc-ff58-4cbb-8d6f-3f9035337087] Reconfigured VM instance instance-00000026 to attach disk [datastore1] 963feecc-ff58-4cbb-8d6f-3f9035337087/43ea607c-7ece-4601-9b11-75c6a16aa7dd-rescue.vmdk or device None with type thin {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 742.810937] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-360f13e2-34ae-46ce-84cf-9459c557d70f {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.848563] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-25204b55-593e-48f1-82c6-12c46bddc8fe {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.870761] env[62070]: DEBUG oslo_vmware.api [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52aa5228-b8cd-70b7-aa9f-542549ca1c5b, 'name': SearchDatastore_Task, 'duration_secs': 0.041268} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 742.875080] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Releasing lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 742.875417] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore2] bcafa04d-904b-4eab-aba1-35180c2d4b22/bcafa04d-904b-4eab-aba1-35180c2d4b22.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 742.875867] env[62070]: DEBUG oslo_vmware.api [None req-ef1f59bf-97b7-4c08-92d0-c5b1ec61ff75 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Waiting for the task: (returnval){ [ 742.875867] env[62070]: value = "task-1121529" [ 742.875867] env[62070]: _type = "Task" [ 742.875867] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 742.877313] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1b2d3948-420c-4dd0-bc26-cb8032d702b9 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.889048] env[62070]: DEBUG oslo_vmware.api [None req-ef1f59bf-97b7-4c08-92d0-c5b1ec61ff75 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Task: {'id': task-1121529, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 742.890570] env[62070]: DEBUG oslo_vmware.api [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Waiting for the task: (returnval){ [ 742.890570] env[62070]: value = "task-1121530" [ 742.890570] env[62070]: _type = "Task" [ 742.890570] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 742.904649] env[62070]: DEBUG oslo_vmware.api [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Task: {'id': task-1121530, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 742.919148] env[62070]: DEBUG nova.network.neutron [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] [instance: 5a146d8f-6921-4b3e-8696-d2804fb855ba] Updating instance_info_cache with network_info: [{"id": "af767f4c-7d63-455c-b3df-198b16341c00", "address": "fa:16:3e:b4:44:12", "network": {"id": "c73b94bb-f960-4081-9afd-eb785d4bcf76", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1999445580-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a34c8ff9345b46c6976385bc455f7d5e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b8b5b5e2-866d-4ab5-b74d-4a47de0c4877", "external-id": "nsx-vlan-transportzone-74", "segmentation_id": 74, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaf767f4c-7d", "ovs_interfaceid": "af767f4c-7d63-455c-b3df-198b16341c00", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 742.926678] env[62070]: DEBUG oslo_vmware.api [None req-81043ab4-38b5-4edb-afd0-424b75f917e0 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Task: {'id': task-1121526, 'name': ReconfigVM_Task, 'duration_secs': 1.13116} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 742.927010] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-81043ab4-38b5-4edb-afd0-424b75f917e0 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] [instance: d148d561-3211-4f1f-965a-f2b14cd60b11] Reconfigured VM instance instance-00000020 to attach disk [datastore1] d148d561-3211-4f1f-965a-f2b14cd60b11/d148d561-3211-4f1f-965a-f2b14cd60b11.vmdk or device None with type thin {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 742.927375] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-81043ab4-38b5-4edb-afd0-424b75f917e0 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] [instance: d148d561-3211-4f1f-965a-f2b14cd60b11] Updating instance 'd148d561-3211-4f1f-965a-f2b14cd60b11' progress to 50 {{(pid=62070) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 742.946686] env[62070]: DEBUG nova.compute.manager [req-85a76c04-f78e-4590-9eb8-a5615c18e752 req-5f4cad31-2489-41f1-8ee7-70f10e5fe97c service nova] [instance: 5a146d8f-6921-4b3e-8696-d2804fb855ba] Received event network-vif-plugged-af767f4c-7d63-455c-b3df-198b16341c00 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 742.946987] env[62070]: DEBUG oslo_concurrency.lockutils [req-85a76c04-f78e-4590-9eb8-a5615c18e752 req-5f4cad31-2489-41f1-8ee7-70f10e5fe97c service nova] Acquiring lock "5a146d8f-6921-4b3e-8696-d2804fb855ba-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 742.947259] env[62070]: DEBUG oslo_concurrency.lockutils [req-85a76c04-f78e-4590-9eb8-a5615c18e752 req-5f4cad31-2489-41f1-8ee7-70f10e5fe97c service nova] Lock "5a146d8f-6921-4b3e-8696-d2804fb855ba-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 742.947473] env[62070]: DEBUG oslo_concurrency.lockutils [req-85a76c04-f78e-4590-9eb8-a5615c18e752 req-5f4cad31-2489-41f1-8ee7-70f10e5fe97c service nova] Lock "5a146d8f-6921-4b3e-8696-d2804fb855ba-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 742.947698] env[62070]: DEBUG nova.compute.manager [req-85a76c04-f78e-4590-9eb8-a5615c18e752 req-5f4cad31-2489-41f1-8ee7-70f10e5fe97c service nova] [instance: 5a146d8f-6921-4b3e-8696-d2804fb855ba] No waiting events found dispatching network-vif-plugged-af767f4c-7d63-455c-b3df-198b16341c00 {{(pid=62070) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 742.947910] env[62070]: WARNING nova.compute.manager [req-85a76c04-f78e-4590-9eb8-a5615c18e752 req-5f4cad31-2489-41f1-8ee7-70f10e5fe97c service nova] [instance: 5a146d8f-6921-4b3e-8696-d2804fb855ba] Received unexpected event network-vif-plugged-af767f4c-7d63-455c-b3df-198b16341c00 for instance with vm_state building and task_state spawning. 
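The per-instance event handling above follows the usual oslo.concurrency pattern: the "Acquiring lock ... / acquired ... waited / released ... held" DEBUG lines are emitted by the lockutils wrapper around the locked callable. A minimal sketch of that pattern, assuming only the public oslo.concurrency API — the lock name is copied from the trace, but the function body is a placeholder and not Nova's actual _pop_event:

from oslo_concurrency import lockutils

@lockutils.synchronized('5a146d8f-6921-4b3e-8696-d2804fb855ba-events')
def _pop_event():
    # Runs only while the per-instance "-events" lock is held; the synchronized
    # wrapper is what logs the acquire/waited and released/held timings seen above.
    return None

_pop_event()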
[ 742.948112] env[62070]: DEBUG nova.compute.manager [req-85a76c04-f78e-4590-9eb8-a5615c18e752 req-5f4cad31-2489-41f1-8ee7-70f10e5fe97c service nova] [instance: 5a146d8f-6921-4b3e-8696-d2804fb855ba] Received event network-changed-af767f4c-7d63-455c-b3df-198b16341c00 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 742.948315] env[62070]: DEBUG nova.compute.manager [req-85a76c04-f78e-4590-9eb8-a5615c18e752 req-5f4cad31-2489-41f1-8ee7-70f10e5fe97c service nova] [instance: 5a146d8f-6921-4b3e-8696-d2804fb855ba] Refreshing instance network info cache due to event network-changed-af767f4c-7d63-455c-b3df-198b16341c00. {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 742.948513] env[62070]: DEBUG oslo_concurrency.lockutils [req-85a76c04-f78e-4590-9eb8-a5615c18e752 req-5f4cad31-2489-41f1-8ee7-70f10e5fe97c service nova] Acquiring lock "refresh_cache-5a146d8f-6921-4b3e-8696-d2804fb855ba" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 742.951887] env[62070]: DEBUG nova.compute.manager [None req-e4f46748-c02f-46f3-a136-2698cbfaeb86 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] [instance: 359ae9f2-a907-459e-99b9-3e043d5d015f] Starting instance... {{(pid=62070) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 743.090876] env[62070]: DEBUG oslo_vmware.api [None req-5a8984d4-9dbc-4d3d-b314-9d28f25f6fb9 tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Task: {'id': task-1121527, 'name': PowerOffVM_Task, 'duration_secs': 0.218215} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 743.091277] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-5a8984d4-9dbc-4d3d-b314-9d28f25f6fb9 tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] [instance: 1c1730e5-88af-4c7f-8bcc-d494db2cd723] Powered off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 743.091445] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-5a8984d4-9dbc-4d3d-b314-9d28f25f6fb9 tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] [instance: 1c1730e5-88af-4c7f-8bcc-d494db2cd723] Unregistering the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 743.091799] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b8ebd36b-201a-46c8-8027-b90f582677ad {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.148513] env[62070]: DEBUG oslo_vmware.api [None req-5160861d-89ab-415d-b303-ef069e25196d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Task: {'id': task-1121528, 'name': PowerOffVM_Task, 'duration_secs': 0.417702} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 743.148513] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-5160861d-89ab-415d-b303-ef069e25196d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] [instance: 076aed5b-4b08-4f3b-a940-d9cd95c32e57] Powered off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 743.148802] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-5160861d-89ab-415d-b303-ef069e25196d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] [instance: 076aed5b-4b08-4f3b-a940-d9cd95c32e57] Unregistering the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 743.148802] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1b706085-3bb2-4d9d-a7d2-18bc28fdbaaa {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.220229] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-5160861d-89ab-415d-b303-ef069e25196d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] [instance: 076aed5b-4b08-4f3b-a940-d9cd95c32e57] Unregistered the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 743.220670] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-5160861d-89ab-415d-b303-ef069e25196d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] [instance: 076aed5b-4b08-4f3b-a940-d9cd95c32e57] Deleting contents of the VM from datastore datastore2 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 743.220832] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-5160861d-89ab-415d-b303-ef069e25196d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Deleting the datastore file [datastore2] 076aed5b-4b08-4f3b-a940-d9cd95c32e57 {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 743.224460] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-209887b9-5ee3-4713-9983-e5424701a53a {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.231548] env[62070]: DEBUG oslo_vmware.api [None req-5160861d-89ab-415d-b303-ef069e25196d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Waiting for the task: (returnval){ [ 743.231548] env[62070]: value = "task-1121533" [ 743.231548] env[62070]: _type = "Task" [ 743.231548] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 743.245331] env[62070]: DEBUG oslo_vmware.api [None req-5160861d-89ab-415d-b303-ef069e25196d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Task: {'id': task-1121533, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 743.287536] env[62070]: DEBUG nova.compute.manager [None req-f6ff1678-8a45-4fca-ba2d-cf78a12bb48c tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] [instance: 242cf24f-f9e0-49ca-9b3e-a2b88b3cdcdc] Starting instance... {{(pid=62070) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 743.297712] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28ddb460-cfb3-4e2b-be62-bd84f3ec409f {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.307294] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98abbb2d-d335-4a75-9516-0fc064abfea9 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.347636] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb56467e-edf1-443a-aa8f-27a201ebb45f {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.357103] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ee4748b-b63e-42dc-a176-4a4779390919 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.373311] env[62070]: DEBUG nova.compute.provider_tree [None req-3add793a-e278-4147-906b-0dddec8a7d40 tempest-ServerAddressesTestJSON-560794175 tempest-ServerAddressesTestJSON-560794175-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 743.390608] env[62070]: DEBUG oslo_vmware.api [None req-ef1f59bf-97b7-4c08-92d0-c5b1ec61ff75 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Task: {'id': task-1121529, 'name': ReconfigVM_Task, 'duration_secs': 0.164938} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 743.391010] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-ef1f59bf-97b7-4c08-92d0-c5b1ec61ff75 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] [instance: 963feecc-ff58-4cbb-8d6f-3f9035337087] Powering on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 743.394733] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-62a8a43d-4c53-4c10-a500-fd7a4fa31184 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.402288] env[62070]: DEBUG oslo_vmware.api [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Task: {'id': task-1121530, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.511507} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 743.403738] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore2] bcafa04d-904b-4eab-aba1-35180c2d4b22/bcafa04d-904b-4eab-aba1-35180c2d4b22.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 743.403985] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] [instance: bcafa04d-904b-4eab-aba1-35180c2d4b22] Extending root virtual disk to 1048576 {{(pid=62070) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 743.404318] env[62070]: DEBUG oslo_vmware.api [None req-ef1f59bf-97b7-4c08-92d0-c5b1ec61ff75 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Waiting for the task: (returnval){ [ 743.404318] env[62070]: value = "task-1121534" [ 743.404318] env[62070]: _type = "Task" [ 743.404318] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 743.404510] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-eb5e7fc4-2892-4ea9-9342-23cf32b7c3b1 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.415914] env[62070]: DEBUG oslo_vmware.api [None req-ef1f59bf-97b7-4c08-92d0-c5b1ec61ff75 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Task: {'id': task-1121534, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 743.417035] env[62070]: DEBUG oslo_vmware.api [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Waiting for the task: (returnval){ [ 743.417035] env[62070]: value = "task-1121535" [ 743.417035] env[62070]: _type = "Task" [ 743.417035] env[62070]: } to complete. 
{{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 743.421844] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Releasing lock "refresh_cache-5a146d8f-6921-4b3e-8696-d2804fb855ba" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 743.422057] env[62070]: DEBUG nova.compute.manager [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] [instance: 5a146d8f-6921-4b3e-8696-d2804fb855ba] Instance network_info: |[{"id": "af767f4c-7d63-455c-b3df-198b16341c00", "address": "fa:16:3e:b4:44:12", "network": {"id": "c73b94bb-f960-4081-9afd-eb785d4bcf76", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1999445580-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a34c8ff9345b46c6976385bc455f7d5e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b8b5b5e2-866d-4ab5-b74d-4a47de0c4877", "external-id": "nsx-vlan-transportzone-74", "segmentation_id": 74, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaf767f4c-7d", "ovs_interfaceid": "af767f4c-7d63-455c-b3df-198b16341c00", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 743.425093] env[62070]: DEBUG oslo_concurrency.lockutils [req-85a76c04-f78e-4590-9eb8-a5615c18e752 req-5f4cad31-2489-41f1-8ee7-70f10e5fe97c service nova] Acquired lock "refresh_cache-5a146d8f-6921-4b3e-8696-d2804fb855ba" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 743.425261] env[62070]: DEBUG nova.network.neutron [req-85a76c04-f78e-4590-9eb8-a5615c18e752 req-5f4cad31-2489-41f1-8ee7-70f10e5fe97c service nova] [instance: 5a146d8f-6921-4b3e-8696-d2804fb855ba] Refreshing network info cache for port af767f4c-7d63-455c-b3df-198b16341c00 {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 743.426634] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] [instance: 5a146d8f-6921-4b3e-8696-d2804fb855ba] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b4:44:12', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b8b5b5e2-866d-4ab5-b74d-4a47de0c4877', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'af767f4c-7d63-455c-b3df-198b16341c00', 'vif_model': 'vmxnet3'}] {{(pid=62070) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 743.434754] env[62070]: DEBUG oslo.service.loopingcall [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 
tempest-ListServersNegativeTestJSON-74757126-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 743.436426] env[62070]: DEBUG oslo_vmware.api [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Task: {'id': task-1121535, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 743.440073] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5a146d8f-6921-4b3e-8696-d2804fb855ba] Creating VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 743.441613] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13ad9445-ca82-46eb-b343-b262c62d4b49 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.444405] env[62070]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f9c2bd2a-fd05-44d9-abad-a043e322a881 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.481177] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-554c34cd-75b4-41ba-8641-13ebe389ea6c {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.483790] env[62070]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 743.483790] env[62070]: value = "task-1121536" [ 743.483790] env[62070]: _type = "Task" [ 743.483790] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 743.500383] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-81043ab4-38b5-4edb-afd0-424b75f917e0 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] [instance: d148d561-3211-4f1f-965a-f2b14cd60b11] Updating instance 'd148d561-3211-4f1f-965a-f2b14cd60b11' progress to 67 {{(pid=62070) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 743.508658] env[62070]: DEBUG oslo_concurrency.lockutils [None req-e4f46748-c02f-46f3-a136-2698cbfaeb86 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 743.516443] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1121536, 'name': CreateVM_Task} progress is 15%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 743.695737] env[62070]: DEBUG nova.compute.manager [None req-b307d371-7291-4b27-bac8-b0cebaeb9acc tempest-ServerDiagnosticsNegativeTest-1306946091 tempest-ServerDiagnosticsNegativeTest-1306946091-project-member] [instance: 42a5c5d8-5c3a-4568-b212-d87f2951a334] Start spawning the instance on the hypervisor. 
{{(pid=62070) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 743.743901] env[62070]: DEBUG oslo_vmware.api [None req-5160861d-89ab-415d-b303-ef069e25196d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Task: {'id': task-1121533, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.392817} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 743.745935] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-5160861d-89ab-415d-b303-ef069e25196d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Deleted the datastore file {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 743.745935] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-5160861d-89ab-415d-b303-ef069e25196d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] [instance: 076aed5b-4b08-4f3b-a940-d9cd95c32e57] Deleted contents of the VM from datastore datastore2 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 743.745935] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-5160861d-89ab-415d-b303-ef069e25196d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] [instance: 076aed5b-4b08-4f3b-a940-d9cd95c32e57] Instance destroyed {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 743.745935] env[62070]: INFO nova.compute.manager [None req-5160861d-89ab-415d-b303-ef069e25196d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] [instance: 076aed5b-4b08-4f3b-a940-d9cd95c32e57] Took 1.13 seconds to destroy the instance on the hypervisor. [ 743.746453] env[62070]: DEBUG oslo.service.loopingcall [None req-5160861d-89ab-415d-b303-ef069e25196d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 743.748601] env[62070]: DEBUG nova.virt.hardware [None req-b307d371-7291-4b27-bac8-b0cebaeb9acc tempest-ServerDiagnosticsNegativeTest-1306946091 tempest-ServerDiagnosticsNegativeTest-1306946091-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T09:21:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T09:21:17Z,direct_url=,disk_format='vmdk',id=43ea607c-7ece-4601-9b11-75c6a16aa7dd,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9d42cb2bbadf40d6b35f237f71234611',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T09:21:18Z,virtual_size=,visibility=), allow threads: False {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 743.748909] env[62070]: DEBUG nova.virt.hardware [None req-b307d371-7291-4b27-bac8-b0cebaeb9acc tempest-ServerDiagnosticsNegativeTest-1306946091 tempest-ServerDiagnosticsNegativeTest-1306946091-project-member] Flavor limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 743.749048] env[62070]: DEBUG nova.virt.hardware [None req-b307d371-7291-4b27-bac8-b0cebaeb9acc tempest-ServerDiagnosticsNegativeTest-1306946091 tempest-ServerDiagnosticsNegativeTest-1306946091-project-member] Image limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 743.749194] env[62070]: DEBUG nova.virt.hardware [None req-b307d371-7291-4b27-bac8-b0cebaeb9acc tempest-ServerDiagnosticsNegativeTest-1306946091 tempest-ServerDiagnosticsNegativeTest-1306946091-project-member] Flavor pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 743.749285] env[62070]: DEBUG nova.virt.hardware [None req-b307d371-7291-4b27-bac8-b0cebaeb9acc tempest-ServerDiagnosticsNegativeTest-1306946091 tempest-ServerDiagnosticsNegativeTest-1306946091-project-member] Image pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 743.749456] env[62070]: DEBUG nova.virt.hardware [None req-b307d371-7291-4b27-bac8-b0cebaeb9acc tempest-ServerDiagnosticsNegativeTest-1306946091 tempest-ServerDiagnosticsNegativeTest-1306946091-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 743.749608] env[62070]: DEBUG nova.virt.hardware [None req-b307d371-7291-4b27-bac8-b0cebaeb9acc tempest-ServerDiagnosticsNegativeTest-1306946091 tempest-ServerDiagnosticsNegativeTest-1306946091-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 743.749755] env[62070]: DEBUG nova.virt.hardware [None req-b307d371-7291-4b27-bac8-b0cebaeb9acc tempest-ServerDiagnosticsNegativeTest-1306946091 tempest-ServerDiagnosticsNegativeTest-1306946091-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62070) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 743.749915] env[62070]: DEBUG nova.virt.hardware [None req-b307d371-7291-4b27-bac8-b0cebaeb9acc tempest-ServerDiagnosticsNegativeTest-1306946091 tempest-ServerDiagnosticsNegativeTest-1306946091-project-member] Got 1 possible topologies {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 743.750083] env[62070]: DEBUG nova.virt.hardware [None req-b307d371-7291-4b27-bac8-b0cebaeb9acc tempest-ServerDiagnosticsNegativeTest-1306946091 tempest-ServerDiagnosticsNegativeTest-1306946091-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 743.750265] env[62070]: DEBUG nova.virt.hardware [None req-b307d371-7291-4b27-bac8-b0cebaeb9acc tempest-ServerDiagnosticsNegativeTest-1306946091 tempest-ServerDiagnosticsNegativeTest-1306946091-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 743.750671] env[62070]: DEBUG nova.compute.manager [-] [instance: 076aed5b-4b08-4f3b-a940-d9cd95c32e57] Deallocating network for instance {{(pid=62070) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 743.750671] env[62070]: DEBUG nova.network.neutron [-] [instance: 076aed5b-4b08-4f3b-a940-d9cd95c32e57] deallocate_for_instance() {{(pid=62070) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 743.752908] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3dfa5503-c0df-41c5-ae4a-5c586c84f8c9 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.763407] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d3926e2-7320-41dc-a361-0d039eb3dc8c {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.813061] env[62070]: DEBUG oslo_concurrency.lockutils [None req-f6ff1678-8a45-4fca-ba2d-cf78a12bb48c tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 743.861170] env[62070]: DEBUG nova.network.neutron [req-85a76c04-f78e-4590-9eb8-a5615c18e752 req-5f4cad31-2489-41f1-8ee7-70f10e5fe97c service nova] [instance: 5a146d8f-6921-4b3e-8696-d2804fb855ba] Updated VIF entry in instance network info cache for port af767f4c-7d63-455c-b3df-198b16341c00. 
{{(pid=62070) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 743.861599] env[62070]: DEBUG nova.network.neutron [req-85a76c04-f78e-4590-9eb8-a5615c18e752 req-5f4cad31-2489-41f1-8ee7-70f10e5fe97c service nova] [instance: 5a146d8f-6921-4b3e-8696-d2804fb855ba] Updating instance_info_cache with network_info: [{"id": "af767f4c-7d63-455c-b3df-198b16341c00", "address": "fa:16:3e:b4:44:12", "network": {"id": "c73b94bb-f960-4081-9afd-eb785d4bcf76", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1999445580-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a34c8ff9345b46c6976385bc455f7d5e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b8b5b5e2-866d-4ab5-b74d-4a47de0c4877", "external-id": "nsx-vlan-transportzone-74", "segmentation_id": 74, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaf767f4c-7d", "ovs_interfaceid": "af767f4c-7d63-455c-b3df-198b16341c00", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 743.880849] env[62070]: DEBUG nova.scheduler.client.report [None req-3add793a-e278-4147-906b-0dddec8a7d40 tempest-ServerAddressesTestJSON-560794175 tempest-ServerAddressesTestJSON-560794175-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 743.922022] env[62070]: DEBUG oslo_vmware.api [None req-ef1f59bf-97b7-4c08-92d0-c5b1ec61ff75 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Task: {'id': task-1121534, 'name': PowerOnVM_Task, 'duration_secs': 0.494569} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 743.925380] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-ef1f59bf-97b7-4c08-92d0-c5b1ec61ff75 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] [instance: 963feecc-ff58-4cbb-8d6f-3f9035337087] Powered on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 743.932927] env[62070]: DEBUG nova.compute.manager [None req-ef1f59bf-97b7-4c08-92d0-c5b1ec61ff75 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] [instance: 963feecc-ff58-4cbb-8d6f-3f9035337087] Checking state {{(pid=62070) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 743.934960] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-418c3fb2-b062-4fb3-b2a4-a0547aefaa66 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.942893] env[62070]: DEBUG oslo_vmware.api [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Task: {'id': task-1121535, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.085682} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 743.943922] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] [instance: bcafa04d-904b-4eab-aba1-35180c2d4b22] Extended root virtual disk {{(pid=62070) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 743.944407] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b1bc2bb-732e-4a87-8c5f-f0f5cd52bfd1 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.970753] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] [instance: bcafa04d-904b-4eab-aba1-35180c2d4b22] Reconfiguring VM instance instance-0000002a to attach disk [datastore2] bcafa04d-904b-4eab-aba1-35180c2d4b22/bcafa04d-904b-4eab-aba1-35180c2d4b22.vmdk or device None with type sparse {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 743.971368] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2755c040-2820-44cd-87f7-5b203904954b {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.995603] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1121536, 'name': CreateVM_Task, 'duration_secs': 0.488239} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 743.996795] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5a146d8f-6921-4b3e-8696-d2804fb855ba] Created VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 743.997384] env[62070]: DEBUG oslo_vmware.api [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Waiting for the task: (returnval){ [ 743.997384] env[62070]: value = "task-1121537" [ 743.997384] env[62070]: _type = "Task" [ 743.997384] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 743.997849] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 743.998052] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Acquired lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 743.998373] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 743.998673] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-edecb977-2bc8-4aff-acea-34683c88c0ea {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.013316] env[62070]: DEBUG oslo_vmware.api [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Task: {'id': task-1121537, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 744.013626] env[62070]: DEBUG oslo_vmware.api [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Waiting for the task: (returnval){ [ 744.013626] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]52b1a9f8-674b-17fc-0ee0-f3c76a442fb4" [ 744.013626] env[62070]: _type = "Task" [ 744.013626] env[62070]: } to complete. 
{{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 744.021416] env[62070]: DEBUG oslo_vmware.api [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52b1a9f8-674b-17fc-0ee0-f3c76a442fb4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 744.072350] env[62070]: DEBUG nova.network.neutron [None req-81043ab4-38b5-4edb-afd0-424b75f917e0 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] [instance: d148d561-3211-4f1f-965a-f2b14cd60b11] Port c5e6098a-ebbb-4eee-ba72-4ddaad679830 binding to destination host cpu-1 is already ACTIVE {{(pid=62070) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3171}} [ 744.365360] env[62070]: DEBUG oslo_concurrency.lockutils [req-85a76c04-f78e-4590-9eb8-a5615c18e752 req-5f4cad31-2489-41f1-8ee7-70f10e5fe97c service nova] Releasing lock "refresh_cache-5a146d8f-6921-4b3e-8696-d2804fb855ba" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 744.390731] env[62070]: DEBUG oslo_concurrency.lockutils [None req-3add793a-e278-4147-906b-0dddec8a7d40 tempest-ServerAddressesTestJSON-560794175 tempest-ServerAddressesTestJSON-560794175-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.727s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 744.391357] env[62070]: DEBUG nova.compute.manager [None req-3add793a-e278-4147-906b-0dddec8a7d40 tempest-ServerAddressesTestJSON-560794175 tempest-ServerAddressesTestJSON-560794175-project-member] [instance: a3fcb849-b015-43aa-8f95-0d4a87e2cecc] Start building networks asynchronously for instance. {{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 744.393958] env[62070]: DEBUG oslo_concurrency.lockutils [None req-7c1b8e29-6c3f-402f-942e-08b08c8eac7d tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.180s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 744.400178] env[62070]: INFO nova.compute.claims [None req-7c1b8e29-6c3f-402f-942e-08b08c8eac7d tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: a3c42653-9a4b-42d3-bc38-8d46d95c8f64] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 744.513775] env[62070]: DEBUG oslo_vmware.api [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Task: {'id': task-1121537, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 744.518021] env[62070]: DEBUG nova.compute.manager [req-2e32f3d0-0cf7-44d6-97d9-4522d1e0ed41 req-8301d5ec-b317-42ee-8e22-e142a62825f1 service nova] [instance: 42a5c5d8-5c3a-4568-b212-d87f2951a334] Received event network-vif-plugged-c29b4364-dedb-48eb-b91d-45b2a60f1bdd {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 744.518021] env[62070]: DEBUG oslo_concurrency.lockutils [req-2e32f3d0-0cf7-44d6-97d9-4522d1e0ed41 req-8301d5ec-b317-42ee-8e22-e142a62825f1 service nova] Acquiring lock "42a5c5d8-5c3a-4568-b212-d87f2951a334-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 744.518021] env[62070]: DEBUG oslo_concurrency.lockutils [req-2e32f3d0-0cf7-44d6-97d9-4522d1e0ed41 req-8301d5ec-b317-42ee-8e22-e142a62825f1 service nova] Lock "42a5c5d8-5c3a-4568-b212-d87f2951a334-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 744.518447] env[62070]: DEBUG oslo_concurrency.lockutils [req-2e32f3d0-0cf7-44d6-97d9-4522d1e0ed41 req-8301d5ec-b317-42ee-8e22-e142a62825f1 service nova] Lock "42a5c5d8-5c3a-4568-b212-d87f2951a334-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 744.518447] env[62070]: DEBUG nova.compute.manager [req-2e32f3d0-0cf7-44d6-97d9-4522d1e0ed41 req-8301d5ec-b317-42ee-8e22-e142a62825f1 service nova] [instance: 42a5c5d8-5c3a-4568-b212-d87f2951a334] No waiting events found dispatching network-vif-plugged-c29b4364-dedb-48eb-b91d-45b2a60f1bdd {{(pid=62070) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 744.518634] env[62070]: WARNING nova.compute.manager [req-2e32f3d0-0cf7-44d6-97d9-4522d1e0ed41 req-8301d5ec-b317-42ee-8e22-e142a62825f1 service nova] [instance: 42a5c5d8-5c3a-4568-b212-d87f2951a334] Received unexpected event network-vif-plugged-c29b4364-dedb-48eb-b91d-45b2a60f1bdd for instance with vm_state building and task_state spawning. [ 744.528632] env[62070]: DEBUG oslo_vmware.api [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52b1a9f8-674b-17fc-0ee0-f3c76a442fb4, 'name': SearchDatastore_Task, 'duration_secs': 0.024743} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 744.528843] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Releasing lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 744.529080] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] [instance: 5a146d8f-6921-4b3e-8696-d2804fb855ba] Processing image 43ea607c-7ece-4601-9b11-75c6a16aa7dd {{(pid=62070) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 744.529313] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 744.529458] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Acquired lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 744.529634] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 744.529894] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3e71e1c4-bb58-4d4f-b83d-3aa363046bcc {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.541039] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 744.541039] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62070) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 744.541234] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d6136169-4b73-4e1e-8a46-bd4ce1e62e48 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.548122] env[62070]: DEBUG oslo_vmware.api [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Waiting for the task: (returnval){ [ 744.548122] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]5260c77f-acd6-3bff-8e0d-bbeedebe27e8" [ 744.548122] env[62070]: _type = "Task" [ 744.548122] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 744.559143] env[62070]: DEBUG oslo_vmware.api [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]5260c77f-acd6-3bff-8e0d-bbeedebe27e8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 744.597015] env[62070]: DEBUG nova.network.neutron [None req-b307d371-7291-4b27-bac8-b0cebaeb9acc tempest-ServerDiagnosticsNegativeTest-1306946091 tempest-ServerDiagnosticsNegativeTest-1306946091-project-member] [instance: 42a5c5d8-5c3a-4568-b212-d87f2951a334] Successfully updated port: c29b4364-dedb-48eb-b91d-45b2a60f1bdd {{(pid=62070) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 744.615461] env[62070]: DEBUG nova.network.neutron [-] [instance: 076aed5b-4b08-4f3b-a940-d9cd95c32e57] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 744.905461] env[62070]: DEBUG nova.compute.utils [None req-3add793a-e278-4147-906b-0dddec8a7d40 tempest-ServerAddressesTestJSON-560794175 tempest-ServerAddressesTestJSON-560794175-project-member] Using /dev/sd instead of None {{(pid=62070) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 744.909162] env[62070]: DEBUG nova.compute.manager [None req-3add793a-e278-4147-906b-0dddec8a7d40 tempest-ServerAddressesTestJSON-560794175 tempest-ServerAddressesTestJSON-560794175-project-member] [instance: a3fcb849-b015-43aa-8f95-0d4a87e2cecc] Allocating IP information in the background. 
{{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 744.909162] env[62070]: DEBUG nova.network.neutron [None req-3add793a-e278-4147-906b-0dddec8a7d40 tempest-ServerAddressesTestJSON-560794175 tempest-ServerAddressesTestJSON-560794175-project-member] [instance: a3fcb849-b015-43aa-8f95-0d4a87e2cecc] allocate_for_instance() {{(pid=62070) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 744.976873] env[62070]: DEBUG nova.policy [None req-3add793a-e278-4147-906b-0dddec8a7d40 tempest-ServerAddressesTestJSON-560794175 tempest-ServerAddressesTestJSON-560794175-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ac722bb48fa841f5a85dde2add8fcf60', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c4b0335b052b4d3c80922314fa650b30', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62070) authorize /opt/stack/nova/nova/policy.py:201}} [ 745.010923] env[62070]: DEBUG oslo_vmware.api [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Task: {'id': task-1121537, 'name': ReconfigVM_Task, 'duration_secs': 0.605675} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 745.011689] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] [instance: bcafa04d-904b-4eab-aba1-35180c2d4b22] Reconfigured VM instance instance-0000002a to attach disk [datastore2] bcafa04d-904b-4eab-aba1-35180c2d4b22/bcafa04d-904b-4eab-aba1-35180c2d4b22.vmdk or device None with type sparse {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 745.012654] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a1252c4e-bc2d-4a49-8790-5fec4f4d1e81 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.019495] env[62070]: DEBUG oslo_vmware.api [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Waiting for the task: (returnval){ [ 745.019495] env[62070]: value = "task-1121538" [ 745.019495] env[62070]: _type = "Task" [ 745.019495] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 745.028185] env[62070]: DEBUG oslo_vmware.api [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Task: {'id': task-1121538, 'name': Rename_Task} progress is 5%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 745.058403] env[62070]: DEBUG oslo_vmware.api [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]5260c77f-acd6-3bff-8e0d-bbeedebe27e8, 'name': SearchDatastore_Task, 'duration_secs': 0.01418} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 745.059379] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1c7fd7af-35e9-4a42-ab9f-9a51a9adf5c9 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.065024] env[62070]: DEBUG oslo_vmware.api [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Waiting for the task: (returnval){ [ 745.065024] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]526acbe8-43c7-e468-4190-73fdd2ab0a91" [ 745.065024] env[62070]: _type = "Task" [ 745.065024] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 745.074349] env[62070]: DEBUG oslo_vmware.api [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]526acbe8-43c7-e468-4190-73fdd2ab0a91, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 745.095351] env[62070]: DEBUG oslo_concurrency.lockutils [None req-81043ab4-38b5-4edb-afd0-424b75f917e0 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Acquiring lock "d148d561-3211-4f1f-965a-f2b14cd60b11-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 745.095772] env[62070]: DEBUG oslo_concurrency.lockutils [None req-81043ab4-38b5-4edb-afd0-424b75f917e0 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Lock "d148d561-3211-4f1f-965a-f2b14cd60b11-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 745.096081] env[62070]: DEBUG oslo_concurrency.lockutils [None req-81043ab4-38b5-4edb-afd0-424b75f917e0 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Lock "d148d561-3211-4f1f-965a-f2b14cd60b11-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 745.102388] env[62070]: DEBUG oslo_concurrency.lockutils [None req-b307d371-7291-4b27-bac8-b0cebaeb9acc tempest-ServerDiagnosticsNegativeTest-1306946091 tempest-ServerDiagnosticsNegativeTest-1306946091-project-member] Acquiring lock "refresh_cache-42a5c5d8-5c3a-4568-b212-d87f2951a334" {{(pid=62070) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 745.105518] env[62070]: DEBUG oslo_concurrency.lockutils [None req-b307d371-7291-4b27-bac8-b0cebaeb9acc tempest-ServerDiagnosticsNegativeTest-1306946091 tempest-ServerDiagnosticsNegativeTest-1306946091-project-member] Acquired lock "refresh_cache-42a5c5d8-5c3a-4568-b212-d87f2951a334" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 745.105518] env[62070]: DEBUG nova.network.neutron [None req-b307d371-7291-4b27-bac8-b0cebaeb9acc tempest-ServerDiagnosticsNegativeTest-1306946091 tempest-ServerDiagnosticsNegativeTest-1306946091-project-member] [instance: 42a5c5d8-5c3a-4568-b212-d87f2951a334] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 745.117181] env[62070]: DEBUG nova.compute.manager [req-48259ff3-c3ae-4123-bacb-6478319bec88 req-22cafb09-930e-4ead-930e-c429bc8a05e1 service nova] [instance: 076aed5b-4b08-4f3b-a940-d9cd95c32e57] Received event network-vif-deleted-d4fdb36b-1cfc-4dcd-86b8-98a8769b4224 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 745.118162] env[62070]: INFO nova.compute.manager [-] [instance: 076aed5b-4b08-4f3b-a940-d9cd95c32e57] Took 1.37 seconds to deallocate network for instance. [ 745.278300] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-5a8984d4-9dbc-4d3d-b314-9d28f25f6fb9 tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] [instance: 1c1730e5-88af-4c7f-8bcc-d494db2cd723] Unregistered the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 745.278300] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-5a8984d4-9dbc-4d3d-b314-9d28f25f6fb9 tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] [instance: 1c1730e5-88af-4c7f-8bcc-d494db2cd723] Deleting contents of the VM from datastore datastore1 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 745.278300] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-5a8984d4-9dbc-4d3d-b314-9d28f25f6fb9 tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Deleting the datastore file [datastore1] 1c1730e5-88af-4c7f-8bcc-d494db2cd723 {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 745.278300] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-43cc9296-3e3c-4017-9f4d-72167f4710a3 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.284885] env[62070]: DEBUG oslo_vmware.api [None req-5a8984d4-9dbc-4d3d-b314-9d28f25f6fb9 tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Waiting for the task: (returnval){ [ 745.284885] env[62070]: value = "task-1121539" [ 745.284885] env[62070]: _type = "Task" [ 745.284885] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 745.293202] env[62070]: DEBUG oslo_vmware.api [None req-5a8984d4-9dbc-4d3d-b314-9d28f25f6fb9 tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Task: {'id': task-1121539, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 745.349703] env[62070]: DEBUG nova.network.neutron [None req-3add793a-e278-4147-906b-0dddec8a7d40 tempest-ServerAddressesTestJSON-560794175 tempest-ServerAddressesTestJSON-560794175-project-member] [instance: a3fcb849-b015-43aa-8f95-0d4a87e2cecc] Successfully created port: fcd8b688-1cad-4cae-a1db-2d606703425c {{(pid=62070) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 745.362660] env[62070]: DEBUG oslo_concurrency.lockutils [None req-cbed1b11-9034-4566-ae5c-78171a6c24e6 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Acquiring lock "e74fd58c-cfa8-45c4-8f02-96234b4a9192" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 745.362908] env[62070]: DEBUG oslo_concurrency.lockutils [None req-cbed1b11-9034-4566-ae5c-78171a6c24e6 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Lock "e74fd58c-cfa8-45c4-8f02-96234b4a9192" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 745.412341] env[62070]: DEBUG nova.compute.manager [None req-3add793a-e278-4147-906b-0dddec8a7d40 tempest-ServerAddressesTestJSON-560794175 tempest-ServerAddressesTestJSON-560794175-project-member] [instance: a3fcb849-b015-43aa-8f95-0d4a87e2cecc] Start building block device mappings for instance. {{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 745.533325] env[62070]: DEBUG oslo_vmware.api [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Task: {'id': task-1121538, 'name': Rename_Task} progress is 99%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 745.578456] env[62070]: DEBUG oslo_vmware.api [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]526acbe8-43c7-e468-4190-73fdd2ab0a91, 'name': SearchDatastore_Task, 'duration_secs': 0.025433} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 745.578695] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Releasing lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 745.579285] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore1] 5a146d8f-6921-4b3e-8696-d2804fb855ba/5a146d8f-6921-4b3e-8696-d2804fb855ba.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 745.579285] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-02bf976f-08e1-494f-9fec-0ba487bceb0b {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.586828] env[62070]: DEBUG oslo_vmware.api [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Waiting for the task: (returnval){ [ 745.586828] env[62070]: value = "task-1121540" [ 745.586828] env[62070]: _type = "Task" [ 745.586828] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 745.594987] env[62070]: DEBUG oslo_vmware.api [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Task: {'id': task-1121540, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 745.627563] env[62070]: DEBUG oslo_concurrency.lockutils [None req-5160861d-89ab-415d-b303-ef069e25196d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 745.639177] env[62070]: DEBUG nova.network.neutron [None req-b307d371-7291-4b27-bac8-b0cebaeb9acc tempest-ServerDiagnosticsNegativeTest-1306946091 tempest-ServerDiagnosticsNegativeTest-1306946091-project-member] [instance: 42a5c5d8-5c3a-4568-b212-d87f2951a334] Instance cache missing network info. {{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 745.794736] env[62070]: DEBUG oslo_vmware.api [None req-5a8984d4-9dbc-4d3d-b314-9d28f25f6fb9 tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Task: {'id': task-1121539, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.449873} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 745.798078] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-5a8984d4-9dbc-4d3d-b314-9d28f25f6fb9 tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Deleted the datastore file {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 745.798078] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-5a8984d4-9dbc-4d3d-b314-9d28f25f6fb9 tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] [instance: 1c1730e5-88af-4c7f-8bcc-d494db2cd723] Deleted contents of the VM from datastore datastore1 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 745.798078] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-5a8984d4-9dbc-4d3d-b314-9d28f25f6fb9 tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] [instance: 1c1730e5-88af-4c7f-8bcc-d494db2cd723] Instance destroyed {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 745.798078] env[62070]: INFO nova.compute.manager [None req-5a8984d4-9dbc-4d3d-b314-9d28f25f6fb9 tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] [instance: 1c1730e5-88af-4c7f-8bcc-d494db2cd723] Took 3.24 seconds to destroy the instance on the hypervisor. [ 745.798078] env[62070]: DEBUG oslo.service.loopingcall [None req-5a8984d4-9dbc-4d3d-b314-9d28f25f6fb9 tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 745.798470] env[62070]: DEBUG nova.compute.manager [-] [instance: 1c1730e5-88af-4c7f-8bcc-d494db2cd723] Deallocating network for instance {{(pid=62070) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 745.798510] env[62070]: DEBUG nova.network.neutron [-] [instance: 1c1730e5-88af-4c7f-8bcc-d494db2cd723] deallocate_for_instance() {{(pid=62070) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 745.852225] env[62070]: DEBUG nova.network.neutron [None req-b307d371-7291-4b27-bac8-b0cebaeb9acc tempest-ServerDiagnosticsNegativeTest-1306946091 tempest-ServerDiagnosticsNegativeTest-1306946091-project-member] [instance: 42a5c5d8-5c3a-4568-b212-d87f2951a334] Updating instance_info_cache with network_info: [{"id": "c29b4364-dedb-48eb-b91d-45b2a60f1bdd", "address": "fa:16:3e:37:15:17", "network": {"id": "df33a08d-88db-4a22-846f-5b414705fc65", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.28", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "9d42cb2bbadf40d6b35f237f71234611", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4728adca-2846-416a-91a3-deb898faf1f3", "external-id": "nsx-vlan-transportzone-823", "segmentation_id": 823, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc29b4364-de", "ovs_interfaceid": "c29b4364-dedb-48eb-b91d-45b2a60f1bdd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 745.907194] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7cf1cbc8-82dc-4918-af13-1ba99b8c140e {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.918231] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88e4979b-fd0d-436d-9c06-eb3af0ad8586 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.957080] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4687962-4a82-4730-8776-7b680aea5cb6 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.965098] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6cdb38e4-0879-4b77-b677-01c0c5de731d {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.979615] env[62070]: DEBUG nova.compute.provider_tree [None req-7c1b8e29-6c3f-402f-942e-08b08c8eac7d tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 
746.030348] env[62070]: DEBUG oslo_vmware.api [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Task: {'id': task-1121538, 'name': Rename_Task} progress is 99%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 746.100646] env[62070]: DEBUG oslo_vmware.api [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Task: {'id': task-1121540, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 746.156097] env[62070]: DEBUG oslo_concurrency.lockutils [None req-81043ab4-38b5-4edb-afd0-424b75f917e0 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Acquiring lock "refresh_cache-d148d561-3211-4f1f-965a-f2b14cd60b11" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 746.156308] env[62070]: DEBUG oslo_concurrency.lockutils [None req-81043ab4-38b5-4edb-afd0-424b75f917e0 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Acquired lock "refresh_cache-d148d561-3211-4f1f-965a-f2b14cd60b11" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 746.156511] env[62070]: DEBUG nova.network.neutron [None req-81043ab4-38b5-4edb-afd0-424b75f917e0 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] [instance: d148d561-3211-4f1f-965a-f2b14cd60b11] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 746.357069] env[62070]: DEBUG oslo_concurrency.lockutils [None req-b307d371-7291-4b27-bac8-b0cebaeb9acc tempest-ServerDiagnosticsNegativeTest-1306946091 tempest-ServerDiagnosticsNegativeTest-1306946091-project-member] Releasing lock "refresh_cache-42a5c5d8-5c3a-4568-b212-d87f2951a334" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 746.357174] env[62070]: DEBUG nova.compute.manager [None req-b307d371-7291-4b27-bac8-b0cebaeb9acc tempest-ServerDiagnosticsNegativeTest-1306946091 tempest-ServerDiagnosticsNegativeTest-1306946091-project-member] [instance: 42a5c5d8-5c3a-4568-b212-d87f2951a334] Instance network_info: |[{"id": "c29b4364-dedb-48eb-b91d-45b2a60f1bdd", "address": "fa:16:3e:37:15:17", "network": {"id": "df33a08d-88db-4a22-846f-5b414705fc65", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.28", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "9d42cb2bbadf40d6b35f237f71234611", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4728adca-2846-416a-91a3-deb898faf1f3", "external-id": "nsx-vlan-transportzone-823", "segmentation_id": 823, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc29b4364-de", "ovs_interfaceid": "c29b4364-dedb-48eb-b91d-45b2a60f1bdd", "qbh_params": null, "qbg_params": null, 
"active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 746.357562] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-b307d371-7291-4b27-bac8-b0cebaeb9acc tempest-ServerDiagnosticsNegativeTest-1306946091 tempest-ServerDiagnosticsNegativeTest-1306946091-project-member] [instance: 42a5c5d8-5c3a-4568-b212-d87f2951a334] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:37:15:17', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4728adca-2846-416a-91a3-deb898faf1f3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c29b4364-dedb-48eb-b91d-45b2a60f1bdd', 'vif_model': 'vmxnet3'}] {{(pid=62070) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 746.365575] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-b307d371-7291-4b27-bac8-b0cebaeb9acc tempest-ServerDiagnosticsNegativeTest-1306946091 tempest-ServerDiagnosticsNegativeTest-1306946091-project-member] Creating folder: Project (dd860ef9c03045979ef05776693c9776). Parent ref: group-v245319. {{(pid=62070) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 746.365926] env[62070]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-09470118-259b-4c3a-a69a-170c39b6140c {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.377538] env[62070]: INFO nova.virt.vmwareapi.vm_util [None req-b307d371-7291-4b27-bac8-b0cebaeb9acc tempest-ServerDiagnosticsNegativeTest-1306946091 tempest-ServerDiagnosticsNegativeTest-1306946091-project-member] Created folder: Project (dd860ef9c03045979ef05776693c9776) in parent group-v245319. [ 746.377811] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-b307d371-7291-4b27-bac8-b0cebaeb9acc tempest-ServerDiagnosticsNegativeTest-1306946091 tempest-ServerDiagnosticsNegativeTest-1306946091-project-member] Creating folder: Instances. Parent ref: group-v245357. {{(pid=62070) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 746.378118] env[62070]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-677aa245-cfc5-4429-a7d3-119845979333 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.388652] env[62070]: INFO nova.virt.vmwareapi.vm_util [None req-b307d371-7291-4b27-bac8-b0cebaeb9acc tempest-ServerDiagnosticsNegativeTest-1306946091 tempest-ServerDiagnosticsNegativeTest-1306946091-project-member] Created folder: Instances in parent group-v245357. [ 746.389045] env[62070]: DEBUG oslo.service.loopingcall [None req-b307d371-7291-4b27-bac8-b0cebaeb9acc tempest-ServerDiagnosticsNegativeTest-1306946091 tempest-ServerDiagnosticsNegativeTest-1306946091-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 746.389303] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 42a5c5d8-5c3a-4568-b212-d87f2951a334] Creating VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 746.389577] env[62070]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7629e4f2-6216-454b-a0ec-47c92bc84b91 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.411101] env[62070]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 746.411101] env[62070]: value = "task-1121543" [ 746.411101] env[62070]: _type = "Task" [ 746.411101] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 746.420841] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1121543, 'name': CreateVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 746.428644] env[62070]: DEBUG nova.compute.manager [None req-3add793a-e278-4147-906b-0dddec8a7d40 tempest-ServerAddressesTestJSON-560794175 tempest-ServerAddressesTestJSON-560794175-project-member] [instance: a3fcb849-b015-43aa-8f95-0d4a87e2cecc] Start spawning the instance on the hypervisor. {{(pid=62070) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 746.456624] env[62070]: DEBUG nova.virt.hardware [None req-3add793a-e278-4147-906b-0dddec8a7d40 tempest-ServerAddressesTestJSON-560794175 tempest-ServerAddressesTestJSON-560794175-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T09:21:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T09:21:17Z,direct_url=,disk_format='vmdk',id=43ea607c-7ece-4601-9b11-75c6a16aa7dd,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9d42cb2bbadf40d6b35f237f71234611',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T09:21:18Z,virtual_size=,visibility=), allow threads: False {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 746.457017] env[62070]: DEBUG nova.virt.hardware [None req-3add793a-e278-4147-906b-0dddec8a7d40 tempest-ServerAddressesTestJSON-560794175 tempest-ServerAddressesTestJSON-560794175-project-member] Flavor limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 746.457213] env[62070]: DEBUG nova.virt.hardware [None req-3add793a-e278-4147-906b-0dddec8a7d40 tempest-ServerAddressesTestJSON-560794175 tempest-ServerAddressesTestJSON-560794175-project-member] Image limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 746.457452] env[62070]: DEBUG nova.virt.hardware [None req-3add793a-e278-4147-906b-0dddec8a7d40 tempest-ServerAddressesTestJSON-560794175 tempest-ServerAddressesTestJSON-560794175-project-member] Flavor pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 746.457605] env[62070]: DEBUG nova.virt.hardware [None 
req-3add793a-e278-4147-906b-0dddec8a7d40 tempest-ServerAddressesTestJSON-560794175 tempest-ServerAddressesTestJSON-560794175-project-member] Image pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 746.457762] env[62070]: DEBUG nova.virt.hardware [None req-3add793a-e278-4147-906b-0dddec8a7d40 tempest-ServerAddressesTestJSON-560794175 tempest-ServerAddressesTestJSON-560794175-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 746.458017] env[62070]: DEBUG nova.virt.hardware [None req-3add793a-e278-4147-906b-0dddec8a7d40 tempest-ServerAddressesTestJSON-560794175 tempest-ServerAddressesTestJSON-560794175-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 746.458227] env[62070]: DEBUG nova.virt.hardware [None req-3add793a-e278-4147-906b-0dddec8a7d40 tempest-ServerAddressesTestJSON-560794175 tempest-ServerAddressesTestJSON-560794175-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 746.458428] env[62070]: DEBUG nova.virt.hardware [None req-3add793a-e278-4147-906b-0dddec8a7d40 tempest-ServerAddressesTestJSON-560794175 tempest-ServerAddressesTestJSON-560794175-project-member] Got 1 possible topologies {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 746.458685] env[62070]: DEBUG nova.virt.hardware [None req-3add793a-e278-4147-906b-0dddec8a7d40 tempest-ServerAddressesTestJSON-560794175 tempest-ServerAddressesTestJSON-560794175-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 746.458750] env[62070]: DEBUG nova.virt.hardware [None req-3add793a-e278-4147-906b-0dddec8a7d40 tempest-ServerAddressesTestJSON-560794175 tempest-ServerAddressesTestJSON-560794175-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 746.459700] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3182ba75-6760-4587-9130-25233bd3472b {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.467814] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e6a04e1-f6ff-4ba2-ba4a-7e3f7fb7f0eb {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.484184] env[62070]: DEBUG nova.scheduler.client.report [None req-7c1b8e29-6c3f-402f-942e-08b08c8eac7d tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 
'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 746.532149] env[62070]: DEBUG oslo_vmware.api [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Task: {'id': task-1121538, 'name': Rename_Task, 'duration_secs': 1.299685} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 746.532496] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] [instance: bcafa04d-904b-4eab-aba1-35180c2d4b22] Powering on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 746.533026] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-debac4c1-da2d-4daf-bf5a-03eb2c8a895a {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.539921] env[62070]: DEBUG oslo_vmware.api [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Waiting for the task: (returnval){ [ 746.539921] env[62070]: value = "task-1121544" [ 746.539921] env[62070]: _type = "Task" [ 746.539921] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 746.549468] env[62070]: DEBUG oslo_vmware.api [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Task: {'id': task-1121544, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 746.575958] env[62070]: DEBUG nova.compute.manager [req-8d61c982-46d8-4f36-8e53-c0df7b6b62e9 req-b08d39c4-1ecc-41e6-89c2-eb4f63f6b921 service nova] [instance: 42a5c5d8-5c3a-4568-b212-d87f2951a334] Received event network-changed-c29b4364-dedb-48eb-b91d-45b2a60f1bdd {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 746.576177] env[62070]: DEBUG nova.compute.manager [req-8d61c982-46d8-4f36-8e53-c0df7b6b62e9 req-b08d39c4-1ecc-41e6-89c2-eb4f63f6b921 service nova] [instance: 42a5c5d8-5c3a-4568-b212-d87f2951a334] Refreshing instance network info cache due to event network-changed-c29b4364-dedb-48eb-b91d-45b2a60f1bdd. 
{{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 746.576400] env[62070]: DEBUG oslo_concurrency.lockutils [req-8d61c982-46d8-4f36-8e53-c0df7b6b62e9 req-b08d39c4-1ecc-41e6-89c2-eb4f63f6b921 service nova] Acquiring lock "refresh_cache-42a5c5d8-5c3a-4568-b212-d87f2951a334" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 746.576543] env[62070]: DEBUG oslo_concurrency.lockutils [req-8d61c982-46d8-4f36-8e53-c0df7b6b62e9 req-b08d39c4-1ecc-41e6-89c2-eb4f63f6b921 service nova] Acquired lock "refresh_cache-42a5c5d8-5c3a-4568-b212-d87f2951a334" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 746.576753] env[62070]: DEBUG nova.network.neutron [req-8d61c982-46d8-4f36-8e53-c0df7b6b62e9 req-b08d39c4-1ecc-41e6-89c2-eb4f63f6b921 service nova] [instance: 42a5c5d8-5c3a-4568-b212-d87f2951a334] Refreshing network info cache for port c29b4364-dedb-48eb-b91d-45b2a60f1bdd {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 746.598616] env[62070]: DEBUG oslo_vmware.api [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Task: {'id': task-1121540, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 746.612217] env[62070]: DEBUG nova.network.neutron [-] [instance: 1c1730e5-88af-4c7f-8bcc-d494db2cd723] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 746.887706] env[62070]: DEBUG nova.network.neutron [None req-81043ab4-38b5-4edb-afd0-424b75f917e0 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] [instance: d148d561-3211-4f1f-965a-f2b14cd60b11] Updating instance_info_cache with network_info: [{"id": "c5e6098a-ebbb-4eee-ba72-4ddaad679830", "address": "fa:16:3e:7b:ab:3c", "network": {"id": "df33a08d-88db-4a22-846f-5b414705fc65", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.22", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "9d42cb2bbadf40d6b35f237f71234611", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4728adca-2846-416a-91a3-deb898faf1f3", "external-id": "nsx-vlan-transportzone-823", "segmentation_id": 823, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc5e6098a-eb", "ovs_interfaceid": "c5e6098a-ebbb-4eee-ba72-4ddaad679830", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 746.921591] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1121543, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 746.975123] env[62070]: DEBUG nova.network.neutron [None req-3add793a-e278-4147-906b-0dddec8a7d40 tempest-ServerAddressesTestJSON-560794175 tempest-ServerAddressesTestJSON-560794175-project-member] [instance: a3fcb849-b015-43aa-8f95-0d4a87e2cecc] Successfully updated port: fcd8b688-1cad-4cae-a1db-2d606703425c {{(pid=62070) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 746.989614] env[62070]: DEBUG oslo_concurrency.lockutils [None req-7c1b8e29-6c3f-402f-942e-08b08c8eac7d tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.595s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 746.990248] env[62070]: DEBUG nova.compute.manager [None req-7c1b8e29-6c3f-402f-942e-08b08c8eac7d tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: a3c42653-9a4b-42d3-bc38-8d46d95c8f64] Start building networks asynchronously for instance. {{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 746.994026] env[62070]: DEBUG oslo_concurrency.lockutils [None req-35085f63-b149-4a7f-b35a-5da7bb134d28 tempest-ServerTagsTestJSON-107248279 tempest-ServerTagsTestJSON-107248279-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 13.374s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 746.994692] env[62070]: DEBUG nova.objects.instance [None req-35085f63-b149-4a7f-b35a-5da7bb134d28 tempest-ServerTagsTestJSON-107248279 tempest-ServerTagsTestJSON-107248279-project-member] Lazy-loading 'resources' on Instance uuid 30d782e4-30c7-41f6-b30d-95a9a59cf83c {{(pid=62070) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 747.050555] env[62070]: DEBUG oslo_vmware.api [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Task: {'id': task-1121544, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 747.098214] env[62070]: DEBUG oslo_vmware.api [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Task: {'id': task-1121540, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.224699} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 747.098485] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore1] 5a146d8f-6921-4b3e-8696-d2804fb855ba/5a146d8f-6921-4b3e-8696-d2804fb855ba.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 747.098719] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] [instance: 5a146d8f-6921-4b3e-8696-d2804fb855ba] Extending root virtual disk to 1048576 {{(pid=62070) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 747.098967] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-86e9141e-2f41-4224-b243-75ed8a74951e {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.105267] env[62070]: DEBUG oslo_vmware.api [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Waiting for the task: (returnval){ [ 747.105267] env[62070]: value = "task-1121545" [ 747.105267] env[62070]: _type = "Task" [ 747.105267] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 747.114798] env[62070]: DEBUG oslo_vmware.api [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Task: {'id': task-1121545, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 747.119762] env[62070]: INFO nova.compute.manager [-] [instance: 1c1730e5-88af-4c7f-8bcc-d494db2cd723] Took 1.32 seconds to deallocate network for instance. [ 747.166223] env[62070]: DEBUG nova.compute.manager [req-ff43a335-3c57-4e83-a537-93c2a8e26f5c req-92d46e60-7ee8-408d-8717-19baa8607849 service nova] [instance: 1c1730e5-88af-4c7f-8bcc-d494db2cd723] Received event network-vif-deleted-c8fbe566-a7d6-48e0-9e05-0bcb216b8111 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 747.340449] env[62070]: DEBUG nova.network.neutron [req-8d61c982-46d8-4f36-8e53-c0df7b6b62e9 req-b08d39c4-1ecc-41e6-89c2-eb4f63f6b921 service nova] [instance: 42a5c5d8-5c3a-4568-b212-d87f2951a334] Updated VIF entry in instance network info cache for port c29b4364-dedb-48eb-b91d-45b2a60f1bdd. 
{{(pid=62070) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 747.340836] env[62070]: DEBUG nova.network.neutron [req-8d61c982-46d8-4f36-8e53-c0df7b6b62e9 req-b08d39c4-1ecc-41e6-89c2-eb4f63f6b921 service nova] [instance: 42a5c5d8-5c3a-4568-b212-d87f2951a334] Updating instance_info_cache with network_info: [{"id": "c29b4364-dedb-48eb-b91d-45b2a60f1bdd", "address": "fa:16:3e:37:15:17", "network": {"id": "df33a08d-88db-4a22-846f-5b414705fc65", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.28", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "9d42cb2bbadf40d6b35f237f71234611", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4728adca-2846-416a-91a3-deb898faf1f3", "external-id": "nsx-vlan-transportzone-823", "segmentation_id": 823, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc29b4364-de", "ovs_interfaceid": "c29b4364-dedb-48eb-b91d-45b2a60f1bdd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 747.391758] env[62070]: DEBUG oslo_concurrency.lockutils [None req-81043ab4-38b5-4edb-afd0-424b75f917e0 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Releasing lock "refresh_cache-d148d561-3211-4f1f-965a-f2b14cd60b11" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 747.421873] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1121543, 'name': CreateVM_Task, 'duration_secs': 0.693327} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 747.422250] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 42a5c5d8-5c3a-4568-b212-d87f2951a334] Created VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 747.422880] env[62070]: DEBUG oslo_concurrency.lockutils [None req-b307d371-7291-4b27-bac8-b0cebaeb9acc tempest-ServerDiagnosticsNegativeTest-1306946091 tempest-ServerDiagnosticsNegativeTest-1306946091-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 747.423123] env[62070]: DEBUG oslo_concurrency.lockutils [None req-b307d371-7291-4b27-bac8-b0cebaeb9acc tempest-ServerDiagnosticsNegativeTest-1306946091 tempest-ServerDiagnosticsNegativeTest-1306946091-project-member] Acquired lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 747.423490] env[62070]: DEBUG oslo_concurrency.lockutils [None req-b307d371-7291-4b27-bac8-b0cebaeb9acc tempest-ServerDiagnosticsNegativeTest-1306946091 tempest-ServerDiagnosticsNegativeTest-1306946091-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 747.423793] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bfbe094d-22cc-4b3c-8add-2ef407582596 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.428805] env[62070]: DEBUG oslo_vmware.api [None req-b307d371-7291-4b27-bac8-b0cebaeb9acc tempest-ServerDiagnosticsNegativeTest-1306946091 tempest-ServerDiagnosticsNegativeTest-1306946091-project-member] Waiting for the task: (returnval){ [ 747.428805] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]529c1851-d19b-3b74-9c9c-d0add06c69b1" [ 747.428805] env[62070]: _type = "Task" [ 747.428805] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 747.436362] env[62070]: DEBUG oslo_vmware.api [None req-b307d371-7291-4b27-bac8-b0cebaeb9acc tempest-ServerDiagnosticsNegativeTest-1306946091 tempest-ServerDiagnosticsNegativeTest-1306946091-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]529c1851-d19b-3b74-9c9c-d0add06c69b1, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 747.478079] env[62070]: DEBUG oslo_concurrency.lockutils [None req-3add793a-e278-4147-906b-0dddec8a7d40 tempest-ServerAddressesTestJSON-560794175 tempest-ServerAddressesTestJSON-560794175-project-member] Acquiring lock "refresh_cache-a3fcb849-b015-43aa-8f95-0d4a87e2cecc" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 747.478226] env[62070]: DEBUG oslo_concurrency.lockutils [None req-3add793a-e278-4147-906b-0dddec8a7d40 tempest-ServerAddressesTestJSON-560794175 tempest-ServerAddressesTestJSON-560794175-project-member] Acquired lock "refresh_cache-a3fcb849-b015-43aa-8f95-0d4a87e2cecc" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 747.478373] env[62070]: DEBUG nova.network.neutron [None req-3add793a-e278-4147-906b-0dddec8a7d40 tempest-ServerAddressesTestJSON-560794175 tempest-ServerAddressesTestJSON-560794175-project-member] [instance: a3fcb849-b015-43aa-8f95-0d4a87e2cecc] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 747.497386] env[62070]: DEBUG nova.compute.utils [None req-7c1b8e29-6c3f-402f-942e-08b08c8eac7d tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Using /dev/sd instead of None {{(pid=62070) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 747.501981] env[62070]: DEBUG nova.compute.manager [None req-7c1b8e29-6c3f-402f-942e-08b08c8eac7d tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: a3c42653-9a4b-42d3-bc38-8d46d95c8f64] Allocating IP information in the background. {{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 747.501981] env[62070]: DEBUG nova.network.neutron [None req-7c1b8e29-6c3f-402f-942e-08b08c8eac7d tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: a3c42653-9a4b-42d3-bc38-8d46d95c8f64] allocate_for_instance() {{(pid=62070) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 747.539970] env[62070]: DEBUG nova.policy [None req-7c1b8e29-6c3f-402f-942e-08b08c8eac7d tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'db9baf29d0b5489da2657286bfd695c0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '91e246e32f29422e90fae974cfee9d8f', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62070) authorize /opt/stack/nova/nova/policy.py:201}} [ 747.550303] env[62070]: DEBUG oslo_vmware.api [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Task: {'id': task-1121544, 'name': PowerOnVM_Task, 'duration_secs': 0.732254} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 747.550553] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] [instance: bcafa04d-904b-4eab-aba1-35180c2d4b22] Powered on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 747.550722] env[62070]: INFO nova.compute.manager [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] [instance: bcafa04d-904b-4eab-aba1-35180c2d4b22] Took 9.02 seconds to spawn the instance on the hypervisor. [ 747.550900] env[62070]: DEBUG nova.compute.manager [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] [instance: bcafa04d-904b-4eab-aba1-35180c2d4b22] Checking state {{(pid=62070) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 747.551641] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d01b1b1d-58e1-4d98-9c8a-7e8de35d4497 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.614329] env[62070]: DEBUG oslo_vmware.api [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Task: {'id': task-1121545, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.339652} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 747.616880] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] [instance: 5a146d8f-6921-4b3e-8696-d2804fb855ba] Extended root virtual disk {{(pid=62070) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 747.617804] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82f90710-4ed6-4b00-90c6-46d87d540e31 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.633856] env[62070]: DEBUG oslo_concurrency.lockutils [None req-5a8984d4-9dbc-4d3d-b314-9d28f25f6fb9 tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 747.644434] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] [instance: 5a146d8f-6921-4b3e-8696-d2804fb855ba] Reconfiguring VM instance instance-0000002b to attach disk [datastore1] 5a146d8f-6921-4b3e-8696-d2804fb855ba/5a146d8f-6921-4b3e-8696-d2804fb855ba.vmdk or device None with type sparse {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 747.648972] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4f8e0657-522c-4aeb-87be-1e7e6b56f7b9 
{{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.668964] env[62070]: DEBUG oslo_vmware.api [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Waiting for the task: (returnval){ [ 747.668964] env[62070]: value = "task-1121546" [ 747.668964] env[62070]: _type = "Task" [ 747.668964] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 747.681536] env[62070]: DEBUG oslo_vmware.api [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Task: {'id': task-1121546, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 747.820796] env[62070]: DEBUG nova.network.neutron [None req-7c1b8e29-6c3f-402f-942e-08b08c8eac7d tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: a3c42653-9a4b-42d3-bc38-8d46d95c8f64] Successfully created port: 60d8bbf4-bc22-4c62-8ddd-d3a60ee21d1b {{(pid=62070) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 747.844686] env[62070]: DEBUG oslo_concurrency.lockutils [req-8d61c982-46d8-4f36-8e53-c0df7b6b62e9 req-b08d39c4-1ecc-41e6-89c2-eb4f63f6b921 service nova] Releasing lock "refresh_cache-42a5c5d8-5c3a-4568-b212-d87f2951a334" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 747.924611] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-522e04d5-9aaf-4f2d-b667-013a8d277a17 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.949871] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff2a3957-627d-4852-8a25-3997aaca94a7 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.957898] env[62070]: DEBUG oslo_vmware.api [None req-b307d371-7291-4b27-bac8-b0cebaeb9acc tempest-ServerDiagnosticsNegativeTest-1306946091 tempest-ServerDiagnosticsNegativeTest-1306946091-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]529c1851-d19b-3b74-9c9c-d0add06c69b1, 'name': SearchDatastore_Task, 'duration_secs': 0.058006} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 747.960414] env[62070]: DEBUG oslo_concurrency.lockutils [None req-b307d371-7291-4b27-bac8-b0cebaeb9acc tempest-ServerDiagnosticsNegativeTest-1306946091 tempest-ServerDiagnosticsNegativeTest-1306946091-project-member] Releasing lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 747.960719] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-b307d371-7291-4b27-bac8-b0cebaeb9acc tempest-ServerDiagnosticsNegativeTest-1306946091 tempest-ServerDiagnosticsNegativeTest-1306946091-project-member] [instance: 42a5c5d8-5c3a-4568-b212-d87f2951a334] Processing image 43ea607c-7ece-4601-9b11-75c6a16aa7dd {{(pid=62070) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 747.960921] env[62070]: DEBUG oslo_concurrency.lockutils [None req-b307d371-7291-4b27-bac8-b0cebaeb9acc tempest-ServerDiagnosticsNegativeTest-1306946091 tempest-ServerDiagnosticsNegativeTest-1306946091-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 747.961133] env[62070]: DEBUG oslo_concurrency.lockutils [None req-b307d371-7291-4b27-bac8-b0cebaeb9acc tempest-ServerDiagnosticsNegativeTest-1306946091 tempest-ServerDiagnosticsNegativeTest-1306946091-project-member] Acquired lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 747.961254] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-b307d371-7291-4b27-bac8-b0cebaeb9acc tempest-ServerDiagnosticsNegativeTest-1306946091 tempest-ServerDiagnosticsNegativeTest-1306946091-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 747.961604] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-81043ab4-38b5-4edb-afd0-424b75f917e0 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] [instance: d148d561-3211-4f1f-965a-f2b14cd60b11] Updating instance 'd148d561-3211-4f1f-965a-f2b14cd60b11' progress to 83 {{(pid=62070) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 747.967711] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-bb177d19-b99f-403d-bc09-915c3a1b07c5 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.990718] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-b307d371-7291-4b27-bac8-b0cebaeb9acc tempest-ServerDiagnosticsNegativeTest-1306946091 tempest-ServerDiagnosticsNegativeTest-1306946091-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 747.990718] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-b307d371-7291-4b27-bac8-b0cebaeb9acc tempest-ServerDiagnosticsNegativeTest-1306946091 tempest-ServerDiagnosticsNegativeTest-1306946091-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62070) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 747.991374] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-396564d2-a945-4147-a9cd-798005acdebe {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.999264] env[62070]: DEBUG oslo_vmware.api [None req-b307d371-7291-4b27-bac8-b0cebaeb9acc tempest-ServerDiagnosticsNegativeTest-1306946091 tempest-ServerDiagnosticsNegativeTest-1306946091-project-member] Waiting for the task: (returnval){ [ 747.999264] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]52a0b986-7615-718d-4c09-d4fe6a7a6e1e" [ 747.999264] env[62070]: _type = "Task" [ 747.999264] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 748.004854] env[62070]: DEBUG nova.compute.manager [None req-7c1b8e29-6c3f-402f-942e-08b08c8eac7d tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: a3c42653-9a4b-42d3-bc38-8d46d95c8f64] Start building block device mappings for instance. {{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 748.017588] env[62070]: DEBUG oslo_vmware.api [None req-b307d371-7291-4b27-bac8-b0cebaeb9acc tempest-ServerDiagnosticsNegativeTest-1306946091 tempest-ServerDiagnosticsNegativeTest-1306946091-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52a0b986-7615-718d-4c09-d4fe6a7a6e1e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 748.025720] env[62070]: DEBUG nova.network.neutron [None req-3add793a-e278-4147-906b-0dddec8a7d40 tempest-ServerAddressesTestJSON-560794175 tempest-ServerAddressesTestJSON-560794175-project-member] [instance: a3fcb849-b015-43aa-8f95-0d4a87e2cecc] Instance cache missing network info. {{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 748.046722] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b1e2775-c05e-4bd8-a76a-b82ce6bf6f4f {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.057770] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e90fd97d-9d11-408c-b3a7-1a8328f2e388 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.068800] env[62070]: INFO nova.compute.manager [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] [instance: bcafa04d-904b-4eab-aba1-35180c2d4b22] Took 29.84 seconds to build instance. 
[ 748.099126] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Lock "bcafa04d-904b-4eab-aba1-35180c2d4b22" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 113.738s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 748.102636] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0287e68-fbb2-469d-afd1-852dba945948 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.112387] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdca1d06-c75f-4dd3-b84f-3091437f558a {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.127646] env[62070]: DEBUG nova.compute.provider_tree [None req-35085f63-b149-4a7f-b35a-5da7bb134d28 tempest-ServerTagsTestJSON-107248279 tempest-ServerTagsTestJSON-107248279-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 748.179453] env[62070]: DEBUG oslo_vmware.api [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Task: {'id': task-1121546, 'name': ReconfigVM_Task, 'duration_secs': 0.309669} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 748.179953] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] [instance: 5a146d8f-6921-4b3e-8696-d2804fb855ba] Reconfigured VM instance instance-0000002b to attach disk [datastore1] 5a146d8f-6921-4b3e-8696-d2804fb855ba/5a146d8f-6921-4b3e-8696-d2804fb855ba.vmdk or device None with type sparse {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 748.180706] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0bf67003-d24d-48c1-bdbe-6cb3b1f14b89 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.188472] env[62070]: DEBUG oslo_vmware.api [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Waiting for the task: (returnval){ [ 748.188472] env[62070]: value = "task-1121547" [ 748.188472] env[62070]: _type = "Task" [ 748.188472] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 748.197157] env[62070]: DEBUG oslo_vmware.api [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Task: {'id': task-1121547, 'name': Rename_Task} progress is 5%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 748.225728] env[62070]: DEBUG nova.network.neutron [None req-3add793a-e278-4147-906b-0dddec8a7d40 tempest-ServerAddressesTestJSON-560794175 tempest-ServerAddressesTestJSON-560794175-project-member] [instance: a3fcb849-b015-43aa-8f95-0d4a87e2cecc] Updating instance_info_cache with network_info: [{"id": "fcd8b688-1cad-4cae-a1db-2d606703425c", "address": "fa:16:3e:92:77:c7", "network": {"id": "84f186c9-b83c-4c32-8f89-5baa829e380d", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-38403900-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c4b0335b052b4d3c80922314fa650b30", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "777870ab-362f-4a17-9c1c-8d9cc26cd4ce", "external-id": "nsx-vlan-transportzone-987", "segmentation_id": 987, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfcd8b688-1c", "ovs_interfaceid": "fcd8b688-1cad-4cae-a1db-2d606703425c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 748.471858] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-81043ab4-38b5-4edb-afd0-424b75f917e0 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] [instance: d148d561-3211-4f1f-965a-f2b14cd60b11] Powering on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 748.472196] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8b02459d-9077-4f7b-9430-4b5b4c0f19c1 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.479912] env[62070]: DEBUG oslo_vmware.api [None req-81043ab4-38b5-4edb-afd0-424b75f917e0 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Waiting for the task: (returnval){ [ 748.479912] env[62070]: value = "task-1121548" [ 748.479912] env[62070]: _type = "Task" [ 748.479912] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 748.487323] env[62070]: DEBUG oslo_vmware.api [None req-81043ab4-38b5-4edb-afd0-424b75f917e0 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Task: {'id': task-1121548, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 748.512255] env[62070]: DEBUG oslo_vmware.api [None req-b307d371-7291-4b27-bac8-b0cebaeb9acc tempest-ServerDiagnosticsNegativeTest-1306946091 tempest-ServerDiagnosticsNegativeTest-1306946091-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52a0b986-7615-718d-4c09-d4fe6a7a6e1e, 'name': SearchDatastore_Task, 'duration_secs': 0.037201} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 748.513534] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-709d3a21-d8a4-4a96-8701-5d3f04c1456d {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.519035] env[62070]: DEBUG oslo_vmware.api [None req-b307d371-7291-4b27-bac8-b0cebaeb9acc tempest-ServerDiagnosticsNegativeTest-1306946091 tempest-ServerDiagnosticsNegativeTest-1306946091-project-member] Waiting for the task: (returnval){ [ 748.519035] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]528e86b1-e125-4493-eac4-4ff7ea2566d7" [ 748.519035] env[62070]: _type = "Task" [ 748.519035] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 748.526675] env[62070]: DEBUG oslo_vmware.api [None req-b307d371-7291-4b27-bac8-b0cebaeb9acc tempest-ServerDiagnosticsNegativeTest-1306946091 tempest-ServerDiagnosticsNegativeTest-1306946091-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]528e86b1-e125-4493-eac4-4ff7ea2566d7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 748.602759] env[62070]: DEBUG nova.compute.manager [req-d704aaf8-2b24-45e8-ad95-4df22d061722 req-a7850a0b-58d0-4617-a17a-73e1342c8d6c service nova] [instance: a3fcb849-b015-43aa-8f95-0d4a87e2cecc] Received event network-vif-plugged-fcd8b688-1cad-4cae-a1db-2d606703425c {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 748.603021] env[62070]: DEBUG oslo_concurrency.lockutils [req-d704aaf8-2b24-45e8-ad95-4df22d061722 req-a7850a0b-58d0-4617-a17a-73e1342c8d6c service nova] Acquiring lock "a3fcb849-b015-43aa-8f95-0d4a87e2cecc-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 748.603202] env[62070]: DEBUG oslo_concurrency.lockutils [req-d704aaf8-2b24-45e8-ad95-4df22d061722 req-a7850a0b-58d0-4617-a17a-73e1342c8d6c service nova] Lock "a3fcb849-b015-43aa-8f95-0d4a87e2cecc-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 748.603367] env[62070]: DEBUG oslo_concurrency.lockutils [req-d704aaf8-2b24-45e8-ad95-4df22d061722 req-a7850a0b-58d0-4617-a17a-73e1342c8d6c service nova] Lock "a3fcb849-b015-43aa-8f95-0d4a87e2cecc-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 748.603548] env[62070]: DEBUG nova.compute.manager [req-d704aaf8-2b24-45e8-ad95-4df22d061722 req-a7850a0b-58d0-4617-a17a-73e1342c8d6c service nova] [instance: a3fcb849-b015-43aa-8f95-0d4a87e2cecc] No waiting events found dispatching network-vif-plugged-fcd8b688-1cad-4cae-a1db-2d606703425c {{(pid=62070) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 748.603716] env[62070]: WARNING nova.compute.manager [req-d704aaf8-2b24-45e8-ad95-4df22d061722 req-a7850a0b-58d0-4617-a17a-73e1342c8d6c service nova] [instance: a3fcb849-b015-43aa-8f95-0d4a87e2cecc] Received unexpected event 
network-vif-plugged-fcd8b688-1cad-4cae-a1db-2d606703425c for instance with vm_state building and task_state spawning. [ 748.603911] env[62070]: DEBUG nova.compute.manager [req-d704aaf8-2b24-45e8-ad95-4df22d061722 req-a7850a0b-58d0-4617-a17a-73e1342c8d6c service nova] [instance: a3fcb849-b015-43aa-8f95-0d4a87e2cecc] Received event network-changed-fcd8b688-1cad-4cae-a1db-2d606703425c {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 748.604087] env[62070]: DEBUG nova.compute.manager [req-d704aaf8-2b24-45e8-ad95-4df22d061722 req-a7850a0b-58d0-4617-a17a-73e1342c8d6c service nova] [instance: a3fcb849-b015-43aa-8f95-0d4a87e2cecc] Refreshing instance network info cache due to event network-changed-fcd8b688-1cad-4cae-a1db-2d606703425c. {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 748.604253] env[62070]: DEBUG oslo_concurrency.lockutils [req-d704aaf8-2b24-45e8-ad95-4df22d061722 req-a7850a0b-58d0-4617-a17a-73e1342c8d6c service nova] Acquiring lock "refresh_cache-a3fcb849-b015-43aa-8f95-0d4a87e2cecc" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 748.607781] env[62070]: DEBUG nova.compute.manager [None req-a1a0245d-1ee3-4e1c-adaf-6c5d189bff8e tempest-ServerMetadataNegativeTestJSON-1698615623 tempest-ServerMetadataNegativeTestJSON-1698615623-project-member] [instance: 20e7a993-b1fb-4359-ab35-8b0f06ca0121] Starting instance... {{(pid=62070) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 748.629930] env[62070]: DEBUG nova.scheduler.client.report [None req-35085f63-b149-4a7f-b35a-5da7bb134d28 tempest-ServerTagsTestJSON-107248279 tempest-ServerTagsTestJSON-107248279-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 748.696903] env[62070]: DEBUG oslo_vmware.api [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Task: {'id': task-1121547, 'name': Rename_Task, 'duration_secs': 0.150986} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 748.697208] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] [instance: 5a146d8f-6921-4b3e-8696-d2804fb855ba] Powering on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 748.697459] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ebc4375c-2e11-47a1-8cf1-231ef9b50c40 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.706868] env[62070]: DEBUG oslo_vmware.api [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Waiting for the task: (returnval){ [ 748.706868] env[62070]: value = "task-1121549" [ 748.706868] env[62070]: _type = "Task" [ 748.706868] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 748.714384] env[62070]: DEBUG oslo_vmware.api [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Task: {'id': task-1121549, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 748.728103] env[62070]: DEBUG oslo_concurrency.lockutils [None req-3add793a-e278-4147-906b-0dddec8a7d40 tempest-ServerAddressesTestJSON-560794175 tempest-ServerAddressesTestJSON-560794175-project-member] Releasing lock "refresh_cache-a3fcb849-b015-43aa-8f95-0d4a87e2cecc" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 748.728398] env[62070]: DEBUG nova.compute.manager [None req-3add793a-e278-4147-906b-0dddec8a7d40 tempest-ServerAddressesTestJSON-560794175 tempest-ServerAddressesTestJSON-560794175-project-member] [instance: a3fcb849-b015-43aa-8f95-0d4a87e2cecc] Instance network_info: |[{"id": "fcd8b688-1cad-4cae-a1db-2d606703425c", "address": "fa:16:3e:92:77:c7", "network": {"id": "84f186c9-b83c-4c32-8f89-5baa829e380d", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-38403900-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c4b0335b052b4d3c80922314fa650b30", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "777870ab-362f-4a17-9c1c-8d9cc26cd4ce", "external-id": "nsx-vlan-transportzone-987", "segmentation_id": 987, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfcd8b688-1c", "ovs_interfaceid": "fcd8b688-1cad-4cae-a1db-2d606703425c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 748.729039] env[62070]: DEBUG oslo_concurrency.lockutils 
[req-d704aaf8-2b24-45e8-ad95-4df22d061722 req-a7850a0b-58d0-4617-a17a-73e1342c8d6c service nova] Acquired lock "refresh_cache-a3fcb849-b015-43aa-8f95-0d4a87e2cecc" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 748.729039] env[62070]: DEBUG nova.network.neutron [req-d704aaf8-2b24-45e8-ad95-4df22d061722 req-a7850a0b-58d0-4617-a17a-73e1342c8d6c service nova] [instance: a3fcb849-b015-43aa-8f95-0d4a87e2cecc] Refreshing network info cache for port fcd8b688-1cad-4cae-a1db-2d606703425c {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 748.730349] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-3add793a-e278-4147-906b-0dddec8a7d40 tempest-ServerAddressesTestJSON-560794175 tempest-ServerAddressesTestJSON-560794175-project-member] [instance: a3fcb849-b015-43aa-8f95-0d4a87e2cecc] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:92:77:c7', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '777870ab-362f-4a17-9c1c-8d9cc26cd4ce', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'fcd8b688-1cad-4cae-a1db-2d606703425c', 'vif_model': 'vmxnet3'}] {{(pid=62070) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 748.737937] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-3add793a-e278-4147-906b-0dddec8a7d40 tempest-ServerAddressesTestJSON-560794175 tempest-ServerAddressesTestJSON-560794175-project-member] Creating folder: Project (c4b0335b052b4d3c80922314fa650b30). Parent ref: group-v245319. {{(pid=62070) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 748.739306] env[62070]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a243fc64-48b6-4a9a-9477-48d3a6f90666 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.750082] env[62070]: INFO nova.virt.vmwareapi.vm_util [None req-3add793a-e278-4147-906b-0dddec8a7d40 tempest-ServerAddressesTestJSON-560794175 tempest-ServerAddressesTestJSON-560794175-project-member] Created folder: Project (c4b0335b052b4d3c80922314fa650b30) in parent group-v245319. [ 748.750571] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-3add793a-e278-4147-906b-0dddec8a7d40 tempest-ServerAddressesTestJSON-560794175 tempest-ServerAddressesTestJSON-560794175-project-member] Creating folder: Instances. Parent ref: group-v245360. {{(pid=62070) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 748.750571] env[62070]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c01b7bd1-c4ef-4a97-a2d3-6fef9e7360c5 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.760657] env[62070]: INFO nova.virt.vmwareapi.vm_util [None req-3add793a-e278-4147-906b-0dddec8a7d40 tempest-ServerAddressesTestJSON-560794175 tempest-ServerAddressesTestJSON-560794175-project-member] Created folder: Instances in parent group-v245360. [ 748.760801] env[62070]: DEBUG oslo.service.loopingcall [None req-3add793a-e278-4147-906b-0dddec8a7d40 tempest-ServerAddressesTestJSON-560794175 tempest-ServerAddressesTestJSON-560794175-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 748.762094] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a3fcb849-b015-43aa-8f95-0d4a87e2cecc] Creating VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 748.762094] env[62070]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a8801a12-e462-48cf-aa45-35064064a3d1 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.780414] env[62070]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 748.780414] env[62070]: value = "task-1121552" [ 748.780414] env[62070]: _type = "Task" [ 748.780414] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 748.787664] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1121552, 'name': CreateVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 748.992464] env[62070]: DEBUG oslo_vmware.api [None req-81043ab4-38b5-4edb-afd0-424b75f917e0 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Task: {'id': task-1121548, 'name': PowerOnVM_Task, 'duration_secs': 0.484477} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 748.992795] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-81043ab4-38b5-4edb-afd0-424b75f917e0 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] [instance: d148d561-3211-4f1f-965a-f2b14cd60b11] Powered on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 748.993042] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-81043ab4-38b5-4edb-afd0-424b75f917e0 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] [instance: d148d561-3211-4f1f-965a-f2b14cd60b11] Updating instance 'd148d561-3211-4f1f-965a-f2b14cd60b11' progress to 100 {{(pid=62070) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 749.014529] env[62070]: DEBUG nova.compute.manager [None req-7c1b8e29-6c3f-402f-942e-08b08c8eac7d tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: a3c42653-9a4b-42d3-bc38-8d46d95c8f64] Start spawning the instance on the hypervisor. {{(pid=62070) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 749.030155] env[62070]: DEBUG oslo_vmware.api [None req-b307d371-7291-4b27-bac8-b0cebaeb9acc tempest-ServerDiagnosticsNegativeTest-1306946091 tempest-ServerDiagnosticsNegativeTest-1306946091-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]528e86b1-e125-4493-eac4-4ff7ea2566d7, 'name': SearchDatastore_Task, 'duration_secs': 0.008773} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 749.030425] env[62070]: DEBUG oslo_concurrency.lockutils [None req-b307d371-7291-4b27-bac8-b0cebaeb9acc tempest-ServerDiagnosticsNegativeTest-1306946091 tempest-ServerDiagnosticsNegativeTest-1306946091-project-member] Releasing lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 749.030686] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-b307d371-7291-4b27-bac8-b0cebaeb9acc tempest-ServerDiagnosticsNegativeTest-1306946091 tempest-ServerDiagnosticsNegativeTest-1306946091-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore1] 42a5c5d8-5c3a-4568-b212-d87f2951a334/42a5c5d8-5c3a-4568-b212-d87f2951a334.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 749.030950] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f4f2df44-558f-4342-b16c-c205cbf1f77d {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.037924] env[62070]: DEBUG oslo_vmware.api [None req-b307d371-7291-4b27-bac8-b0cebaeb9acc tempest-ServerDiagnosticsNegativeTest-1306946091 tempest-ServerDiagnosticsNegativeTest-1306946091-project-member] Waiting for the task: (returnval){ [ 749.037924] env[62070]: value = "task-1121553" [ 749.037924] env[62070]: _type = "Task" [ 749.037924] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 749.046015] env[62070]: DEBUG nova.virt.hardware [None req-7c1b8e29-6c3f-402f-942e-08b08c8eac7d tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T09:21:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T09:21:17Z,direct_url=,disk_format='vmdk',id=43ea607c-7ece-4601-9b11-75c6a16aa7dd,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9d42cb2bbadf40d6b35f237f71234611',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T09:21:18Z,virtual_size=,visibility=), allow threads: False {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 749.046395] env[62070]: DEBUG nova.virt.hardware [None req-7c1b8e29-6c3f-402f-942e-08b08c8eac7d tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Flavor limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 749.046647] env[62070]: DEBUG nova.virt.hardware [None req-7c1b8e29-6c3f-402f-942e-08b08c8eac7d tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Image limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 749.046943] env[62070]: DEBUG nova.virt.hardware 
[None req-7c1b8e29-6c3f-402f-942e-08b08c8eac7d tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Flavor pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 749.047197] env[62070]: DEBUG nova.virt.hardware [None req-7c1b8e29-6c3f-402f-942e-08b08c8eac7d tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Image pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 749.047434] env[62070]: DEBUG nova.virt.hardware [None req-7c1b8e29-6c3f-402f-942e-08b08c8eac7d tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 749.047754] env[62070]: DEBUG nova.virt.hardware [None req-7c1b8e29-6c3f-402f-942e-08b08c8eac7d tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 749.048026] env[62070]: DEBUG nova.virt.hardware [None req-7c1b8e29-6c3f-402f-942e-08b08c8eac7d tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 749.048289] env[62070]: DEBUG nova.virt.hardware [None req-7c1b8e29-6c3f-402f-942e-08b08c8eac7d tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Got 1 possible topologies {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 749.048563] env[62070]: DEBUG nova.virt.hardware [None req-7c1b8e29-6c3f-402f-942e-08b08c8eac7d tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 749.048842] env[62070]: DEBUG nova.virt.hardware [None req-7c1b8e29-6c3f-402f-942e-08b08c8eac7d tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 749.050048] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df7fabd8-e327-4ca4-9a8f-81e2e9ba80ff {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.061050] env[62070]: DEBUG oslo_vmware.api [None req-b307d371-7291-4b27-bac8-b0cebaeb9acc tempest-ServerDiagnosticsNegativeTest-1306946091 tempest-ServerDiagnosticsNegativeTest-1306946091-project-member] Task: {'id': task-1121553, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 749.064260] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-060f8bf0-0a44-45ae-b799-16ddb118280a {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.133267] env[62070]: DEBUG oslo_concurrency.lockutils [None req-a1a0245d-1ee3-4e1c-adaf-6c5d189bff8e tempest-ServerMetadataNegativeTestJSON-1698615623 tempest-ServerMetadataNegativeTestJSON-1698615623-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 749.135201] env[62070]: DEBUG oslo_concurrency.lockutils [None req-35085f63-b149-4a7f-b35a-5da7bb134d28 tempest-ServerTagsTestJSON-107248279 tempest-ServerTagsTestJSON-107248279-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.141s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 749.139025] env[62070]: DEBUG oslo_concurrency.lockutils [None req-884b25a6-886e-4a2f-85f7-4f7a1048c76f tempest-ServersTestBootFromVolume-622206804 tempest-ServersTestBootFromVolume-622206804-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.163s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 749.140679] env[62070]: INFO nova.compute.claims [None req-884b25a6-886e-4a2f-85f7-4f7a1048c76f tempest-ServersTestBootFromVolume-622206804 tempest-ServersTestBootFromVolume-622206804-project-member] [instance: dd5d90e8-964a-4e1c-a98a-bcba37a1d79e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 749.163826] env[62070]: INFO nova.scheduler.client.report [None req-35085f63-b149-4a7f-b35a-5da7bb134d28 tempest-ServerTagsTestJSON-107248279 tempest-ServerTagsTestJSON-107248279-project-member] Deleted allocations for instance 30d782e4-30c7-41f6-b30d-95a9a59cf83c [ 749.216450] env[62070]: DEBUG oslo_vmware.api [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Task: {'id': task-1121549, 'name': PowerOnVM_Task, 'duration_secs': 0.489912} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 749.217013] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] [instance: 5a146d8f-6921-4b3e-8696-d2804fb855ba] Powered on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 749.217097] env[62070]: INFO nova.compute.manager [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] [instance: 5a146d8f-6921-4b3e-8696-d2804fb855ba] Took 8.08 seconds to spawn the instance on the hypervisor. 
[ 749.217264] env[62070]: DEBUG nova.compute.manager [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] [instance: 5a146d8f-6921-4b3e-8696-d2804fb855ba] Checking state {{(pid=62070) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 749.218317] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9d3adb0-a6ec-48ed-ab64-cad719391c2e {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.296528] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1121552, 'name': CreateVM_Task, 'duration_secs': 0.348905} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 749.296528] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a3fcb849-b015-43aa-8f95-0d4a87e2cecc] Created VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 749.296528] env[62070]: DEBUG oslo_concurrency.lockutils [None req-3add793a-e278-4147-906b-0dddec8a7d40 tempest-ServerAddressesTestJSON-560794175 tempest-ServerAddressesTestJSON-560794175-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 749.296528] env[62070]: DEBUG oslo_concurrency.lockutils [None req-3add793a-e278-4147-906b-0dddec8a7d40 tempest-ServerAddressesTestJSON-560794175 tempest-ServerAddressesTestJSON-560794175-project-member] Acquired lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 749.296802] env[62070]: DEBUG oslo_concurrency.lockutils [None req-3add793a-e278-4147-906b-0dddec8a7d40 tempest-ServerAddressesTestJSON-560794175 tempest-ServerAddressesTestJSON-560794175-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 749.297141] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-556e0724-426d-44b1-96bb-f0b0abe07133 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.302668] env[62070]: DEBUG oslo_vmware.api [None req-3add793a-e278-4147-906b-0dddec8a7d40 tempest-ServerAddressesTestJSON-560794175 tempest-ServerAddressesTestJSON-560794175-project-member] Waiting for the task: (returnval){ [ 749.302668] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]523e225c-3f0a-a92f-447b-ebf0a28538d7" [ 749.302668] env[62070]: _type = "Task" [ 749.302668] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 749.320447] env[62070]: DEBUG oslo_vmware.api [None req-3add793a-e278-4147-906b-0dddec8a7d40 tempest-ServerAddressesTestJSON-560794175 tempest-ServerAddressesTestJSON-560794175-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]523e225c-3f0a-a92f-447b-ebf0a28538d7, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 749.548739] env[62070]: DEBUG oslo_vmware.api [None req-b307d371-7291-4b27-bac8-b0cebaeb9acc tempest-ServerDiagnosticsNegativeTest-1306946091 tempest-ServerDiagnosticsNegativeTest-1306946091-project-member] Task: {'id': task-1121553, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 749.602015] env[62070]: DEBUG nova.network.neutron [req-d704aaf8-2b24-45e8-ad95-4df22d061722 req-a7850a0b-58d0-4617-a17a-73e1342c8d6c service nova] [instance: a3fcb849-b015-43aa-8f95-0d4a87e2cecc] Updated VIF entry in instance network info cache for port fcd8b688-1cad-4cae-a1db-2d606703425c. {{(pid=62070) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 749.602453] env[62070]: DEBUG nova.network.neutron [req-d704aaf8-2b24-45e8-ad95-4df22d061722 req-a7850a0b-58d0-4617-a17a-73e1342c8d6c service nova] [instance: a3fcb849-b015-43aa-8f95-0d4a87e2cecc] Updating instance_info_cache with network_info: [{"id": "fcd8b688-1cad-4cae-a1db-2d606703425c", "address": "fa:16:3e:92:77:c7", "network": {"id": "84f186c9-b83c-4c32-8f89-5baa829e380d", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-38403900-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c4b0335b052b4d3c80922314fa650b30", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "777870ab-362f-4a17-9c1c-8d9cc26cd4ce", "external-id": "nsx-vlan-transportzone-987", "segmentation_id": 987, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfcd8b688-1c", "ovs_interfaceid": "fcd8b688-1cad-4cae-a1db-2d606703425c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 749.621910] env[62070]: DEBUG nova.network.neutron [None req-7c1b8e29-6c3f-402f-942e-08b08c8eac7d tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: a3c42653-9a4b-42d3-bc38-8d46d95c8f64] Successfully updated port: 60d8bbf4-bc22-4c62-8ddd-d3a60ee21d1b {{(pid=62070) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 749.672398] env[62070]: DEBUG oslo_concurrency.lockutils [None req-35085f63-b149-4a7f-b35a-5da7bb134d28 tempest-ServerTagsTestJSON-107248279 tempest-ServerTagsTestJSON-107248279-project-member] Lock "30d782e4-30c7-41f6-b30d-95a9a59cf83c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 19.027s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 749.739625] env[62070]: INFO nova.compute.manager [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] [instance: 5a146d8f-6921-4b3e-8696-d2804fb855ba] Took 29.06 seconds to build instance. 
[ 749.813979] env[62070]: DEBUG oslo_vmware.api [None req-3add793a-e278-4147-906b-0dddec8a7d40 tempest-ServerAddressesTestJSON-560794175 tempest-ServerAddressesTestJSON-560794175-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]523e225c-3f0a-a92f-447b-ebf0a28538d7, 'name': SearchDatastore_Task, 'duration_secs': 0.021975} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 749.814305] env[62070]: DEBUG oslo_concurrency.lockutils [None req-3add793a-e278-4147-906b-0dddec8a7d40 tempest-ServerAddressesTestJSON-560794175 tempest-ServerAddressesTestJSON-560794175-project-member] Releasing lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 749.814533] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-3add793a-e278-4147-906b-0dddec8a7d40 tempest-ServerAddressesTestJSON-560794175 tempest-ServerAddressesTestJSON-560794175-project-member] [instance: a3fcb849-b015-43aa-8f95-0d4a87e2cecc] Processing image 43ea607c-7ece-4601-9b11-75c6a16aa7dd {{(pid=62070) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 749.814801] env[62070]: DEBUG oslo_concurrency.lockutils [None req-3add793a-e278-4147-906b-0dddec8a7d40 tempest-ServerAddressesTestJSON-560794175 tempest-ServerAddressesTestJSON-560794175-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 749.814950] env[62070]: DEBUG oslo_concurrency.lockutils [None req-3add793a-e278-4147-906b-0dddec8a7d40 tempest-ServerAddressesTestJSON-560794175 tempest-ServerAddressesTestJSON-560794175-project-member] Acquired lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 749.815145] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-3add793a-e278-4147-906b-0dddec8a7d40 tempest-ServerAddressesTestJSON-560794175 tempest-ServerAddressesTestJSON-560794175-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 749.815406] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8fcf82b2-ce80-4b42-a6c7-93cafa707df6 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.828413] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-3add793a-e278-4147-906b-0dddec8a7d40 tempest-ServerAddressesTestJSON-560794175 tempest-ServerAddressesTestJSON-560794175-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 749.828613] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-3add793a-e278-4147-906b-0dddec8a7d40 tempest-ServerAddressesTestJSON-560794175 tempest-ServerAddressesTestJSON-560794175-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62070) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 749.829390] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9577fcf4-247d-4dc1-8a9a-031e6cc4dd6a {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.835077] env[62070]: DEBUG oslo_vmware.api [None req-3add793a-e278-4147-906b-0dddec8a7d40 tempest-ServerAddressesTestJSON-560794175 tempest-ServerAddressesTestJSON-560794175-project-member] Waiting for the task: (returnval){ [ 749.835077] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]5250e735-3857-bd65-c04f-170f2af456e4" [ 749.835077] env[62070]: _type = "Task" [ 749.835077] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 749.842922] env[62070]: DEBUG oslo_vmware.api [None req-3add793a-e278-4147-906b-0dddec8a7d40 tempest-ServerAddressesTestJSON-560794175 tempest-ServerAddressesTestJSON-560794175-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]5250e735-3857-bd65-c04f-170f2af456e4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 750.052971] env[62070]: DEBUG oslo_vmware.api [None req-b307d371-7291-4b27-bac8-b0cebaeb9acc tempest-ServerDiagnosticsNegativeTest-1306946091 tempest-ServerDiagnosticsNegativeTest-1306946091-project-member] Task: {'id': task-1121553, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.546396} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 750.053270] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-b307d371-7291-4b27-bac8-b0cebaeb9acc tempest-ServerDiagnosticsNegativeTest-1306946091 tempest-ServerDiagnosticsNegativeTest-1306946091-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore1] 42a5c5d8-5c3a-4568-b212-d87f2951a334/42a5c5d8-5c3a-4568-b212-d87f2951a334.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 750.053498] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-b307d371-7291-4b27-bac8-b0cebaeb9acc tempest-ServerDiagnosticsNegativeTest-1306946091 tempest-ServerDiagnosticsNegativeTest-1306946091-project-member] [instance: 42a5c5d8-5c3a-4568-b212-d87f2951a334] Extending root virtual disk to 1048576 {{(pid=62070) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 750.053781] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-68c9bc45-c357-4d2b-8789-038a08290031 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.060373] env[62070]: DEBUG oslo_vmware.api [None req-b307d371-7291-4b27-bac8-b0cebaeb9acc tempest-ServerDiagnosticsNegativeTest-1306946091 tempest-ServerDiagnosticsNegativeTest-1306946091-project-member] Waiting for the task: (returnval){ [ 750.060373] env[62070]: value = "task-1121554" [ 750.060373] env[62070]: _type = "Task" [ 750.060373] env[62070]: } to complete. 
{{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 750.069197] env[62070]: DEBUG oslo_vmware.api [None req-b307d371-7291-4b27-bac8-b0cebaeb9acc tempest-ServerDiagnosticsNegativeTest-1306946091 tempest-ServerDiagnosticsNegativeTest-1306946091-project-member] Task: {'id': task-1121554, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 750.106792] env[62070]: DEBUG oslo_concurrency.lockutils [req-d704aaf8-2b24-45e8-ad95-4df22d061722 req-a7850a0b-58d0-4617-a17a-73e1342c8d6c service nova] Releasing lock "refresh_cache-a3fcb849-b015-43aa-8f95-0d4a87e2cecc" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 750.124728] env[62070]: DEBUG oslo_concurrency.lockutils [None req-7c1b8e29-6c3f-402f-942e-08b08c8eac7d tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Acquiring lock "refresh_cache-a3c42653-9a4b-42d3-bc38-8d46d95c8f64" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 750.124883] env[62070]: DEBUG oslo_concurrency.lockutils [None req-7c1b8e29-6c3f-402f-942e-08b08c8eac7d tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Acquired lock "refresh_cache-a3c42653-9a4b-42d3-bc38-8d46d95c8f64" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 750.125156] env[62070]: DEBUG nova.network.neutron [None req-7c1b8e29-6c3f-402f-942e-08b08c8eac7d tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: a3c42653-9a4b-42d3-bc38-8d46d95c8f64] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 750.242438] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6e6f7554-d128-4ba4-b8f9-499cd17315ad tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Lock "5a146d8f-6921-4b3e-8696-d2804fb855ba" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 115.854s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 750.349442] env[62070]: DEBUG oslo_vmware.api [None req-3add793a-e278-4147-906b-0dddec8a7d40 tempest-ServerAddressesTestJSON-560794175 tempest-ServerAddressesTestJSON-560794175-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]5250e735-3857-bd65-c04f-170f2af456e4, 'name': SearchDatastore_Task, 'duration_secs': 0.040062} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 750.350880] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-85d5748e-183b-4747-8eb5-1c5e8081313f {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.358472] env[62070]: DEBUG oslo_vmware.api [None req-3add793a-e278-4147-906b-0dddec8a7d40 tempest-ServerAddressesTestJSON-560794175 tempest-ServerAddressesTestJSON-560794175-project-member] Waiting for the task: (returnval){ [ 750.358472] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]52614fb8-b500-06c8-7fd1-a107f055251c" [ 750.358472] env[62070]: _type = "Task" [ 750.358472] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 750.366577] env[62070]: DEBUG oslo_vmware.api [None req-3add793a-e278-4147-906b-0dddec8a7d40 tempest-ServerAddressesTestJSON-560794175 tempest-ServerAddressesTestJSON-560794175-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52614fb8-b500-06c8-7fd1-a107f055251c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 750.570539] env[62070]: DEBUG oslo_vmware.api [None req-b307d371-7291-4b27-bac8-b0cebaeb9acc tempest-ServerDiagnosticsNegativeTest-1306946091 tempest-ServerDiagnosticsNegativeTest-1306946091-project-member] Task: {'id': task-1121554, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.075864} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 750.573050] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-b307d371-7291-4b27-bac8-b0cebaeb9acc tempest-ServerDiagnosticsNegativeTest-1306946091 tempest-ServerDiagnosticsNegativeTest-1306946091-project-member] [instance: 42a5c5d8-5c3a-4568-b212-d87f2951a334] Extended root virtual disk {{(pid=62070) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 750.574164] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c90dc2f-b445-4d8c-a6fb-b914213e354e {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.597913] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-b307d371-7291-4b27-bac8-b0cebaeb9acc tempest-ServerDiagnosticsNegativeTest-1306946091 tempest-ServerDiagnosticsNegativeTest-1306946091-project-member] [instance: 42a5c5d8-5c3a-4568-b212-d87f2951a334] Reconfiguring VM instance instance-0000002c to attach disk [datastore1] 42a5c5d8-5c3a-4568-b212-d87f2951a334/42a5c5d8-5c3a-4568-b212-d87f2951a334.vmdk or device None with type sparse {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 750.600972] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-007fb006-5f2f-4964-9c5f-587422e4a3e0 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.621437] env[62070]: DEBUG oslo_vmware.api [None req-b307d371-7291-4b27-bac8-b0cebaeb9acc tempest-ServerDiagnosticsNegativeTest-1306946091 tempest-ServerDiagnosticsNegativeTest-1306946091-project-member] Waiting for the task: (returnval){ [ 750.621437] env[62070]: value = "task-1121555" [ 750.621437] env[62070]: _type = 
"Task" [ 750.621437] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 750.636258] env[62070]: DEBUG oslo_vmware.api [None req-b307d371-7291-4b27-bac8-b0cebaeb9acc tempest-ServerDiagnosticsNegativeTest-1306946091 tempest-ServerDiagnosticsNegativeTest-1306946091-project-member] Task: {'id': task-1121555, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 750.643651] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e480993f-9886-4f4c-a623-e2409ea9ce34 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.652398] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67ad9f61-d57b-4105-a3f3-3a0c4c308308 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.683753] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd87906c-9ff5-40e5-ae2e-e51a9cf3ceb3 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.689019] env[62070]: DEBUG nova.network.neutron [None req-7c1b8e29-6c3f-402f-942e-08b08c8eac7d tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: a3c42653-9a4b-42d3-bc38-8d46d95c8f64] Instance cache missing network info. {{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 750.693902] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3316b0b-714d-4e40-9658-05b7a749ef6a {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.707486] env[62070]: DEBUG nova.compute.provider_tree [None req-884b25a6-886e-4a2f-85f7-4f7a1048c76f tempest-ServersTestBootFromVolume-622206804 tempest-ServersTestBootFromVolume-622206804-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 750.745232] env[62070]: DEBUG nova.compute.manager [None req-174e44be-5f42-4d72-b785-d7e1a5fd4e8e tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] [instance: 1ce155c8-9a10-4eff-b428-31889aa8f638] Starting instance... 
{{(pid=62070) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 750.799360] env[62070]: DEBUG nova.compute.manager [req-ce849618-982d-4138-a9ec-57ed7d72e038 req-6632a7e8-653f-437b-8e98-59f94598a47a service nova] [instance: a3c42653-9a4b-42d3-bc38-8d46d95c8f64] Received event network-vif-plugged-60d8bbf4-bc22-4c62-8ddd-d3a60ee21d1b {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 750.799360] env[62070]: DEBUG oslo_concurrency.lockutils [req-ce849618-982d-4138-a9ec-57ed7d72e038 req-6632a7e8-653f-437b-8e98-59f94598a47a service nova] Acquiring lock "a3c42653-9a4b-42d3-bc38-8d46d95c8f64-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 750.799360] env[62070]: DEBUG oslo_concurrency.lockutils [req-ce849618-982d-4138-a9ec-57ed7d72e038 req-6632a7e8-653f-437b-8e98-59f94598a47a service nova] Lock "a3c42653-9a4b-42d3-bc38-8d46d95c8f64-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 750.799663] env[62070]: DEBUG oslo_concurrency.lockutils [req-ce849618-982d-4138-a9ec-57ed7d72e038 req-6632a7e8-653f-437b-8e98-59f94598a47a service nova] Lock "a3c42653-9a4b-42d3-bc38-8d46d95c8f64-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 750.799663] env[62070]: DEBUG nova.compute.manager [req-ce849618-982d-4138-a9ec-57ed7d72e038 req-6632a7e8-653f-437b-8e98-59f94598a47a service nova] [instance: a3c42653-9a4b-42d3-bc38-8d46d95c8f64] No waiting events found dispatching network-vif-plugged-60d8bbf4-bc22-4c62-8ddd-d3a60ee21d1b {{(pid=62070) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 750.799935] env[62070]: WARNING nova.compute.manager [req-ce849618-982d-4138-a9ec-57ed7d72e038 req-6632a7e8-653f-437b-8e98-59f94598a47a service nova] [instance: a3c42653-9a4b-42d3-bc38-8d46d95c8f64] Received unexpected event network-vif-plugged-60d8bbf4-bc22-4c62-8ddd-d3a60ee21d1b for instance with vm_state building and task_state spawning. [ 750.799935] env[62070]: DEBUG nova.compute.manager [req-ce849618-982d-4138-a9ec-57ed7d72e038 req-6632a7e8-653f-437b-8e98-59f94598a47a service nova] [instance: a3c42653-9a4b-42d3-bc38-8d46d95c8f64] Received event network-changed-60d8bbf4-bc22-4c62-8ddd-d3a60ee21d1b {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 750.800115] env[62070]: DEBUG nova.compute.manager [req-ce849618-982d-4138-a9ec-57ed7d72e038 req-6632a7e8-653f-437b-8e98-59f94598a47a service nova] [instance: a3c42653-9a4b-42d3-bc38-8d46d95c8f64] Refreshing instance network info cache due to event network-changed-60d8bbf4-bc22-4c62-8ddd-d3a60ee21d1b. 
{{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 750.800907] env[62070]: DEBUG oslo_concurrency.lockutils [req-ce849618-982d-4138-a9ec-57ed7d72e038 req-6632a7e8-653f-437b-8e98-59f94598a47a service nova] Acquiring lock "refresh_cache-a3c42653-9a4b-42d3-bc38-8d46d95c8f64" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 750.870866] env[62070]: DEBUG oslo_vmware.api [None req-3add793a-e278-4147-906b-0dddec8a7d40 tempest-ServerAddressesTestJSON-560794175 tempest-ServerAddressesTestJSON-560794175-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52614fb8-b500-06c8-7fd1-a107f055251c, 'name': SearchDatastore_Task, 'duration_secs': 0.046381} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 750.871202] env[62070]: DEBUG oslo_concurrency.lockutils [None req-3add793a-e278-4147-906b-0dddec8a7d40 tempest-ServerAddressesTestJSON-560794175 tempest-ServerAddressesTestJSON-560794175-project-member] Releasing lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 750.871514] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-3add793a-e278-4147-906b-0dddec8a7d40 tempest-ServerAddressesTestJSON-560794175 tempest-ServerAddressesTestJSON-560794175-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore2] a3fcb849-b015-43aa-8f95-0d4a87e2cecc/a3fcb849-b015-43aa-8f95-0d4a87e2cecc.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 750.872125] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-74bd6121-ba9c-447c-8e91-fbab56bfadef {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.879416] env[62070]: DEBUG oslo_vmware.api [None req-3add793a-e278-4147-906b-0dddec8a7d40 tempest-ServerAddressesTestJSON-560794175 tempest-ServerAddressesTestJSON-560794175-project-member] Waiting for the task: (returnval){ [ 750.879416] env[62070]: value = "task-1121556" [ 750.879416] env[62070]: _type = "Task" [ 750.879416] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 750.889745] env[62070]: DEBUG oslo_vmware.api [None req-3add793a-e278-4147-906b-0dddec8a7d40 tempest-ServerAddressesTestJSON-560794175 tempest-ServerAddressesTestJSON-560794175-project-member] Task: {'id': task-1121556, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 750.907335] env[62070]: DEBUG nova.network.neutron [None req-7c1b8e29-6c3f-402f-942e-08b08c8eac7d tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: a3c42653-9a4b-42d3-bc38-8d46d95c8f64] Updating instance_info_cache with network_info: [{"id": "60d8bbf4-bc22-4c62-8ddd-d3a60ee21d1b", "address": "fa:16:3e:92:7a:b8", "network": {"id": "516790be-56b8-409d-b1c0-a8683a45a9ec", "bridge": "br-int", "label": "tempest-ServersTestJSON-693737631-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "91e246e32f29422e90fae974cfee9d8f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "359850cc-b061-4c9c-a61c-eb42e0f7c359", "external-id": "nsx-vlan-transportzone-113", "segmentation_id": 113, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap60d8bbf4-bc", "ovs_interfaceid": "60d8bbf4-bc22-4c62-8ddd-d3a60ee21d1b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 751.131793] env[62070]: DEBUG oslo_vmware.api [None req-b307d371-7291-4b27-bac8-b0cebaeb9acc tempest-ServerDiagnosticsNegativeTest-1306946091 tempest-ServerDiagnosticsNegativeTest-1306946091-project-member] Task: {'id': task-1121555, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 751.216187] env[62070]: DEBUG nova.scheduler.client.report [None req-884b25a6-886e-4a2f-85f7-4f7a1048c76f tempest-ServersTestBootFromVolume-622206804 tempest-ServersTestBootFromVolume-622206804-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 751.270419] env[62070]: DEBUG oslo_concurrency.lockutils [None req-174e44be-5f42-4d72-b785-d7e1a5fd4e8e tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 751.391634] env[62070]: DEBUG oslo_vmware.api [None req-3add793a-e278-4147-906b-0dddec8a7d40 tempest-ServerAddressesTestJSON-560794175 tempest-ServerAddressesTestJSON-560794175-project-member] Task: {'id': task-1121556, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 751.410157] env[62070]: DEBUG oslo_concurrency.lockutils [None req-7c1b8e29-6c3f-402f-942e-08b08c8eac7d tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Releasing lock "refresh_cache-a3c42653-9a4b-42d3-bc38-8d46d95c8f64" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 751.412543] env[62070]: DEBUG nova.compute.manager [None req-7c1b8e29-6c3f-402f-942e-08b08c8eac7d tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: a3c42653-9a4b-42d3-bc38-8d46d95c8f64] Instance network_info: |[{"id": "60d8bbf4-bc22-4c62-8ddd-d3a60ee21d1b", "address": "fa:16:3e:92:7a:b8", "network": {"id": "516790be-56b8-409d-b1c0-a8683a45a9ec", "bridge": "br-int", "label": "tempest-ServersTestJSON-693737631-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "91e246e32f29422e90fae974cfee9d8f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "359850cc-b061-4c9c-a61c-eb42e0f7c359", "external-id": "nsx-vlan-transportzone-113", "segmentation_id": 113, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap60d8bbf4-bc", "ovs_interfaceid": "60d8bbf4-bc22-4c62-8ddd-d3a60ee21d1b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 751.412543] env[62070]: DEBUG oslo_concurrency.lockutils [req-ce849618-982d-4138-a9ec-57ed7d72e038 req-6632a7e8-653f-437b-8e98-59f94598a47a service nova] Acquired lock "refresh_cache-a3c42653-9a4b-42d3-bc38-8d46d95c8f64" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 751.412740] env[62070]: DEBUG nova.network.neutron [req-ce849618-982d-4138-a9ec-57ed7d72e038 req-6632a7e8-653f-437b-8e98-59f94598a47a service nova] [instance: a3c42653-9a4b-42d3-bc38-8d46d95c8f64] Refreshing network info cache for port 60d8bbf4-bc22-4c62-8ddd-d3a60ee21d1b {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 751.412740] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-7c1b8e29-6c3f-402f-942e-08b08c8eac7d tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: a3c42653-9a4b-42d3-bc38-8d46d95c8f64] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:92:7a:b8', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '359850cc-b061-4c9c-a61c-eb42e0f7c359', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '60d8bbf4-bc22-4c62-8ddd-d3a60ee21d1b', 'vif_model': 'vmxnet3'}] {{(pid=62070) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 751.421504] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-7c1b8e29-6c3f-402f-942e-08b08c8eac7d tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Creating folder: Project 
(91e246e32f29422e90fae974cfee9d8f). Parent ref: group-v245319. {{(pid=62070) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 751.422819] env[62070]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ae1e5a67-7ff4-4a34-ba97-98636649e780 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.436604] env[62070]: INFO nova.virt.vmwareapi.vm_util [None req-7c1b8e29-6c3f-402f-942e-08b08c8eac7d tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Created folder: Project (91e246e32f29422e90fae974cfee9d8f) in parent group-v245319. [ 751.436869] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-7c1b8e29-6c3f-402f-942e-08b08c8eac7d tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Creating folder: Instances. Parent ref: group-v245363. {{(pid=62070) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 751.437144] env[62070]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c37a07ef-e998-4c3f-9cca-e894dfa9acb2 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.446176] env[62070]: INFO nova.virt.vmwareapi.vm_util [None req-7c1b8e29-6c3f-402f-942e-08b08c8eac7d tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Created folder: Instances in parent group-v245363. [ 751.446451] env[62070]: DEBUG oslo.service.loopingcall [None req-7c1b8e29-6c3f-402f-942e-08b08c8eac7d tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 751.446656] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a3c42653-9a4b-42d3-bc38-8d46d95c8f64] Creating VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 751.446888] env[62070]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-dba82d1c-6e98-4a3a-ab02-1580ccb7739a {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.471450] env[62070]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 751.471450] env[62070]: value = "task-1121559" [ 751.471450] env[62070]: _type = "Task" [ 751.471450] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 751.481417] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1121559, 'name': CreateVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 751.633479] env[62070]: DEBUG oslo_vmware.api [None req-b307d371-7291-4b27-bac8-b0cebaeb9acc tempest-ServerDiagnosticsNegativeTest-1306946091 tempest-ServerDiagnosticsNegativeTest-1306946091-project-member] Task: {'id': task-1121555, 'name': ReconfigVM_Task, 'duration_secs': 0.564319} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 751.633849] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-b307d371-7291-4b27-bac8-b0cebaeb9acc tempest-ServerDiagnosticsNegativeTest-1306946091 tempest-ServerDiagnosticsNegativeTest-1306946091-project-member] [instance: 42a5c5d8-5c3a-4568-b212-d87f2951a334] Reconfigured VM instance instance-0000002c to attach disk [datastore1] 42a5c5d8-5c3a-4568-b212-d87f2951a334/42a5c5d8-5c3a-4568-b212-d87f2951a334.vmdk or device None with type sparse {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 751.634475] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b2d4635e-afdd-4068-9f61-9685c38bfd82 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.641648] env[62070]: DEBUG oslo_vmware.api [None req-b307d371-7291-4b27-bac8-b0cebaeb9acc tempest-ServerDiagnosticsNegativeTest-1306946091 tempest-ServerDiagnosticsNegativeTest-1306946091-project-member] Waiting for the task: (returnval){ [ 751.641648] env[62070]: value = "task-1121560" [ 751.641648] env[62070]: _type = "Task" [ 751.641648] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 751.650304] env[62070]: DEBUG oslo_vmware.api [None req-b307d371-7291-4b27-bac8-b0cebaeb9acc tempest-ServerDiagnosticsNegativeTest-1306946091 tempest-ServerDiagnosticsNegativeTest-1306946091-project-member] Task: {'id': task-1121560, 'name': Rename_Task} progress is 5%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 751.725725] env[62070]: DEBUG oslo_concurrency.lockutils [None req-884b25a6-886e-4a2f-85f7-4f7a1048c76f tempest-ServersTestBootFromVolume-622206804 tempest-ServersTestBootFromVolume-622206804-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.588s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 751.726156] env[62070]: DEBUG nova.compute.manager [None req-884b25a6-886e-4a2f-85f7-4f7a1048c76f tempest-ServersTestBootFromVolume-622206804 tempest-ServersTestBootFromVolume-622206804-project-member] [instance: dd5d90e8-964a-4e1c-a98a-bcba37a1d79e] Start building networks asynchronously for instance. 
{{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 751.732027] env[62070]: DEBUG oslo_concurrency.lockutils [None req-682a69a1-3ef5-481f-9200-81623493ef57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.545s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 751.732721] env[62070]: INFO nova.compute.claims [None req-682a69a1-3ef5-481f-9200-81623493ef57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 13e3576e-4f4c-4541-a637-daa124cbf8dd] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 751.769031] env[62070]: DEBUG oslo_concurrency.lockutils [None req-e5d94c5b-216d-42d5-8cbd-2e80268f2a5f tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Acquiring lock "fe378560-40b8-42c9-840d-b7d60de87c4d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 751.769340] env[62070]: DEBUG oslo_concurrency.lockutils [None req-e5d94c5b-216d-42d5-8cbd-2e80268f2a5f tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Lock "fe378560-40b8-42c9-840d-b7d60de87c4d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 751.769557] env[62070]: DEBUG oslo_concurrency.lockutils [None req-e5d94c5b-216d-42d5-8cbd-2e80268f2a5f tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Acquiring lock "fe378560-40b8-42c9-840d-b7d60de87c4d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 751.769863] env[62070]: DEBUG oslo_concurrency.lockutils [None req-e5d94c5b-216d-42d5-8cbd-2e80268f2a5f tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Lock "fe378560-40b8-42c9-840d-b7d60de87c4d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 751.770080] env[62070]: DEBUG oslo_concurrency.lockutils [None req-e5d94c5b-216d-42d5-8cbd-2e80268f2a5f tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Lock "fe378560-40b8-42c9-840d-b7d60de87c4d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 751.773993] env[62070]: INFO nova.compute.manager [None req-e5d94c5b-216d-42d5-8cbd-2e80268f2a5f tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] [instance: fe378560-40b8-42c9-840d-b7d60de87c4d] Terminating instance [ 751.775964] env[62070]: DEBUG nova.compute.manager [None req-e5d94c5b-216d-42d5-8cbd-2e80268f2a5f 
tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] [instance: fe378560-40b8-42c9-840d-b7d60de87c4d] Start destroying the instance on the hypervisor. {{(pid=62070) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 751.776276] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-e5d94c5b-216d-42d5-8cbd-2e80268f2a5f tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] [instance: fe378560-40b8-42c9-840d-b7d60de87c4d] Destroying instance {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 751.777670] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad1cb358-2bfb-4c37-9e19-706b78b90917 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.788221] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-e5d94c5b-216d-42d5-8cbd-2e80268f2a5f tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] [instance: fe378560-40b8-42c9-840d-b7d60de87c4d] Powering off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 751.788221] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d35f8e2a-b3a1-49a0-8d4b-47755a4ba325 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.794993] env[62070]: DEBUG oslo_vmware.api [None req-e5d94c5b-216d-42d5-8cbd-2e80268f2a5f tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Waiting for the task: (returnval){ [ 751.794993] env[62070]: value = "task-1121561" [ 751.794993] env[62070]: _type = "Task" [ 751.794993] env[62070]: } to complete. 
{{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 751.799100] env[62070]: DEBUG nova.network.neutron [None req-18406021-c28f-46ab-8651-f32c1395e2d1 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] [instance: d148d561-3211-4f1f-965a-f2b14cd60b11] Port c5e6098a-ebbb-4eee-ba72-4ddaad679830 binding to destination host cpu-1 is already ACTIVE {{(pid=62070) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3171}} [ 751.799363] env[62070]: DEBUG oslo_concurrency.lockutils [None req-18406021-c28f-46ab-8651-f32c1395e2d1 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Acquiring lock "refresh_cache-d148d561-3211-4f1f-965a-f2b14cd60b11" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 751.799575] env[62070]: DEBUG oslo_concurrency.lockutils [None req-18406021-c28f-46ab-8651-f32c1395e2d1 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Acquired lock "refresh_cache-d148d561-3211-4f1f-965a-f2b14cd60b11" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 751.799727] env[62070]: DEBUG nova.network.neutron [None req-18406021-c28f-46ab-8651-f32c1395e2d1 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] [instance: d148d561-3211-4f1f-965a-f2b14cd60b11] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 751.806869] env[62070]: DEBUG oslo_vmware.api [None req-e5d94c5b-216d-42d5-8cbd-2e80268f2a5f tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Task: {'id': task-1121561, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 751.892263] env[62070]: DEBUG oslo_vmware.api [None req-3add793a-e278-4147-906b-0dddec8a7d40 tempest-ServerAddressesTestJSON-560794175 tempest-ServerAddressesTestJSON-560794175-project-member] Task: {'id': task-1121556, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.57691} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 751.892656] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-3add793a-e278-4147-906b-0dddec8a7d40 tempest-ServerAddressesTestJSON-560794175 tempest-ServerAddressesTestJSON-560794175-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore2] a3fcb849-b015-43aa-8f95-0d4a87e2cecc/a3fcb849-b015-43aa-8f95-0d4a87e2cecc.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 751.892986] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-3add793a-e278-4147-906b-0dddec8a7d40 tempest-ServerAddressesTestJSON-560794175 tempest-ServerAddressesTestJSON-560794175-project-member] [instance: a3fcb849-b015-43aa-8f95-0d4a87e2cecc] Extending root virtual disk to 1048576 {{(pid=62070) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 751.893393] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-22f336e0-bdc8-46c2-9aa4-d36a43452dce {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.900954] env[62070]: DEBUG oslo_vmware.api [None req-3add793a-e278-4147-906b-0dddec8a7d40 tempest-ServerAddressesTestJSON-560794175 tempest-ServerAddressesTestJSON-560794175-project-member] Waiting for the task: (returnval){ [ 751.900954] env[62070]: value = "task-1121562" [ 751.900954] env[62070]: _type = "Task" [ 751.900954] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 751.912378] env[62070]: DEBUG oslo_vmware.api [None req-3add793a-e278-4147-906b-0dddec8a7d40 tempest-ServerAddressesTestJSON-560794175 tempest-ServerAddressesTestJSON-560794175-project-member] Task: {'id': task-1121562, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 751.980582] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1121559, 'name': CreateVM_Task} progress is 99%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 752.156710] env[62070]: DEBUG oslo_vmware.api [None req-b307d371-7291-4b27-bac8-b0cebaeb9acc tempest-ServerDiagnosticsNegativeTest-1306946091 tempest-ServerDiagnosticsNegativeTest-1306946091-project-member] Task: {'id': task-1121560, 'name': Rename_Task, 'duration_secs': 0.227841} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 752.161571] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-b307d371-7291-4b27-bac8-b0cebaeb9acc tempest-ServerDiagnosticsNegativeTest-1306946091 tempest-ServerDiagnosticsNegativeTest-1306946091-project-member] [instance: 42a5c5d8-5c3a-4568-b212-d87f2951a334] Powering on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 752.161571] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-523cda8f-5a77-4b07-a0c3-c42b6504b0c8 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.164142] env[62070]: DEBUG oslo_vmware.api [None req-b307d371-7291-4b27-bac8-b0cebaeb9acc tempest-ServerDiagnosticsNegativeTest-1306946091 tempest-ServerDiagnosticsNegativeTest-1306946091-project-member] Waiting for the task: (returnval){ [ 752.164142] env[62070]: value = "task-1121563" [ 752.164142] env[62070]: _type = "Task" [ 752.164142] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 752.171421] env[62070]: DEBUG oslo_vmware.api [None req-b307d371-7291-4b27-bac8-b0cebaeb9acc tempest-ServerDiagnosticsNegativeTest-1306946091 tempest-ServerDiagnosticsNegativeTest-1306946091-project-member] Task: {'id': task-1121563, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 752.238344] env[62070]: DEBUG nova.compute.utils [None req-884b25a6-886e-4a2f-85f7-4f7a1048c76f tempest-ServersTestBootFromVolume-622206804 tempest-ServersTestBootFromVolume-622206804-project-member] Using /dev/sd instead of None {{(pid=62070) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 752.241498] env[62070]: DEBUG nova.compute.manager [None req-884b25a6-886e-4a2f-85f7-4f7a1048c76f tempest-ServersTestBootFromVolume-622206804 tempest-ServersTestBootFromVolume-622206804-project-member] [instance: dd5d90e8-964a-4e1c-a98a-bcba37a1d79e] Allocating IP information in the background. 
{{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 752.241641] env[62070]: DEBUG nova.network.neutron [None req-884b25a6-886e-4a2f-85f7-4f7a1048c76f tempest-ServersTestBootFromVolume-622206804 tempest-ServersTestBootFromVolume-622206804-project-member] [instance: dd5d90e8-964a-4e1c-a98a-bcba37a1d79e] allocate_for_instance() {{(pid=62070) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 752.281668] env[62070]: DEBUG nova.policy [None req-884b25a6-886e-4a2f-85f7-4f7a1048c76f tempest-ServersTestBootFromVolume-622206804 tempest-ServersTestBootFromVolume-622206804-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '95965cd2b7664d2ab7441f194cda36f7', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7ded80f14852431187bd5066e4c42d71', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62070) authorize /opt/stack/nova/nova/policy.py:201}} [ 752.308230] env[62070]: DEBUG oslo_vmware.api [None req-e5d94c5b-216d-42d5-8cbd-2e80268f2a5f tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Task: {'id': task-1121561, 'name': PowerOffVM_Task, 'duration_secs': 0.255367} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 752.308919] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-e5d94c5b-216d-42d5-8cbd-2e80268f2a5f tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] [instance: fe378560-40b8-42c9-840d-b7d60de87c4d] Powered off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 752.308919] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-e5d94c5b-216d-42d5-8cbd-2e80268f2a5f tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] [instance: fe378560-40b8-42c9-840d-b7d60de87c4d] Unregistering the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 752.309917] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-bf3a6f1a-ba8a-401e-8afe-9ef1c38c446b {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.378600] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-e5d94c5b-216d-42d5-8cbd-2e80268f2a5f tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] [instance: fe378560-40b8-42c9-840d-b7d60de87c4d] Unregistered the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 752.378818] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-e5d94c5b-216d-42d5-8cbd-2e80268f2a5f tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] [instance: fe378560-40b8-42c9-840d-b7d60de87c4d] Deleting contents of the VM from datastore datastore2 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 752.379020] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-e5d94c5b-216d-42d5-8cbd-2e80268f2a5f tempest-ListServersNegativeTestJSON-74757126 
tempest-ListServersNegativeTestJSON-74757126-project-member] Deleting the datastore file [datastore2] fe378560-40b8-42c9-840d-b7d60de87c4d {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 752.379299] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-dbd12a94-91ba-4d85-a16f-cec0e7b5da4f {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.386096] env[62070]: DEBUG oslo_vmware.api [None req-e5d94c5b-216d-42d5-8cbd-2e80268f2a5f tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Waiting for the task: (returnval){ [ 752.386096] env[62070]: value = "task-1121565" [ 752.386096] env[62070]: _type = "Task" [ 752.386096] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 752.394805] env[62070]: DEBUG oslo_vmware.api [None req-e5d94c5b-216d-42d5-8cbd-2e80268f2a5f tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Task: {'id': task-1121565, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 752.410187] env[62070]: DEBUG oslo_vmware.api [None req-3add793a-e278-4147-906b-0dddec8a7d40 tempest-ServerAddressesTestJSON-560794175 tempest-ServerAddressesTestJSON-560794175-project-member] Task: {'id': task-1121562, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.076943} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 752.412418] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-3add793a-e278-4147-906b-0dddec8a7d40 tempest-ServerAddressesTestJSON-560794175 tempest-ServerAddressesTestJSON-560794175-project-member] [instance: a3fcb849-b015-43aa-8f95-0d4a87e2cecc] Extended root virtual disk {{(pid=62070) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 752.413240] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8147131-8298-4866-a84b-88a75f3eddfb {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.438534] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-3add793a-e278-4147-906b-0dddec8a7d40 tempest-ServerAddressesTestJSON-560794175 tempest-ServerAddressesTestJSON-560794175-project-member] [instance: a3fcb849-b015-43aa-8f95-0d4a87e2cecc] Reconfiguring VM instance instance-0000002d to attach disk [datastore2] a3fcb849-b015-43aa-8f95-0d4a87e2cecc/a3fcb849-b015-43aa-8f95-0d4a87e2cecc.vmdk or device None with type sparse {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 752.438534] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f79efbc2-fe30-4f6a-92da-c75a803765c1 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.454088] env[62070]: DEBUG nova.network.neutron [req-ce849618-982d-4138-a9ec-57ed7d72e038 req-6632a7e8-653f-437b-8e98-59f94598a47a service nova] [instance: a3c42653-9a4b-42d3-bc38-8d46d95c8f64] Updated VIF entry in instance network info cache for port 60d8bbf4-bc22-4c62-8ddd-d3a60ee21d1b. 
{{(pid=62070) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 752.455024] env[62070]: DEBUG nova.network.neutron [req-ce849618-982d-4138-a9ec-57ed7d72e038 req-6632a7e8-653f-437b-8e98-59f94598a47a service nova] [instance: a3c42653-9a4b-42d3-bc38-8d46d95c8f64] Updating instance_info_cache with network_info: [{"id": "60d8bbf4-bc22-4c62-8ddd-d3a60ee21d1b", "address": "fa:16:3e:92:7a:b8", "network": {"id": "516790be-56b8-409d-b1c0-a8683a45a9ec", "bridge": "br-int", "label": "tempest-ServersTestJSON-693737631-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "91e246e32f29422e90fae974cfee9d8f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "359850cc-b061-4c9c-a61c-eb42e0f7c359", "external-id": "nsx-vlan-transportzone-113", "segmentation_id": 113, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap60d8bbf4-bc", "ovs_interfaceid": "60d8bbf4-bc22-4c62-8ddd-d3a60ee21d1b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 752.465169] env[62070]: DEBUG oslo_vmware.api [None req-3add793a-e278-4147-906b-0dddec8a7d40 tempest-ServerAddressesTestJSON-560794175 tempest-ServerAddressesTestJSON-560794175-project-member] Waiting for the task: (returnval){ [ 752.465169] env[62070]: value = "task-1121566" [ 752.465169] env[62070]: _type = "Task" [ 752.465169] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 752.478849] env[62070]: DEBUG oslo_vmware.api [None req-3add793a-e278-4147-906b-0dddec8a7d40 tempest-ServerAddressesTestJSON-560794175 tempest-ServerAddressesTestJSON-560794175-project-member] Task: {'id': task-1121566, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 752.486437] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1121559, 'name': CreateVM_Task, 'duration_secs': 0.573033} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 752.488867] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a3c42653-9a4b-42d3-bc38-8d46d95c8f64] Created VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 752.490441] env[62070]: DEBUG oslo_concurrency.lockutils [None req-7c1b8e29-6c3f-402f-942e-08b08c8eac7d tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 752.490628] env[62070]: DEBUG oslo_concurrency.lockutils [None req-7c1b8e29-6c3f-402f-942e-08b08c8eac7d tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Acquired lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 752.491129] env[62070]: DEBUG oslo_concurrency.lockutils [None req-7c1b8e29-6c3f-402f-942e-08b08c8eac7d tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 752.491991] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-baf2a86d-8083-4965-b037-9da0432ddf4a {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.497415] env[62070]: DEBUG oslo_vmware.api [None req-7c1b8e29-6c3f-402f-942e-08b08c8eac7d tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Waiting for the task: (returnval){ [ 752.497415] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]52998e5e-8f84-d6fc-40af-d385528e3169" [ 752.497415] env[62070]: _type = "Task" [ 752.497415] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 752.505839] env[62070]: DEBUG oslo_vmware.api [None req-7c1b8e29-6c3f-402f-942e-08b08c8eac7d tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52998e5e-8f84-d6fc-40af-d385528e3169, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 752.674707] env[62070]: DEBUG oslo_vmware.api [None req-b307d371-7291-4b27-bac8-b0cebaeb9acc tempest-ServerDiagnosticsNegativeTest-1306946091 tempest-ServerDiagnosticsNegativeTest-1306946091-project-member] Task: {'id': task-1121563, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 752.750960] env[62070]: DEBUG nova.compute.manager [None req-884b25a6-886e-4a2f-85f7-4f7a1048c76f tempest-ServersTestBootFromVolume-622206804 tempest-ServersTestBootFromVolume-622206804-project-member] [instance: dd5d90e8-964a-4e1c-a98a-bcba37a1d79e] Start building block device mappings for instance. 
{{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 752.770616] env[62070]: DEBUG nova.network.neutron [None req-18406021-c28f-46ab-8651-f32c1395e2d1 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] [instance: d148d561-3211-4f1f-965a-f2b14cd60b11] Updating instance_info_cache with network_info: [{"id": "c5e6098a-ebbb-4eee-ba72-4ddaad679830", "address": "fa:16:3e:7b:ab:3c", "network": {"id": "df33a08d-88db-4a22-846f-5b414705fc65", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.22", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "9d42cb2bbadf40d6b35f237f71234611", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4728adca-2846-416a-91a3-deb898faf1f3", "external-id": "nsx-vlan-transportzone-823", "segmentation_id": 823, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc5e6098a-eb", "ovs_interfaceid": "c5e6098a-ebbb-4eee-ba72-4ddaad679830", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 752.897058] env[62070]: DEBUG oslo_vmware.api [None req-e5d94c5b-216d-42d5-8cbd-2e80268f2a5f tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Task: {'id': task-1121565, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.322829} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 752.897058] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-e5d94c5b-216d-42d5-8cbd-2e80268f2a5f tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Deleted the datastore file {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 752.897058] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-e5d94c5b-216d-42d5-8cbd-2e80268f2a5f tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] [instance: fe378560-40b8-42c9-840d-b7d60de87c4d] Deleted contents of the VM from datastore datastore2 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 752.897058] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-e5d94c5b-216d-42d5-8cbd-2e80268f2a5f tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] [instance: fe378560-40b8-42c9-840d-b7d60de87c4d] Instance destroyed {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 752.897281] env[62070]: INFO nova.compute.manager [None req-e5d94c5b-216d-42d5-8cbd-2e80268f2a5f tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] [instance: fe378560-40b8-42c9-840d-b7d60de87c4d] Took 1.12 seconds to destroy the instance on the hypervisor. 
[ 752.897403] env[62070]: DEBUG oslo.service.loopingcall [None req-e5d94c5b-216d-42d5-8cbd-2e80268f2a5f tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 752.898338] env[62070]: DEBUG nova.compute.manager [-] [instance: fe378560-40b8-42c9-840d-b7d60de87c4d] Deallocating network for instance {{(pid=62070) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 752.898338] env[62070]: DEBUG nova.network.neutron [-] [instance: fe378560-40b8-42c9-840d-b7d60de87c4d] deallocate_for_instance() {{(pid=62070) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 752.914408] env[62070]: DEBUG nova.network.neutron [None req-884b25a6-886e-4a2f-85f7-4f7a1048c76f tempest-ServersTestBootFromVolume-622206804 tempest-ServersTestBootFromVolume-622206804-project-member] [instance: dd5d90e8-964a-4e1c-a98a-bcba37a1d79e] Successfully created port: f9ec7c6a-7ed9-4d9d-9e32-c182bc79b903 {{(pid=62070) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 752.959246] env[62070]: DEBUG oslo_concurrency.lockutils [req-ce849618-982d-4138-a9ec-57ed7d72e038 req-6632a7e8-653f-437b-8e98-59f94598a47a service nova] Releasing lock "refresh_cache-a3c42653-9a4b-42d3-bc38-8d46d95c8f64" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 752.976246] env[62070]: DEBUG oslo_vmware.api [None req-3add793a-e278-4147-906b-0dddec8a7d40 tempest-ServerAddressesTestJSON-560794175 tempest-ServerAddressesTestJSON-560794175-project-member] Task: {'id': task-1121566, 'name': ReconfigVM_Task, 'duration_secs': 0.293598} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 752.976662] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-3add793a-e278-4147-906b-0dddec8a7d40 tempest-ServerAddressesTestJSON-560794175 tempest-ServerAddressesTestJSON-560794175-project-member] [instance: a3fcb849-b015-43aa-8f95-0d4a87e2cecc] Reconfigured VM instance instance-0000002d to attach disk [datastore2] a3fcb849-b015-43aa-8f95-0d4a87e2cecc/a3fcb849-b015-43aa-8f95-0d4a87e2cecc.vmdk or device None with type sparse {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 752.978213] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3fcec29d-75d6-4d21-8fe6-41bd805b2a64 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.987449] env[62070]: DEBUG oslo_vmware.api [None req-3add793a-e278-4147-906b-0dddec8a7d40 tempest-ServerAddressesTestJSON-560794175 tempest-ServerAddressesTestJSON-560794175-project-member] Waiting for the task: (returnval){ [ 752.987449] env[62070]: value = "task-1121567" [ 752.987449] env[62070]: _type = "Task" [ 752.987449] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 752.998777] env[62070]: DEBUG oslo_vmware.api [None req-3add793a-e278-4147-906b-0dddec8a7d40 tempest-ServerAddressesTestJSON-560794175 tempest-ServerAddressesTestJSON-560794175-project-member] Task: {'id': task-1121567, 'name': Rename_Task} progress is 6%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 753.015889] env[62070]: DEBUG oslo_vmware.api [None req-7c1b8e29-6c3f-402f-942e-08b08c8eac7d tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52998e5e-8f84-d6fc-40af-d385528e3169, 'name': SearchDatastore_Task, 'duration_secs': 0.019577} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 753.017506] env[62070]: DEBUG oslo_concurrency.lockutils [None req-7c1b8e29-6c3f-402f-942e-08b08c8eac7d tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Releasing lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 753.018046] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-7c1b8e29-6c3f-402f-942e-08b08c8eac7d tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: a3c42653-9a4b-42d3-bc38-8d46d95c8f64] Processing image 43ea607c-7ece-4601-9b11-75c6a16aa7dd {{(pid=62070) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 753.018393] env[62070]: DEBUG oslo_concurrency.lockutils [None req-7c1b8e29-6c3f-402f-942e-08b08c8eac7d tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 753.018627] env[62070]: DEBUG oslo_concurrency.lockutils [None req-7c1b8e29-6c3f-402f-942e-08b08c8eac7d tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Acquired lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 753.018909] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-7c1b8e29-6c3f-402f-942e-08b08c8eac7d tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 753.022536] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6d7773ea-c6c9-469b-8329-96c2e2bfcfd2 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.035995] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-7c1b8e29-6c3f-402f-942e-08b08c8eac7d tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 753.036235] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-7c1b8e29-6c3f-402f-942e-08b08c8eac7d tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62070) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 753.041110] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f092f93a-3e87-40f2-b619-9199932a1245 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.049837] env[62070]: DEBUG oslo_vmware.api [None req-7c1b8e29-6c3f-402f-942e-08b08c8eac7d tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Waiting for the task: (returnval){ [ 753.049837] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]52612b3d-ff4b-1d32-cecf-11e2cb20232b" [ 753.049837] env[62070]: _type = "Task" [ 753.049837] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 753.062699] env[62070]: DEBUG oslo_vmware.api [None req-7c1b8e29-6c3f-402f-942e-08b08c8eac7d tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52612b3d-ff4b-1d32-cecf-11e2cb20232b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 753.177788] env[62070]: DEBUG oslo_vmware.api [None req-b307d371-7291-4b27-bac8-b0cebaeb9acc tempest-ServerDiagnosticsNegativeTest-1306946091 tempest-ServerDiagnosticsNegativeTest-1306946091-project-member] Task: {'id': task-1121563, 'name': PowerOnVM_Task, 'duration_secs': 0.906728} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 753.177788] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-b307d371-7291-4b27-bac8-b0cebaeb9acc tempest-ServerDiagnosticsNegativeTest-1306946091 tempest-ServerDiagnosticsNegativeTest-1306946091-project-member] [instance: 42a5c5d8-5c3a-4568-b212-d87f2951a334] Powered on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 753.177788] env[62070]: INFO nova.compute.manager [None req-b307d371-7291-4b27-bac8-b0cebaeb9acc tempest-ServerDiagnosticsNegativeTest-1306946091 tempest-ServerDiagnosticsNegativeTest-1306946091-project-member] [instance: 42a5c5d8-5c3a-4568-b212-d87f2951a334] Took 9.48 seconds to spawn the instance on the hypervisor. 
[ 753.177788] env[62070]: DEBUG nova.compute.manager [None req-b307d371-7291-4b27-bac8-b0cebaeb9acc tempest-ServerDiagnosticsNegativeTest-1306946091 tempest-ServerDiagnosticsNegativeTest-1306946091-project-member] [instance: 42a5c5d8-5c3a-4568-b212-d87f2951a334] Checking state {{(pid=62070) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 753.178636] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fa4cd3e-5961-4fbc-93b8-436b1eb776d9 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.258264] env[62070]: INFO nova.virt.block_device [None req-884b25a6-886e-4a2f-85f7-4f7a1048c76f tempest-ServersTestBootFromVolume-622206804 tempest-ServersTestBootFromVolume-622206804-project-member] [instance: dd5d90e8-964a-4e1c-a98a-bcba37a1d79e] Booting with volume b5d7c242-16d7-4aff-81f3-626f9f878e94 at /dev/sda [ 753.283815] env[62070]: DEBUG oslo_concurrency.lockutils [None req-18406021-c28f-46ab-8651-f32c1395e2d1 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Releasing lock "refresh_cache-d148d561-3211-4f1f-965a-f2b14cd60b11" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 753.340863] env[62070]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3688fefc-937c-447e-a47c-70fa1167a5dd {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.351420] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac358269-0fd1-4b9b-b0f1-7537df89263d {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.364224] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d90a4f24-f838-41b8-8ba9-34acf397c3c5 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.367743] env[62070]: DEBUG nova.compute.manager [req-4dd77b6f-2226-45a5-bf1b-5e35a9acca55 req-f75acc4c-54c2-4d4d-9717-f0dde54a7fd3 service nova] [instance: fe378560-40b8-42c9-840d-b7d60de87c4d] Received event network-vif-deleted-f3e119aa-83f9-4e1d-b333-01b5bf1a1873 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 753.367974] env[62070]: INFO nova.compute.manager [req-4dd77b6f-2226-45a5-bf1b-5e35a9acca55 req-f75acc4c-54c2-4d4d-9717-f0dde54a7fd3 service nova] [instance: fe378560-40b8-42c9-840d-b7d60de87c4d] Neutron deleted interface f3e119aa-83f9-4e1d-b333-01b5bf1a1873; detaching it from the instance and deleting it from the info cache [ 753.368195] env[62070]: DEBUG nova.network.neutron [req-4dd77b6f-2226-45a5-bf1b-5e35a9acca55 req-f75acc4c-54c2-4d4d-9717-f0dde54a7fd3 service nova] [instance: fe378560-40b8-42c9-840d-b7d60de87c4d] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 753.375888] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0908498-86b2-420d-b095-cb906721d21e {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.388903] env[62070]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with 
opID=oslo.vmware-e2a088ab-8894-4759-bbf5-8bbd7b45f702 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.415108] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5d72709-1b7e-4908-83ef-224840e58b1d {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.422828] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dca3dd36-3694-42e5-84d6-f82b5299057e {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.438594] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7f48c7a-c5c1-411a-96df-55a5a53aa123 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.452380] env[62070]: DEBUG nova.compute.provider_tree [None req-682a69a1-3ef5-481f-9200-81623493ef57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 753.463581] env[62070]: DEBUG nova.scheduler.client.report [None req-682a69a1-3ef5-481f-9200-81623493ef57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 753.467070] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f5b0435-fcef-4607-bf19-e3f4e7385b5d {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.474110] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6b0ed6e-1c54-4777-90e1-0bcc43948e4f {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.488065] env[62070]: DEBUG nova.virt.block_device [None req-884b25a6-886e-4a2f-85f7-4f7a1048c76f tempest-ServersTestBootFromVolume-622206804 tempest-ServersTestBootFromVolume-622206804-project-member] [instance: dd5d90e8-964a-4e1c-a98a-bcba37a1d79e] Updating existing volume attachment record: 5328e075-ba11-47da-adaf-60aebd437e55 {{(pid=62070) _volume_attach /opt/stack/nova/nova/virt/block_device.py:679}} [ 753.499612] env[62070]: DEBUG oslo_vmware.api [None req-3add793a-e278-4147-906b-0dddec8a7d40 tempest-ServerAddressesTestJSON-560794175 tempest-ServerAddressesTestJSON-560794175-project-member] Task: {'id': task-1121567, 'name': Rename_Task, 'duration_secs': 0.306544} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 753.499883] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-3add793a-e278-4147-906b-0dddec8a7d40 tempest-ServerAddressesTestJSON-560794175 tempest-ServerAddressesTestJSON-560794175-project-member] [instance: a3fcb849-b015-43aa-8f95-0d4a87e2cecc] Powering on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 753.500137] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6277701e-e80a-4250-8665-4e9635346050 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.506725] env[62070]: DEBUG oslo_vmware.api [None req-3add793a-e278-4147-906b-0dddec8a7d40 tempest-ServerAddressesTestJSON-560794175 tempest-ServerAddressesTestJSON-560794175-project-member] Waiting for the task: (returnval){ [ 753.506725] env[62070]: value = "task-1121568" [ 753.506725] env[62070]: _type = "Task" [ 753.506725] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 753.513705] env[62070]: DEBUG oslo_vmware.api [None req-3add793a-e278-4147-906b-0dddec8a7d40 tempest-ServerAddressesTestJSON-560794175 tempest-ServerAddressesTestJSON-560794175-project-member] Task: {'id': task-1121568, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 753.560063] env[62070]: DEBUG oslo_vmware.api [None req-7c1b8e29-6c3f-402f-942e-08b08c8eac7d tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52612b3d-ff4b-1d32-cecf-11e2cb20232b, 'name': SearchDatastore_Task, 'duration_secs': 0.012505} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 753.560838] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9070c2ba-94ba-4078-bbfe-1fe8388e1496 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.569645] env[62070]: DEBUG oslo_vmware.api [None req-7c1b8e29-6c3f-402f-942e-08b08c8eac7d tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Waiting for the task: (returnval){ [ 753.569645] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]52450013-cec6-e820-259d-33d9e6a14d8d" [ 753.569645] env[62070]: _type = "Task" [ 753.569645] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 753.575679] env[62070]: DEBUG oslo_vmware.api [None req-7c1b8e29-6c3f-402f-942e-08b08c8eac7d tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52450013-cec6-e820-259d-33d9e6a14d8d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 753.696192] env[62070]: INFO nova.compute.manager [None req-b307d371-7291-4b27-bac8-b0cebaeb9acc tempest-ServerDiagnosticsNegativeTest-1306946091 tempest-ServerDiagnosticsNegativeTest-1306946091-project-member] [instance: 42a5c5d8-5c3a-4568-b212-d87f2951a334] Took 27.43 seconds to build instance. 
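Note: the inventory dictionary reported just above for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 encodes Placement's capacity model: for each resource class, the capacity the scheduler can hand out is (total - reserved) * allocation_ratio, subject to min_unit/max_unit/step_size per allocation. A rough illustration of that arithmetic, using the numbers from this log, follows; it is an aid to reading the report lines, not code from Nova.

    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }

    def schedulable_capacity(inv):
        # Capacity visible to the scheduler for each resource class.
        return {rc: (v['total'] - v['reserved']) * v['allocation_ratio']
                for rc, v in inv.items()}

    print(schedulable_capacity(inventory))
    # {'VCPU': 192.0, 'MEMORY_MB': 196078.0, 'DISK_GB': 400.0}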
[ 753.787123] env[62070]: DEBUG nova.compute.manager [None req-18406021-c28f-46ab-8651-f32c1395e2d1 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] [instance: d148d561-3211-4f1f-965a-f2b14cd60b11] Hypervisor driver does not support instance shared storage check, assuming it's not on shared storage {{(pid=62070) _is_instance_storage_shared /opt/stack/nova/nova/compute/manager.py:897}} [ 753.787395] env[62070]: DEBUG oslo_concurrency.lockutils [None req-18406021-c28f-46ab-8651-f32c1395e2d1 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 753.821909] env[62070]: DEBUG nova.network.neutron [-] [instance: fe378560-40b8-42c9-840d-b7d60de87c4d] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 753.872230] env[62070]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f9604f0a-d071-4e6d-861b-60814c41c833 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.882184] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51f9c8fd-b9df-48db-945c-24571487122c {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.915211] env[62070]: DEBUG nova.compute.manager [req-4dd77b6f-2226-45a5-bf1b-5e35a9acca55 req-f75acc4c-54c2-4d4d-9717-f0dde54a7fd3 service nova] [instance: fe378560-40b8-42c9-840d-b7d60de87c4d] Detach interface failed, port_id=f3e119aa-83f9-4e1d-b333-01b5bf1a1873, reason: Instance fe378560-40b8-42c9-840d-b7d60de87c4d could not be found. {{(pid=62070) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 753.971064] env[62070]: DEBUG oslo_concurrency.lockutils [None req-682a69a1-3ef5-481f-9200-81623493ef57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.240s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 753.971303] env[62070]: DEBUG nova.compute.manager [None req-682a69a1-3ef5-481f-9200-81623493ef57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 13e3576e-4f4c-4541-a637-daa124cbf8dd] Start building networks asynchronously for instance. 
{{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 753.974102] env[62070]: DEBUG oslo_concurrency.lockutils [None req-c8619bac-3476-49c3-92b7-dcaac00d2762 tempest-ServersTestFqdnHostnames-1065104165 tempest-ServersTestFqdnHostnames-1065104165-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.940s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 753.975619] env[62070]: INFO nova.compute.claims [None req-c8619bac-3476-49c3-92b7-dcaac00d2762 tempest-ServersTestFqdnHostnames-1065104165 tempest-ServersTestFqdnHostnames-1065104165-project-member] [instance: 748c94c7-1233-44f4-a71a-176b26518399] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 754.017750] env[62070]: DEBUG oslo_vmware.api [None req-3add793a-e278-4147-906b-0dddec8a7d40 tempest-ServerAddressesTestJSON-560794175 tempest-ServerAddressesTestJSON-560794175-project-member] Task: {'id': task-1121568, 'name': PowerOnVM_Task, 'duration_secs': 0.474829} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 754.017750] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-3add793a-e278-4147-906b-0dddec8a7d40 tempest-ServerAddressesTestJSON-560794175 tempest-ServerAddressesTestJSON-560794175-project-member] [instance: a3fcb849-b015-43aa-8f95-0d4a87e2cecc] Powered on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 754.017750] env[62070]: INFO nova.compute.manager [None req-3add793a-e278-4147-906b-0dddec8a7d40 tempest-ServerAddressesTestJSON-560794175 tempest-ServerAddressesTestJSON-560794175-project-member] [instance: a3fcb849-b015-43aa-8f95-0d4a87e2cecc] Took 7.59 seconds to spawn the instance on the hypervisor. [ 754.017750] env[62070]: DEBUG nova.compute.manager [None req-3add793a-e278-4147-906b-0dddec8a7d40 tempest-ServerAddressesTestJSON-560794175 tempest-ServerAddressesTestJSON-560794175-project-member] [instance: a3fcb849-b015-43aa-8f95-0d4a87e2cecc] Checking state {{(pid=62070) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 754.018531] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9979c325-281f-4685-9bde-abe9a87cc2fb {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.078225] env[62070]: DEBUG oslo_vmware.api [None req-7c1b8e29-6c3f-402f-942e-08b08c8eac7d tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52450013-cec6-e820-259d-33d9e6a14d8d, 'name': SearchDatastore_Task, 'duration_secs': 0.017198} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 754.078534] env[62070]: DEBUG oslo_concurrency.lockutils [None req-7c1b8e29-6c3f-402f-942e-08b08c8eac7d tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Releasing lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 754.078769] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-7c1b8e29-6c3f-402f-942e-08b08c8eac7d tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore1] a3c42653-9a4b-42d3-bc38-8d46d95c8f64/a3c42653-9a4b-42d3-bc38-8d46d95c8f64.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 754.079053] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-28baee18-e434-4380-8a53-06368ed37cb6 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.086323] env[62070]: DEBUG oslo_vmware.api [None req-7c1b8e29-6c3f-402f-942e-08b08c8eac7d tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Waiting for the task: (returnval){ [ 754.086323] env[62070]: value = "task-1121569" [ 754.086323] env[62070]: _type = "Task" [ 754.086323] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 754.096232] env[62070]: DEBUG oslo_vmware.api [None req-7c1b8e29-6c3f-402f-942e-08b08c8eac7d tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Task: {'id': task-1121569, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 754.198332] env[62070]: DEBUG oslo_concurrency.lockutils [None req-b307d371-7291-4b27-bac8-b0cebaeb9acc tempest-ServerDiagnosticsNegativeTest-1306946091 tempest-ServerDiagnosticsNegativeTest-1306946091-project-member] Lock "42a5c5d8-5c3a-4568-b212-d87f2951a334" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 112.048s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 754.264053] env[62070]: DEBUG oslo_concurrency.lockutils [None req-912d068e-ad4d-4a8c-af8e-8d1e2033b501 tempest-ServerDiagnosticsNegativeTest-1306946091 tempest-ServerDiagnosticsNegativeTest-1306946091-project-member] Acquiring lock "42a5c5d8-5c3a-4568-b212-d87f2951a334" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 754.264053] env[62070]: DEBUG oslo_concurrency.lockutils [None req-912d068e-ad4d-4a8c-af8e-8d1e2033b501 tempest-ServerDiagnosticsNegativeTest-1306946091 tempest-ServerDiagnosticsNegativeTest-1306946091-project-member] Lock "42a5c5d8-5c3a-4568-b212-d87f2951a334" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 754.264053] env[62070]: DEBUG oslo_concurrency.lockutils [None req-912d068e-ad4d-4a8c-af8e-8d1e2033b501 tempest-ServerDiagnosticsNegativeTest-1306946091 tempest-ServerDiagnosticsNegativeTest-1306946091-project-member] Acquiring lock "42a5c5d8-5c3a-4568-b212-d87f2951a334-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 754.264053] env[62070]: DEBUG oslo_concurrency.lockutils [None req-912d068e-ad4d-4a8c-af8e-8d1e2033b501 tempest-ServerDiagnosticsNegativeTest-1306946091 tempest-ServerDiagnosticsNegativeTest-1306946091-project-member] Lock "42a5c5d8-5c3a-4568-b212-d87f2951a334-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 754.264335] env[62070]: DEBUG oslo_concurrency.lockutils [None req-912d068e-ad4d-4a8c-af8e-8d1e2033b501 tempest-ServerDiagnosticsNegativeTest-1306946091 tempest-ServerDiagnosticsNegativeTest-1306946091-project-member] Lock "42a5c5d8-5c3a-4568-b212-d87f2951a334-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 754.266362] env[62070]: INFO nova.compute.manager [None req-912d068e-ad4d-4a8c-af8e-8d1e2033b501 tempest-ServerDiagnosticsNegativeTest-1306946091 tempest-ServerDiagnosticsNegativeTest-1306946091-project-member] [instance: 42a5c5d8-5c3a-4568-b212-d87f2951a334] Terminating instance [ 754.268781] env[62070]: DEBUG nova.compute.manager [None req-912d068e-ad4d-4a8c-af8e-8d1e2033b501 tempest-ServerDiagnosticsNegativeTest-1306946091 tempest-ServerDiagnosticsNegativeTest-1306946091-project-member] [instance: 42a5c5d8-5c3a-4568-b212-d87f2951a334] Start destroying the instance 
on the hypervisor. {{(pid=62070) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 754.268972] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-912d068e-ad4d-4a8c-af8e-8d1e2033b501 tempest-ServerDiagnosticsNegativeTest-1306946091 tempest-ServerDiagnosticsNegativeTest-1306946091-project-member] [instance: 42a5c5d8-5c3a-4568-b212-d87f2951a334] Destroying instance {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 754.269864] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1d3471d-d64b-4ae3-880d-15061f9176de {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.277516] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-912d068e-ad4d-4a8c-af8e-8d1e2033b501 tempest-ServerDiagnosticsNegativeTest-1306946091 tempest-ServerDiagnosticsNegativeTest-1306946091-project-member] [instance: 42a5c5d8-5c3a-4568-b212-d87f2951a334] Powering off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 754.277689] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e7b3f3c9-0a70-4ff2-95c6-f5b74543390a {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.284203] env[62070]: DEBUG oslo_vmware.api [None req-912d068e-ad4d-4a8c-af8e-8d1e2033b501 tempest-ServerDiagnosticsNegativeTest-1306946091 tempest-ServerDiagnosticsNegativeTest-1306946091-project-member] Waiting for the task: (returnval){ [ 754.284203] env[62070]: value = "task-1121570" [ 754.284203] env[62070]: _type = "Task" [ 754.284203] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 754.292085] env[62070]: DEBUG oslo_vmware.api [None req-912d068e-ad4d-4a8c-af8e-8d1e2033b501 tempest-ServerDiagnosticsNegativeTest-1306946091 tempest-ServerDiagnosticsNegativeTest-1306946091-project-member] Task: {'id': task-1121570, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 754.324780] env[62070]: INFO nova.compute.manager [-] [instance: fe378560-40b8-42c9-840d-b7d60de87c4d] Took 1.43 seconds to deallocate network for instance. [ 754.480315] env[62070]: DEBUG nova.compute.utils [None req-682a69a1-3ef5-481f-9200-81623493ef57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Using /dev/sd instead of None {{(pid=62070) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 754.486989] env[62070]: DEBUG nova.compute.manager [None req-682a69a1-3ef5-481f-9200-81623493ef57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 13e3576e-4f4c-4541-a637-daa124cbf8dd] Allocating IP information in the background. 
{{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 754.486989] env[62070]: DEBUG nova.network.neutron [None req-682a69a1-3ef5-481f-9200-81623493ef57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 13e3576e-4f4c-4541-a637-daa124cbf8dd] allocate_for_instance() {{(pid=62070) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 754.538359] env[62070]: INFO nova.compute.manager [None req-3add793a-e278-4147-906b-0dddec8a7d40 tempest-ServerAddressesTestJSON-560794175 tempest-ServerAddressesTestJSON-560794175-project-member] [instance: a3fcb849-b015-43aa-8f95-0d4a87e2cecc] Took 27.10 seconds to build instance. [ 754.582298] env[62070]: DEBUG nova.policy [None req-682a69a1-3ef5-481f-9200-81623493ef57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f866f97eed1a41b39b4cd552102c6e21', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9191f0e6c2ee401abca64c0780e230bf', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62070) authorize /opt/stack/nova/nova/policy.py:201}} [ 754.598982] env[62070]: DEBUG oslo_vmware.api [None req-7c1b8e29-6c3f-402f-942e-08b08c8eac7d tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Task: {'id': task-1121569, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 754.706745] env[62070]: DEBUG nova.compute.manager [None req-90df06cb-f655-463e-98fa-a0efa725c8cc tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] [instance: b7fdf23e-1e39-4745-ae84-38b7fa89aa5d] Starting instance... {{(pid=62070) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 754.795968] env[62070]: DEBUG oslo_vmware.api [None req-912d068e-ad4d-4a8c-af8e-8d1e2033b501 tempest-ServerDiagnosticsNegativeTest-1306946091 tempest-ServerDiagnosticsNegativeTest-1306946091-project-member] Task: {'id': task-1121570, 'name': PowerOffVM_Task, 'duration_secs': 0.198657} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 754.796287] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-912d068e-ad4d-4a8c-af8e-8d1e2033b501 tempest-ServerDiagnosticsNegativeTest-1306946091 tempest-ServerDiagnosticsNegativeTest-1306946091-project-member] [instance: 42a5c5d8-5c3a-4568-b212-d87f2951a334] Powered off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 754.796527] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-912d068e-ad4d-4a8c-af8e-8d1e2033b501 tempest-ServerDiagnosticsNegativeTest-1306946091 tempest-ServerDiagnosticsNegativeTest-1306946091-project-member] [instance: 42a5c5d8-5c3a-4568-b212-d87f2951a334] Unregistering the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 754.796880] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-837e431f-f8c1-4f2c-b427-d9096047f28f {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.831980] env[62070]: DEBUG oslo_concurrency.lockutils [None req-e5d94c5b-216d-42d5-8cbd-2e80268f2a5f tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 754.877935] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-912d068e-ad4d-4a8c-af8e-8d1e2033b501 tempest-ServerDiagnosticsNegativeTest-1306946091 tempest-ServerDiagnosticsNegativeTest-1306946091-project-member] [instance: 42a5c5d8-5c3a-4568-b212-d87f2951a334] Unregistered the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 754.878194] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-912d068e-ad4d-4a8c-af8e-8d1e2033b501 tempest-ServerDiagnosticsNegativeTest-1306946091 tempest-ServerDiagnosticsNegativeTest-1306946091-project-member] [instance: 42a5c5d8-5c3a-4568-b212-d87f2951a334] Deleting contents of the VM from datastore datastore1 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 754.878376] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-912d068e-ad4d-4a8c-af8e-8d1e2033b501 tempest-ServerDiagnosticsNegativeTest-1306946091 tempest-ServerDiagnosticsNegativeTest-1306946091-project-member] Deleting the datastore file [datastore1] 42a5c5d8-5c3a-4568-b212-d87f2951a334 {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 754.878749] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8c8c2631-f497-4957-b1ff-f3ea8f01d936 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.886319] env[62070]: DEBUG oslo_vmware.api [None req-912d068e-ad4d-4a8c-af8e-8d1e2033b501 tempest-ServerDiagnosticsNegativeTest-1306946091 tempest-ServerDiagnosticsNegativeTest-1306946091-project-member] Waiting for the task: (returnval){ [ 754.886319] env[62070]: value = "task-1121572" [ 754.886319] env[62070]: _type = "Task" [ 754.886319] env[62070]: } to complete. 
{{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 754.892953] env[62070]: DEBUG nova.network.neutron [None req-884b25a6-886e-4a2f-85f7-4f7a1048c76f tempest-ServersTestBootFromVolume-622206804 tempest-ServersTestBootFromVolume-622206804-project-member] [instance: dd5d90e8-964a-4e1c-a98a-bcba37a1d79e] Successfully updated port: f9ec7c6a-7ed9-4d9d-9e32-c182bc79b903 {{(pid=62070) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 754.899118] env[62070]: DEBUG oslo_vmware.api [None req-912d068e-ad4d-4a8c-af8e-8d1e2033b501 tempest-ServerDiagnosticsNegativeTest-1306946091 tempest-ServerDiagnosticsNegativeTest-1306946091-project-member] Task: {'id': task-1121572, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 754.986718] env[62070]: DEBUG nova.compute.manager [None req-682a69a1-3ef5-481f-9200-81623493ef57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 13e3576e-4f4c-4541-a637-daa124cbf8dd] Start building block device mappings for instance. {{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 755.040069] env[62070]: DEBUG oslo_concurrency.lockutils [None req-3add793a-e278-4147-906b-0dddec8a7d40 tempest-ServerAddressesTestJSON-560794175 tempest-ServerAddressesTestJSON-560794175-project-member] Lock "a3fcb849-b015-43aa-8f95-0d4a87e2cecc" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 107.970s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 755.079487] env[62070]: DEBUG nova.network.neutron [None req-682a69a1-3ef5-481f-9200-81623493ef57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 13e3576e-4f4c-4541-a637-daa124cbf8dd] Successfully created port: 6d2286a1-36df-47b8-93e7-424a5f04ce29 {{(pid=62070) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 755.098763] env[62070]: DEBUG oslo_vmware.api [None req-7c1b8e29-6c3f-402f-942e-08b08c8eac7d tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Task: {'id': task-1121569, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.68885} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 755.099038] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-7c1b8e29-6c3f-402f-942e-08b08c8eac7d tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore1] a3c42653-9a4b-42d3-bc38-8d46d95c8f64/a3c42653-9a4b-42d3-bc38-8d46d95c8f64.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 755.099252] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-7c1b8e29-6c3f-402f-942e-08b08c8eac7d tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: a3c42653-9a4b-42d3-bc38-8d46d95c8f64] Extending root virtual disk to 1048576 {{(pid=62070) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 755.099487] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e8857174-c0c5-487e-b947-615235859a72 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.106135] env[62070]: DEBUG oslo_vmware.api [None req-7c1b8e29-6c3f-402f-942e-08b08c8eac7d tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Waiting for the task: (returnval){ [ 755.106135] env[62070]: value = "task-1121573" [ 755.106135] env[62070]: _type = "Task" [ 755.106135] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 755.117203] env[62070]: DEBUG oslo_vmware.api [None req-7c1b8e29-6c3f-402f-942e-08b08c8eac7d tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Task: {'id': task-1121573, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 755.226845] env[62070]: DEBUG oslo_concurrency.lockutils [None req-90df06cb-f655-463e-98fa-a0efa725c8cc tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 755.405097] env[62070]: DEBUG oslo_concurrency.lockutils [None req-884b25a6-886e-4a2f-85f7-4f7a1048c76f tempest-ServersTestBootFromVolume-622206804 tempest-ServersTestBootFromVolume-622206804-project-member] Acquiring lock "refresh_cache-dd5d90e8-964a-4e1c-a98a-bcba37a1d79e" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 755.405097] env[62070]: DEBUG oslo_concurrency.lockutils [None req-884b25a6-886e-4a2f-85f7-4f7a1048c76f tempest-ServersTestBootFromVolume-622206804 tempest-ServersTestBootFromVolume-622206804-project-member] Acquired lock "refresh_cache-dd5d90e8-964a-4e1c-a98a-bcba37a1d79e" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 755.405097] env[62070]: DEBUG nova.network.neutron [None req-884b25a6-886e-4a2f-85f7-4f7a1048c76f tempest-ServersTestBootFromVolume-622206804 tempest-ServersTestBootFromVolume-622206804-project-member] [instance: dd5d90e8-964a-4e1c-a98a-bcba37a1d79e] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 755.408337] env[62070]: DEBUG oslo_vmware.api [None req-912d068e-ad4d-4a8c-af8e-8d1e2033b501 tempest-ServerDiagnosticsNegativeTest-1306946091 tempest-ServerDiagnosticsNegativeTest-1306946091-project-member] Task: {'id': task-1121572, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.271456} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 755.411254] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-912d068e-ad4d-4a8c-af8e-8d1e2033b501 tempest-ServerDiagnosticsNegativeTest-1306946091 tempest-ServerDiagnosticsNegativeTest-1306946091-project-member] Deleted the datastore file {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 755.411450] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-912d068e-ad4d-4a8c-af8e-8d1e2033b501 tempest-ServerDiagnosticsNegativeTest-1306946091 tempest-ServerDiagnosticsNegativeTest-1306946091-project-member] [instance: 42a5c5d8-5c3a-4568-b212-d87f2951a334] Deleted contents of the VM from datastore datastore1 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 755.411608] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-912d068e-ad4d-4a8c-af8e-8d1e2033b501 tempest-ServerDiagnosticsNegativeTest-1306946091 tempest-ServerDiagnosticsNegativeTest-1306946091-project-member] [instance: 42a5c5d8-5c3a-4568-b212-d87f2951a334] Instance destroyed {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 755.412387] env[62070]: INFO nova.compute.manager [None req-912d068e-ad4d-4a8c-af8e-8d1e2033b501 tempest-ServerDiagnosticsNegativeTest-1306946091 tempest-ServerDiagnosticsNegativeTest-1306946091-project-member] [instance: 42a5c5d8-5c3a-4568-b212-d87f2951a334] Took 1.14 seconds to destroy the instance on the hypervisor. 
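Note: the recurring "Acquiring lock ..." / "Lock ... acquired ... waited Ns" / "Lock ... released ... held Ns" triplets (for names such as "compute_resources" and "refresh_cache-<uuid>") are emitted by oslo.concurrency's lockutils wrappers around named locks. A hedged sketch of how code typically takes such a lock is below, using the standard lockutils context manager and decorator; the resource-tracker functions are simplified stand-ins for illustration, not Nova's actual methods.

    from oslo_concurrency import lockutils

    # Context-manager form: serializes any block that names the same lock.
    def drop_move_claim(resources):
        with lockutils.lock("compute_resources"):
            # ... mutate shared resource-tracker state here ...
            resources["in_use"] -= 1

    # Decorator form: every call to the wrapped function is serialized.
    @lockutils.synchronized("compute_resources")
    def instance_claim(resources, request):
        resources["in_use"] += 1
        return resources["in_use"]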
[ 755.412387] env[62070]: DEBUG oslo.service.loopingcall [None req-912d068e-ad4d-4a8c-af8e-8d1e2033b501 tempest-ServerDiagnosticsNegativeTest-1306946091 tempest-ServerDiagnosticsNegativeTest-1306946091-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 755.413956] env[62070]: DEBUG nova.compute.manager [-] [instance: 42a5c5d8-5c3a-4568-b212-d87f2951a334] Deallocating network for instance {{(pid=62070) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 755.413956] env[62070]: DEBUG nova.network.neutron [-] [instance: 42a5c5d8-5c3a-4568-b212-d87f2951a334] deallocate_for_instance() {{(pid=62070) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 755.435566] env[62070]: DEBUG oslo_concurrency.lockutils [None req-693c53de-9486-417a-87eb-2668a7f83b07 tempest-ServerAddressesTestJSON-560794175 tempest-ServerAddressesTestJSON-560794175-project-member] Acquiring lock "a3fcb849-b015-43aa-8f95-0d4a87e2cecc" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 755.435928] env[62070]: DEBUG oslo_concurrency.lockutils [None req-693c53de-9486-417a-87eb-2668a7f83b07 tempest-ServerAddressesTestJSON-560794175 tempest-ServerAddressesTestJSON-560794175-project-member] Lock "a3fcb849-b015-43aa-8f95-0d4a87e2cecc" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 755.436255] env[62070]: DEBUG oslo_concurrency.lockutils [None req-693c53de-9486-417a-87eb-2668a7f83b07 tempest-ServerAddressesTestJSON-560794175 tempest-ServerAddressesTestJSON-560794175-project-member] Acquiring lock "a3fcb849-b015-43aa-8f95-0d4a87e2cecc-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 755.436492] env[62070]: DEBUG oslo_concurrency.lockutils [None req-693c53de-9486-417a-87eb-2668a7f83b07 tempest-ServerAddressesTestJSON-560794175 tempest-ServerAddressesTestJSON-560794175-project-member] Lock "a3fcb849-b015-43aa-8f95-0d4a87e2cecc-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 755.436733] env[62070]: DEBUG oslo_concurrency.lockutils [None req-693c53de-9486-417a-87eb-2668a7f83b07 tempest-ServerAddressesTestJSON-560794175 tempest-ServerAddressesTestJSON-560794175-project-member] Lock "a3fcb849-b015-43aa-8f95-0d4a87e2cecc-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 755.439013] env[62070]: INFO nova.compute.manager [None req-693c53de-9486-417a-87eb-2668a7f83b07 tempest-ServerAddressesTestJSON-560794175 tempest-ServerAddressesTestJSON-560794175-project-member] [instance: a3fcb849-b015-43aa-8f95-0d4a87e2cecc] Terminating instance [ 755.440871] env[62070]: DEBUG nova.compute.manager [None 
req-693c53de-9486-417a-87eb-2668a7f83b07 tempest-ServerAddressesTestJSON-560794175 tempest-ServerAddressesTestJSON-560794175-project-member] [instance: a3fcb849-b015-43aa-8f95-0d4a87e2cecc] Start destroying the instance on the hypervisor. {{(pid=62070) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 755.441135] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-693c53de-9486-417a-87eb-2668a7f83b07 tempest-ServerAddressesTestJSON-560794175 tempest-ServerAddressesTestJSON-560794175-project-member] [instance: a3fcb849-b015-43aa-8f95-0d4a87e2cecc] Destroying instance {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 755.442039] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-825a3f2c-87d3-41c0-b739-acabd256410b {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.452718] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-693c53de-9486-417a-87eb-2668a7f83b07 tempest-ServerAddressesTestJSON-560794175 tempest-ServerAddressesTestJSON-560794175-project-member] [instance: a3fcb849-b015-43aa-8f95-0d4a87e2cecc] Powering off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 755.452984] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2b17fd6a-4847-4ed6-b102-8d4b0de8897c {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.461042] env[62070]: DEBUG oslo_vmware.api [None req-693c53de-9486-417a-87eb-2668a7f83b07 tempest-ServerAddressesTestJSON-560794175 tempest-ServerAddressesTestJSON-560794175-project-member] Waiting for the task: (returnval){ [ 755.461042] env[62070]: value = "task-1121574" [ 755.461042] env[62070]: _type = "Task" [ 755.461042] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 755.473388] env[62070]: DEBUG oslo_vmware.api [None req-693c53de-9486-417a-87eb-2668a7f83b07 tempest-ServerAddressesTestJSON-560794175 tempest-ServerAddressesTestJSON-560794175-project-member] Task: {'id': task-1121574, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 755.475378] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eab57284-bf57-4554-9b9e-2b109d84c281 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.482685] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cfc7e7c-3593-4427-bff4-995aaeb1cd63 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.519972] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a55a746c-430c-4c1e-a0b3-de165479a738 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.529473] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5078ad9-d3c2-4166-8e75-1eb878c6fa8f {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.537480] env[62070]: DEBUG nova.compute.manager [req-eb31445d-53b3-4bf0-a8df-ed840ebf358b req-060f0d76-6a3d-4ff1-bd54-15e42c65afa3 service nova] [instance: dd5d90e8-964a-4e1c-a98a-bcba37a1d79e] Received event network-vif-plugged-f9ec7c6a-7ed9-4d9d-9e32-c182bc79b903 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 755.537756] env[62070]: DEBUG oslo_concurrency.lockutils [req-eb31445d-53b3-4bf0-a8df-ed840ebf358b req-060f0d76-6a3d-4ff1-bd54-15e42c65afa3 service nova] Acquiring lock "dd5d90e8-964a-4e1c-a98a-bcba37a1d79e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 755.538025] env[62070]: DEBUG oslo_concurrency.lockutils [req-eb31445d-53b3-4bf0-a8df-ed840ebf358b req-060f0d76-6a3d-4ff1-bd54-15e42c65afa3 service nova] Lock "dd5d90e8-964a-4e1c-a98a-bcba37a1d79e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 755.538242] env[62070]: DEBUG oslo_concurrency.lockutils [req-eb31445d-53b3-4bf0-a8df-ed840ebf358b req-060f0d76-6a3d-4ff1-bd54-15e42c65afa3 service nova] Lock "dd5d90e8-964a-4e1c-a98a-bcba37a1d79e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 755.538444] env[62070]: DEBUG nova.compute.manager [req-eb31445d-53b3-4bf0-a8df-ed840ebf358b req-060f0d76-6a3d-4ff1-bd54-15e42c65afa3 service nova] [instance: dd5d90e8-964a-4e1c-a98a-bcba37a1d79e] No waiting events found dispatching network-vif-plugged-f9ec7c6a-7ed9-4d9d-9e32-c182bc79b903 {{(pid=62070) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 755.538712] env[62070]: WARNING nova.compute.manager [req-eb31445d-53b3-4bf0-a8df-ed840ebf358b req-060f0d76-6a3d-4ff1-bd54-15e42c65afa3 service nova] [instance: dd5d90e8-964a-4e1c-a98a-bcba37a1d79e] Received unexpected event network-vif-plugged-f9ec7c6a-7ed9-4d9d-9e32-c182bc79b903 for instance with vm_state building and task_state spawning. 
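Note: the network-vif-plugged handling above shows Nova's external-event rendezvous: the compute manager registers the events it expects for an instance, and when Neutron delivers one, the dispatcher either wakes the waiter or, as here, logs that no waiting events were found and warns about the unexpected event. The sketch below is a loose, toy illustration of that rendezvous using threading primitives; it is not Nova's implementation, and the class and method names are invented for the example.

    import threading

    class InstanceEventWaiter:
        """Toy rendezvous between a spawning thread and external events."""

        def __init__(self):
            self._lock = threading.Lock()
            self._waiters = {}  # (instance_uuid, event_name) -> threading.Event

        def prepare(self, instance_uuid, event_name):
            # Called by the spawn path before it starts waiting.
            ev = threading.Event()
            with self._lock:
                self._waiters[(instance_uuid, event_name)] = ev
            return ev

        def pop_and_dispatch(self, instance_uuid, event_name):
            # Called when an external event (e.g. network-vif-plugged) arrives.
            with self._lock:
                ev = self._waiters.pop((instance_uuid, event_name), None)
            if ev is None:
                # Matches the "No waiting events found dispatching ..." /
                # "Received unexpected event ..." pattern in the log.
                print("Unexpected event %s for %s" % (event_name, instance_uuid))
            else:
                ev.set()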
[ 755.539036] env[62070]: DEBUG nova.compute.manager [req-eb31445d-53b3-4bf0-a8df-ed840ebf358b req-060f0d76-6a3d-4ff1-bd54-15e42c65afa3 service nova] [instance: dd5d90e8-964a-4e1c-a98a-bcba37a1d79e] Received event network-changed-f9ec7c6a-7ed9-4d9d-9e32-c182bc79b903 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 755.539253] env[62070]: DEBUG nova.compute.manager [req-eb31445d-53b3-4bf0-a8df-ed840ebf358b req-060f0d76-6a3d-4ff1-bd54-15e42c65afa3 service nova] [instance: dd5d90e8-964a-4e1c-a98a-bcba37a1d79e] Refreshing instance network info cache due to event network-changed-f9ec7c6a-7ed9-4d9d-9e32-c182bc79b903. {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 755.539487] env[62070]: DEBUG oslo_concurrency.lockutils [req-eb31445d-53b3-4bf0-a8df-ed840ebf358b req-060f0d76-6a3d-4ff1-bd54-15e42c65afa3 service nova] Acquiring lock "refresh_cache-dd5d90e8-964a-4e1c-a98a-bcba37a1d79e" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 755.549604] env[62070]: DEBUG nova.compute.provider_tree [None req-c8619bac-3476-49c3-92b7-dcaac00d2762 tempest-ServersTestFqdnHostnames-1065104165 tempest-ServersTestFqdnHostnames-1065104165-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 755.551393] env[62070]: DEBUG nova.compute.manager [None req-9f976259-6d6a-4631-9026-5216a519ae6c tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] [instance: 10672096-00ba-4481-8ab3-085a185076db] Starting instance... {{(pid=62070) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 755.619194] env[62070]: DEBUG oslo_vmware.api [None req-7c1b8e29-6c3f-402f-942e-08b08c8eac7d tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Task: {'id': task-1121573, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.184893} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 755.619194] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-7c1b8e29-6c3f-402f-942e-08b08c8eac7d tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: a3c42653-9a4b-42d3-bc38-8d46d95c8f64] Extended root virtual disk {{(pid=62070) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 755.619194] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ad0b75f-bbc7-4e8b-85f3-64f91f74345f {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.624870] env[62070]: DEBUG nova.compute.manager [None req-884b25a6-886e-4a2f-85f7-4f7a1048c76f tempest-ServersTestBootFromVolume-622206804 tempest-ServersTestBootFromVolume-622206804-project-member] [instance: dd5d90e8-964a-4e1c-a98a-bcba37a1d79e] Start spawning the instance on the hypervisor. 
{{(pid=62070) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 755.625764] env[62070]: DEBUG nova.virt.hardware [None req-884b25a6-886e-4a2f-85f7-4f7a1048c76f tempest-ServersTestBootFromVolume-622206804 tempest-ServersTestBootFromVolume-622206804-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T09:21:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 755.625764] env[62070]: DEBUG nova.virt.hardware [None req-884b25a6-886e-4a2f-85f7-4f7a1048c76f tempest-ServersTestBootFromVolume-622206804 tempest-ServersTestBootFromVolume-622206804-project-member] Flavor limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 755.625975] env[62070]: DEBUG nova.virt.hardware [None req-884b25a6-886e-4a2f-85f7-4f7a1048c76f tempest-ServersTestBootFromVolume-622206804 tempest-ServersTestBootFromVolume-622206804-project-member] Image limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 755.626106] env[62070]: DEBUG nova.virt.hardware [None req-884b25a6-886e-4a2f-85f7-4f7a1048c76f tempest-ServersTestBootFromVolume-622206804 tempest-ServersTestBootFromVolume-622206804-project-member] Flavor pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 755.626260] env[62070]: DEBUG nova.virt.hardware [None req-884b25a6-886e-4a2f-85f7-4f7a1048c76f tempest-ServersTestBootFromVolume-622206804 tempest-ServersTestBootFromVolume-622206804-project-member] Image pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 755.626409] env[62070]: DEBUG nova.virt.hardware [None req-884b25a6-886e-4a2f-85f7-4f7a1048c76f tempest-ServersTestBootFromVolume-622206804 tempest-ServersTestBootFromVolume-622206804-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 755.626609] env[62070]: DEBUG nova.virt.hardware [None req-884b25a6-886e-4a2f-85f7-4f7a1048c76f tempest-ServersTestBootFromVolume-622206804 tempest-ServersTestBootFromVolume-622206804-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 755.626770] env[62070]: DEBUG nova.virt.hardware [None req-884b25a6-886e-4a2f-85f7-4f7a1048c76f tempest-ServersTestBootFromVolume-622206804 tempest-ServersTestBootFromVolume-622206804-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 755.626950] env[62070]: DEBUG nova.virt.hardware [None req-884b25a6-886e-4a2f-85f7-4f7a1048c76f tempest-ServersTestBootFromVolume-622206804 tempest-ServersTestBootFromVolume-622206804-project-member] Got 1 possible 
topologies {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 755.627126] env[62070]: DEBUG nova.virt.hardware [None req-884b25a6-886e-4a2f-85f7-4f7a1048c76f tempest-ServersTestBootFromVolume-622206804 tempest-ServersTestBootFromVolume-622206804-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 755.627295] env[62070]: DEBUG nova.virt.hardware [None req-884b25a6-886e-4a2f-85f7-4f7a1048c76f tempest-ServersTestBootFromVolume-622206804 tempest-ServersTestBootFromVolume-622206804-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 755.628512] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64a0821f-416a-459c-a159-0f3656cbb465 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.650107] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-7c1b8e29-6c3f-402f-942e-08b08c8eac7d tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: a3c42653-9a4b-42d3-bc38-8d46d95c8f64] Reconfiguring VM instance instance-0000002e to attach disk [datastore1] a3c42653-9a4b-42d3-bc38-8d46d95c8f64/a3c42653-9a4b-42d3-bc38-8d46d95c8f64.vmdk or device None with type sparse {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 755.651029] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cb55c000-0bf9-4206-a932-f12163fbd047 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.671627] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f550f53f-6cb4-4d69-b4e7-d9f3839887a4 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.677305] env[62070]: DEBUG oslo_vmware.api [None req-7c1b8e29-6c3f-402f-942e-08b08c8eac7d tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Waiting for the task: (returnval){ [ 755.677305] env[62070]: value = "task-1121575" [ 755.677305] env[62070]: _type = "Task" [ 755.677305] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 755.695691] env[62070]: DEBUG oslo_vmware.api [None req-7c1b8e29-6c3f-402f-942e-08b08c8eac7d tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Task: {'id': task-1121575, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 755.949040] env[62070]: DEBUG nova.network.neutron [None req-884b25a6-886e-4a2f-85f7-4f7a1048c76f tempest-ServersTestBootFromVolume-622206804 tempest-ServersTestBootFromVolume-622206804-project-member] [instance: dd5d90e8-964a-4e1c-a98a-bcba37a1d79e] Instance cache missing network info. 
{{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 755.973011] env[62070]: DEBUG oslo_vmware.api [None req-693c53de-9486-417a-87eb-2668a7f83b07 tempest-ServerAddressesTestJSON-560794175 tempest-ServerAddressesTestJSON-560794175-project-member] Task: {'id': task-1121574, 'name': PowerOffVM_Task, 'duration_secs': 0.205746} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 755.973292] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-693c53de-9486-417a-87eb-2668a7f83b07 tempest-ServerAddressesTestJSON-560794175 tempest-ServerAddressesTestJSON-560794175-project-member] [instance: a3fcb849-b015-43aa-8f95-0d4a87e2cecc] Powered off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 755.973494] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-693c53de-9486-417a-87eb-2668a7f83b07 tempest-ServerAddressesTestJSON-560794175 tempest-ServerAddressesTestJSON-560794175-project-member] [instance: a3fcb849-b015-43aa-8f95-0d4a87e2cecc] Unregistering the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 755.973782] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3a04d5b5-bb55-4ff3-ae48-91e874b272f0 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.025352] env[62070]: DEBUG nova.compute.manager [None req-682a69a1-3ef5-481f-9200-81623493ef57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 13e3576e-4f4c-4541-a637-daa124cbf8dd] Start spawning the instance on the hypervisor. {{(pid=62070) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 756.039016] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-693c53de-9486-417a-87eb-2668a7f83b07 tempest-ServerAddressesTestJSON-560794175 tempest-ServerAddressesTestJSON-560794175-project-member] [instance: a3fcb849-b015-43aa-8f95-0d4a87e2cecc] Unregistered the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 756.039308] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-693c53de-9486-417a-87eb-2668a7f83b07 tempest-ServerAddressesTestJSON-560794175 tempest-ServerAddressesTestJSON-560794175-project-member] [instance: a3fcb849-b015-43aa-8f95-0d4a87e2cecc] Deleting contents of the VM from datastore datastore2 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 756.039452] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-693c53de-9486-417a-87eb-2668a7f83b07 tempest-ServerAddressesTestJSON-560794175 tempest-ServerAddressesTestJSON-560794175-project-member] Deleting the datastore file [datastore2] a3fcb849-b015-43aa-8f95-0d4a87e2cecc {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 756.039724] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0b6408cd-a43e-4b0b-ab80-060708be0f17 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.047549] env[62070]: DEBUG oslo_vmware.api [None req-693c53de-9486-417a-87eb-2668a7f83b07 tempest-ServerAddressesTestJSON-560794175 tempest-ServerAddressesTestJSON-560794175-project-member] Waiting for the task: (returnval){ [ 756.047549] env[62070]: value = "task-1121577" [ 756.047549] env[62070]: _type = "Task" [ 
756.047549] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 756.051902] env[62070]: DEBUG nova.virt.hardware [None req-682a69a1-3ef5-481f-9200-81623493ef57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T09:21:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T09:21:17Z,direct_url=,disk_format='vmdk',id=43ea607c-7ece-4601-9b11-75c6a16aa7dd,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9d42cb2bbadf40d6b35f237f71234611',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T09:21:18Z,virtual_size=,visibility=), allow threads: False {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 756.052132] env[62070]: DEBUG nova.virt.hardware [None req-682a69a1-3ef5-481f-9200-81623493ef57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Flavor limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 756.052283] env[62070]: DEBUG nova.virt.hardware [None req-682a69a1-3ef5-481f-9200-81623493ef57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Image limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 756.052457] env[62070]: DEBUG nova.virt.hardware [None req-682a69a1-3ef5-481f-9200-81623493ef57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Flavor pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 756.052594] env[62070]: DEBUG nova.virt.hardware [None req-682a69a1-3ef5-481f-9200-81623493ef57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Image pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 756.053153] env[62070]: DEBUG nova.virt.hardware [None req-682a69a1-3ef5-481f-9200-81623493ef57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 756.053153] env[62070]: DEBUG nova.virt.hardware [None req-682a69a1-3ef5-481f-9200-81623493ef57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 756.053153] env[62070]: DEBUG nova.virt.hardware [None req-682a69a1-3ef5-481f-9200-81623493ef57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 756.053363] 
env[62070]: DEBUG nova.virt.hardware [None req-682a69a1-3ef5-481f-9200-81623493ef57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Got 1 possible topologies {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 756.053406] env[62070]: DEBUG nova.virt.hardware [None req-682a69a1-3ef5-481f-9200-81623493ef57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 756.053569] env[62070]: DEBUG nova.virt.hardware [None req-682a69a1-3ef5-481f-9200-81623493ef57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 756.054345] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6064db41-882c-40a0-bbeb-8cc164d395a4 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.057635] env[62070]: DEBUG nova.scheduler.client.report [None req-c8619bac-3476-49c3-92b7-dcaac00d2762 tempest-ServersTestFqdnHostnames-1065104165 tempest-ServersTestFqdnHostnames-1065104165-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 756.067914] env[62070]: DEBUG oslo_vmware.api [None req-693c53de-9486-417a-87eb-2668a7f83b07 tempest-ServerAddressesTestJSON-560794175 tempest-ServerAddressesTestJSON-560794175-project-member] Task: {'id': task-1121577, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 756.075387] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50ddcfd2-fdc4-48e2-97c5-181d43b644ba {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.090043] env[62070]: DEBUG oslo_concurrency.lockutils [None req-9f976259-6d6a-4631-9026-5216a519ae6c tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 756.127092] env[62070]: DEBUG nova.network.neutron [None req-884b25a6-886e-4a2f-85f7-4f7a1048c76f tempest-ServersTestBootFromVolume-622206804 tempest-ServersTestBootFromVolume-622206804-project-member] [instance: dd5d90e8-964a-4e1c-a98a-bcba37a1d79e] Updating instance_info_cache with network_info: [{"id": "f9ec7c6a-7ed9-4d9d-9e32-c182bc79b903", "address": "fa:16:3e:a5:7d:15", "network": {"id": "781d8af1-b8c8-42cb-b585-29fcaf2956c3", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-491072945-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7ded80f14852431187bd5066e4c42d71", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0ed262a3-f84f-4e1c-bbaf-c6a10e0c243e", "external-id": "nsx-vlan-transportzone-146", "segmentation_id": 146, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf9ec7c6a-7e", "ovs_interfaceid": "f9ec7c6a-7ed9-4d9d-9e32-c182bc79b903", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 756.188963] env[62070]: DEBUG oslo_vmware.api [None req-7c1b8e29-6c3f-402f-942e-08b08c8eac7d tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Task: {'id': task-1121575, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 756.227142] env[62070]: DEBUG nova.network.neutron [-] [instance: 42a5c5d8-5c3a-4568-b212-d87f2951a334] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 756.558595] env[62070]: DEBUG oslo_vmware.api [None req-693c53de-9486-417a-87eb-2668a7f83b07 tempest-ServerAddressesTestJSON-560794175 tempest-ServerAddressesTestJSON-560794175-project-member] Task: {'id': task-1121577, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.435677} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 756.558894] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-693c53de-9486-417a-87eb-2668a7f83b07 tempest-ServerAddressesTestJSON-560794175 tempest-ServerAddressesTestJSON-560794175-project-member] Deleted the datastore file {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 756.559249] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-693c53de-9486-417a-87eb-2668a7f83b07 tempest-ServerAddressesTestJSON-560794175 tempest-ServerAddressesTestJSON-560794175-project-member] [instance: a3fcb849-b015-43aa-8f95-0d4a87e2cecc] Deleted contents of the VM from datastore datastore2 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 756.559336] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-693c53de-9486-417a-87eb-2668a7f83b07 tempest-ServerAddressesTestJSON-560794175 tempest-ServerAddressesTestJSON-560794175-project-member] [instance: a3fcb849-b015-43aa-8f95-0d4a87e2cecc] Instance destroyed {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 756.559558] env[62070]: INFO nova.compute.manager [None req-693c53de-9486-417a-87eb-2668a7f83b07 tempest-ServerAddressesTestJSON-560794175 tempest-ServerAddressesTestJSON-560794175-project-member] [instance: a3fcb849-b015-43aa-8f95-0d4a87e2cecc] Took 1.12 seconds to destroy the instance on the hypervisor. [ 756.559748] env[62070]: DEBUG oslo.service.loopingcall [None req-693c53de-9486-417a-87eb-2668a7f83b07 tempest-ServerAddressesTestJSON-560794175 tempest-ServerAddressesTestJSON-560794175-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 756.559960] env[62070]: DEBUG nova.compute.manager [-] [instance: a3fcb849-b015-43aa-8f95-0d4a87e2cecc] Deallocating network for instance {{(pid=62070) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 756.560084] env[62070]: DEBUG nova.network.neutron [-] [instance: a3fcb849-b015-43aa-8f95-0d4a87e2cecc] deallocate_for_instance() {{(pid=62070) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 756.564629] env[62070]: DEBUG oslo_concurrency.lockutils [None req-c8619bac-3476-49c3-92b7-dcaac00d2762 tempest-ServersTestFqdnHostnames-1065104165 tempest-ServersTestFqdnHostnames-1065104165-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.591s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 756.565168] env[62070]: DEBUG nova.compute.manager [None req-c8619bac-3476-49c3-92b7-dcaac00d2762 tempest-ServersTestFqdnHostnames-1065104165 tempest-ServersTestFqdnHostnames-1065104165-project-member] [instance: 748c94c7-1233-44f4-a71a-176b26518399] Start building networks asynchronously for instance. 
{{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 756.568060] env[62070]: DEBUG oslo_concurrency.lockutils [None req-e4f46748-c02f-46f3-a136-2698cbfaeb86 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.059s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 756.569612] env[62070]: INFO nova.compute.claims [None req-e4f46748-c02f-46f3-a136-2698cbfaeb86 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] [instance: 359ae9f2-a907-459e-99b9-3e043d5d015f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 756.632050] env[62070]: DEBUG oslo_concurrency.lockutils [None req-884b25a6-886e-4a2f-85f7-4f7a1048c76f tempest-ServersTestBootFromVolume-622206804 tempest-ServersTestBootFromVolume-622206804-project-member] Releasing lock "refresh_cache-dd5d90e8-964a-4e1c-a98a-bcba37a1d79e" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 756.632379] env[62070]: DEBUG nova.compute.manager [None req-884b25a6-886e-4a2f-85f7-4f7a1048c76f tempest-ServersTestBootFromVolume-622206804 tempest-ServersTestBootFromVolume-622206804-project-member] [instance: dd5d90e8-964a-4e1c-a98a-bcba37a1d79e] Instance network_info: |[{"id": "f9ec7c6a-7ed9-4d9d-9e32-c182bc79b903", "address": "fa:16:3e:a5:7d:15", "network": {"id": "781d8af1-b8c8-42cb-b585-29fcaf2956c3", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-491072945-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7ded80f14852431187bd5066e4c42d71", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0ed262a3-f84f-4e1c-bbaf-c6a10e0c243e", "external-id": "nsx-vlan-transportzone-146", "segmentation_id": 146, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf9ec7c6a-7e", "ovs_interfaceid": "f9ec7c6a-7ed9-4d9d-9e32-c182bc79b903", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 756.632666] env[62070]: DEBUG oslo_concurrency.lockutils [req-eb31445d-53b3-4bf0-a8df-ed840ebf358b req-060f0d76-6a3d-4ff1-bd54-15e42c65afa3 service nova] Acquired lock "refresh_cache-dd5d90e8-964a-4e1c-a98a-bcba37a1d79e" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 756.632844] env[62070]: DEBUG nova.network.neutron [req-eb31445d-53b3-4bf0-a8df-ed840ebf358b req-060f0d76-6a3d-4ff1-bd54-15e42c65afa3 service nova] [instance: dd5d90e8-964a-4e1c-a98a-bcba37a1d79e] Refreshing network info cache for port f9ec7c6a-7ed9-4d9d-9e32-c182bc79b903 {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 756.633967] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-884b25a6-886e-4a2f-85f7-4f7a1048c76f 
tempest-ServersTestBootFromVolume-622206804 tempest-ServersTestBootFromVolume-622206804-project-member] [instance: dd5d90e8-964a-4e1c-a98a-bcba37a1d79e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a5:7d:15', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0ed262a3-f84f-4e1c-bbaf-c6a10e0c243e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f9ec7c6a-7ed9-4d9d-9e32-c182bc79b903', 'vif_model': 'vmxnet3'}] {{(pid=62070) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 756.646082] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-884b25a6-886e-4a2f-85f7-4f7a1048c76f tempest-ServersTestBootFromVolume-622206804 tempest-ServersTestBootFromVolume-622206804-project-member] Creating folder: Project (7ded80f14852431187bd5066e4c42d71). Parent ref: group-v245319. {{(pid=62070) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 756.647020] env[62070]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-100b4379-7671-4850-aa87-1657b3295d40 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.666208] env[62070]: WARNING suds.client [-] Web service reported a SOAP processing fault using an unexpected HTTP status code 200. Reporting as an internal server error. [ 756.666384] env[62070]: DEBUG oslo_vmware.api [-] Fault list: [DuplicateName] {{(pid=62070) _invoke_api /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:337}} [ 756.666989] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-884b25a6-886e-4a2f-85f7-4f7a1048c76f tempest-ServersTestBootFromVolume-622206804 tempest-ServersTestBootFromVolume-622206804-project-member] Folder already exists: Project (7ded80f14852431187bd5066e4c42d71). Parent ref: group-v245319. {{(pid=62070) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 756.667202] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-884b25a6-886e-4a2f-85f7-4f7a1048c76f tempest-ServersTestBootFromVolume-622206804 tempest-ServersTestBootFromVolume-622206804-project-member] Creating folder: Instances. Parent ref: group-v245323. {{(pid=62070) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 756.667435] env[62070]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-48c5168a-ab1b-48df-943a-2dcb979c4b89 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.677219] env[62070]: INFO nova.virt.vmwareapi.vm_util [None req-884b25a6-886e-4a2f-85f7-4f7a1048c76f tempest-ServersTestBootFromVolume-622206804 tempest-ServersTestBootFromVolume-622206804-project-member] Created folder: Instances in parent group-v245323. [ 756.677549] env[62070]: DEBUG oslo.service.loopingcall [None req-884b25a6-886e-4a2f-85f7-4f7a1048c76f tempest-ServersTestBootFromVolume-622206804 tempest-ServersTestBootFromVolume-622206804-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 756.677776] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dd5d90e8-964a-4e1c-a98a-bcba37a1d79e] Creating VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 756.677986] env[62070]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-80360686-949c-4a3e-8995-66588ba132ca {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.708301] env[62070]: DEBUG oslo_vmware.api [None req-7c1b8e29-6c3f-402f-942e-08b08c8eac7d tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Task: {'id': task-1121575, 'name': ReconfigVM_Task, 'duration_secs': 0.785255} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 756.711938] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-7c1b8e29-6c3f-402f-942e-08b08c8eac7d tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: a3c42653-9a4b-42d3-bc38-8d46d95c8f64] Reconfigured VM instance instance-0000002e to attach disk [datastore1] a3c42653-9a4b-42d3-bc38-8d46d95c8f64/a3c42653-9a4b-42d3-bc38-8d46d95c8f64.vmdk or device None with type sparse {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 756.711938] env[62070]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 756.711938] env[62070]: value = "task-1121580" [ 756.711938] env[62070]: _type = "Task" [ 756.711938] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 756.711938] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0ab89067-7b7c-43e6-bcff-926c3e2d7b23 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.720679] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1121580, 'name': CreateVM_Task} progress is 5%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 756.722049] env[62070]: DEBUG oslo_vmware.api [None req-7c1b8e29-6c3f-402f-942e-08b08c8eac7d tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Waiting for the task: (returnval){ [ 756.722049] env[62070]: value = "task-1121581" [ 756.722049] env[62070]: _type = "Task" [ 756.722049] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 756.732046] env[62070]: INFO nova.compute.manager [-] [instance: 42a5c5d8-5c3a-4568-b212-d87f2951a334] Took 1.32 seconds to deallocate network for instance. [ 756.732046] env[62070]: DEBUG oslo_vmware.api [None req-7c1b8e29-6c3f-402f-942e-08b08c8eac7d tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Task: {'id': task-1121581, 'name': Rename_Task} progress is 6%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 757.073982] env[62070]: DEBUG nova.compute.utils [None req-c8619bac-3476-49c3-92b7-dcaac00d2762 tempest-ServersTestFqdnHostnames-1065104165 tempest-ServersTestFqdnHostnames-1065104165-project-member] Using /dev/sd instead of None {{(pid=62070) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 757.078348] env[62070]: DEBUG nova.compute.manager [None req-c8619bac-3476-49c3-92b7-dcaac00d2762 tempest-ServersTestFqdnHostnames-1065104165 tempest-ServersTestFqdnHostnames-1065104165-project-member] [instance: 748c94c7-1233-44f4-a71a-176b26518399] Allocating IP information in the background. {{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 757.078578] env[62070]: DEBUG nova.network.neutron [None req-c8619bac-3476-49c3-92b7-dcaac00d2762 tempest-ServersTestFqdnHostnames-1065104165 tempest-ServersTestFqdnHostnames-1065104165-project-member] [instance: 748c94c7-1233-44f4-a71a-176b26518399] allocate_for_instance() {{(pid=62070) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 757.125798] env[62070]: DEBUG nova.policy [None req-c8619bac-3476-49c3-92b7-dcaac00d2762 tempest-ServersTestFqdnHostnames-1065104165 tempest-ServersTestFqdnHostnames-1065104165-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '703a8952053b4a6a926b6199412744c0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a25a88ab06e440e3b9813e276194143a', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62070) authorize /opt/stack/nova/nova/policy.py:201}} [ 757.229024] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1121580, 'name': CreateVM_Task, 'duration_secs': 0.425369} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 757.229024] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dd5d90e8-964a-4e1c-a98a-bcba37a1d79e] Created VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 757.230462] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-884b25a6-886e-4a2f-85f7-4f7a1048c76f tempest-ServersTestBootFromVolume-622206804 tempest-ServersTestBootFromVolume-622206804-project-member] [instance: dd5d90e8-964a-4e1c-a98a-bcba37a1d79e] Block device information present: {'root_device_name': '/dev/sda', 'image': [], 'ephemerals': [], 'block_device_mapping': [{'guest_format': None, 'boot_index': 0, 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-245326', 'volume_id': 'b5d7c242-16d7-4aff-81f3-626f9f878e94', 'name': 'volume-b5d7c242-16d7-4aff-81f3-626f9f878e94', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'dd5d90e8-964a-4e1c-a98a-bcba37a1d79e', 'attached_at': '', 'detached_at': '', 'volume_id': 'b5d7c242-16d7-4aff-81f3-626f9f878e94', 'serial': 'b5d7c242-16d7-4aff-81f3-626f9f878e94'}, 'device_type': None, 'mount_device': '/dev/sda', 'disk_bus': None, 'delete_on_termination': True, 'attachment_id': '5328e075-ba11-47da-adaf-60aebd437e55', 'volume_type': None}], 'swap': None} {{(pid=62070) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 757.230970] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-884b25a6-886e-4a2f-85f7-4f7a1048c76f tempest-ServersTestBootFromVolume-622206804 tempest-ServersTestBootFromVolume-622206804-project-member] [instance: dd5d90e8-964a-4e1c-a98a-bcba37a1d79e] Root volume attach. Driver type: vmdk {{(pid=62070) attach_root_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:661}} [ 757.232588] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31c75337-55d5-45f7-af2e-38776ce20c24 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.243130] env[62070]: DEBUG oslo_concurrency.lockutils [None req-912d068e-ad4d-4a8c-af8e-8d1e2033b501 tempest-ServerDiagnosticsNegativeTest-1306946091 tempest-ServerDiagnosticsNegativeTest-1306946091-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 757.243676] env[62070]: DEBUG oslo_vmware.api [None req-7c1b8e29-6c3f-402f-942e-08b08c8eac7d tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Task: {'id': task-1121581, 'name': Rename_Task, 'duration_secs': 0.14474} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 757.246421] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-7c1b8e29-6c3f-402f-942e-08b08c8eac7d tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: a3c42653-9a4b-42d3-bc38-8d46d95c8f64] Powering on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 757.247034] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0ec4ea28-4866-444a-a15a-6b7c724a8338 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.252317] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ced95fe-332a-4897-96c5-b1f96eb9f431 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.257272] env[62070]: DEBUG oslo_vmware.api [None req-7c1b8e29-6c3f-402f-942e-08b08c8eac7d tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Waiting for the task: (returnval){ [ 757.257272] env[62070]: value = "task-1121582" [ 757.257272] env[62070]: _type = "Task" [ 757.257272] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 757.264069] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebd97ad8-6302-4e74-9b5d-be3939536a49 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.271603] env[62070]: DEBUG oslo_vmware.api [None req-7c1b8e29-6c3f-402f-942e-08b08c8eac7d tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Task: {'id': task-1121582, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 757.276221] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.RelocateVM_Task with opID=oslo.vmware-66233e0a-fe7c-4607-85e3-1a7055e629d1 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.283935] env[62070]: DEBUG oslo_vmware.api [None req-884b25a6-886e-4a2f-85f7-4f7a1048c76f tempest-ServersTestBootFromVolume-622206804 tempest-ServersTestBootFromVolume-622206804-project-member] Waiting for the task: (returnval){ [ 757.283935] env[62070]: value = "task-1121583" [ 757.283935] env[62070]: _type = "Task" [ 757.283935] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 757.297346] env[62070]: DEBUG oslo_vmware.api [None req-884b25a6-886e-4a2f-85f7-4f7a1048c76f tempest-ServersTestBootFromVolume-622206804 tempest-ServersTestBootFromVolume-622206804-project-member] Task: {'id': task-1121583, 'name': RelocateVM_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 757.341231] env[62070]: DEBUG nova.network.neutron [None req-682a69a1-3ef5-481f-9200-81623493ef57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 13e3576e-4f4c-4541-a637-daa124cbf8dd] Successfully updated port: 6d2286a1-36df-47b8-93e7-424a5f04ce29 {{(pid=62070) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 757.490018] env[62070]: DEBUG nova.network.neutron [req-eb31445d-53b3-4bf0-a8df-ed840ebf358b req-060f0d76-6a3d-4ff1-bd54-15e42c65afa3 service nova] [instance: dd5d90e8-964a-4e1c-a98a-bcba37a1d79e] Updated VIF entry in instance network info cache for port f9ec7c6a-7ed9-4d9d-9e32-c182bc79b903. {{(pid=62070) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 757.490018] env[62070]: DEBUG nova.network.neutron [req-eb31445d-53b3-4bf0-a8df-ed840ebf358b req-060f0d76-6a3d-4ff1-bd54-15e42c65afa3 service nova] [instance: dd5d90e8-964a-4e1c-a98a-bcba37a1d79e] Updating instance_info_cache with network_info: [{"id": "f9ec7c6a-7ed9-4d9d-9e32-c182bc79b903", "address": "fa:16:3e:a5:7d:15", "network": {"id": "781d8af1-b8c8-42cb-b585-29fcaf2956c3", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-491072945-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7ded80f14852431187bd5066e4c42d71", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0ed262a3-f84f-4e1c-bbaf-c6a10e0c243e", "external-id": "nsx-vlan-transportzone-146", "segmentation_id": 146, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf9ec7c6a-7e", "ovs_interfaceid": "f9ec7c6a-7ed9-4d9d-9e32-c182bc79b903", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 757.581135] env[62070]: DEBUG nova.compute.manager [None req-c8619bac-3476-49c3-92b7-dcaac00d2762 tempest-ServersTestFqdnHostnames-1065104165 tempest-ServersTestFqdnHostnames-1065104165-project-member] [instance: 748c94c7-1233-44f4-a71a-176b26518399] Start building block device mappings for instance. 
{{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 757.633188] env[62070]: DEBUG nova.network.neutron [None req-c8619bac-3476-49c3-92b7-dcaac00d2762 tempest-ServersTestFqdnHostnames-1065104165 tempest-ServersTestFqdnHostnames-1065104165-project-member] [instance: 748c94c7-1233-44f4-a71a-176b26518399] Successfully created port: 3fd232c6-4869-4c79-8250-ae854eb69ae0 {{(pid=62070) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 757.747868] env[62070]: DEBUG nova.network.neutron [-] [instance: a3fcb849-b015-43aa-8f95-0d4a87e2cecc] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 757.769646] env[62070]: DEBUG nova.compute.manager [req-1501850e-62be-404e-9720-26900b9ff5c6 req-91875736-a690-4121-b900-e3f2ec8bd250 service nova] [instance: 42a5c5d8-5c3a-4568-b212-d87f2951a334] Received event network-vif-deleted-c29b4364-dedb-48eb-b91d-45b2a60f1bdd {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 757.770064] env[62070]: DEBUG nova.compute.manager [req-1501850e-62be-404e-9720-26900b9ff5c6 req-91875736-a690-4121-b900-e3f2ec8bd250 service nova] [instance: 13e3576e-4f4c-4541-a637-daa124cbf8dd] Received event network-vif-plugged-6d2286a1-36df-47b8-93e7-424a5f04ce29 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 757.770355] env[62070]: DEBUG oslo_concurrency.lockutils [req-1501850e-62be-404e-9720-26900b9ff5c6 req-91875736-a690-4121-b900-e3f2ec8bd250 service nova] Acquiring lock "13e3576e-4f4c-4541-a637-daa124cbf8dd-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 757.770661] env[62070]: DEBUG oslo_concurrency.lockutils [req-1501850e-62be-404e-9720-26900b9ff5c6 req-91875736-a690-4121-b900-e3f2ec8bd250 service nova] Lock "13e3576e-4f4c-4541-a637-daa124cbf8dd-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 757.770825] env[62070]: DEBUG oslo_concurrency.lockutils [req-1501850e-62be-404e-9720-26900b9ff5c6 req-91875736-a690-4121-b900-e3f2ec8bd250 service nova] Lock "13e3576e-4f4c-4541-a637-daa124cbf8dd-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 757.771061] env[62070]: DEBUG nova.compute.manager [req-1501850e-62be-404e-9720-26900b9ff5c6 req-91875736-a690-4121-b900-e3f2ec8bd250 service nova] [instance: 13e3576e-4f4c-4541-a637-daa124cbf8dd] No waiting events found dispatching network-vif-plugged-6d2286a1-36df-47b8-93e7-424a5f04ce29 {{(pid=62070) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 757.771378] env[62070]: WARNING nova.compute.manager [req-1501850e-62be-404e-9720-26900b9ff5c6 req-91875736-a690-4121-b900-e3f2ec8bd250 service nova] [instance: 13e3576e-4f4c-4541-a637-daa124cbf8dd] Received unexpected event network-vif-plugged-6d2286a1-36df-47b8-93e7-424a5f04ce29 for instance with vm_state building and task_state spawning. 
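
The lock lines just above ("Acquiring lock \"13e3576e-...-events\" by nova.compute.manager.InstanceEvents.pop_instance_event", followed by "No waiting events found" and the WARNING about an unexpected network-vif-plugged event while the instance is still building) reflect a waiter-registry pattern: the compute service registers interest in externally delivered events per instance, and an incoming Neutron notification pops the matching waiter under a per-instance lock; if nothing was registered yet, the event is logged as unexpected. The sketch below is a deliberately simplified, hypothetical illustration of that pattern only; the class and method names are invented for this note and are not Nova's actual API.

    import threading

    # Hypothetical sketch of per-instance event bookkeeping suggested by the
    # "<uuid>-events" lock lines above. Names are illustrative, not Nova's API.
    class InstanceEventWaiters:
        def __init__(self):
            self._lock = threading.Lock()   # stands in for the "<uuid>-events" lock
            self._waiters = {}              # event name -> threading.Event

        def prepare_for(self, event_name):
            """Register interest in an event before triggering the external action."""
            with self._lock:
                waiter = threading.Event()
                self._waiters[event_name] = waiter
                return waiter

        def pop_event(self, event_name):
            """Dispatch an incoming event; warn if nobody was waiting for it."""
            with self._lock:
                waiter = self._waiters.pop(event_name, None)
            if waiter is None:
                print(f"WARNING: Received unexpected event {event_name}")
                return False
            waiter.set()
            return True

    if __name__ == '__main__':
        waiters = InstanceEventWaiters()
        # An event arriving before anyone registered for it is reported as
        # unexpected, matching the warning seen while the instance is spawning.
        waiters.pop_event('network-vif-plugged-6d2286a1-36df-47b8-93e7-424a5f04ce29')
        w = waiters.prepare_for('network-vif-plugged-6d2286a1-36df-47b8-93e7-424a5f04ce29')
        waiters.pop_event('network-vif-plugged-6d2286a1-36df-47b8-93e7-424a5f04ce29')
        print('waiter satisfied:', w.is_set())

In the log this explains why the same port event appears once as "unexpected" (vm_state building, task_state spawning) and is later consumed cleanly once the cache refresh has registered for it.
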
[ 757.771611] env[62070]: DEBUG nova.compute.manager [req-1501850e-62be-404e-9720-26900b9ff5c6 req-91875736-a690-4121-b900-e3f2ec8bd250 service nova] [instance: a3fcb849-b015-43aa-8f95-0d4a87e2cecc] Received event network-vif-deleted-fcd8b688-1cad-4cae-a1db-2d606703425c {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 757.771839] env[62070]: INFO nova.compute.manager [req-1501850e-62be-404e-9720-26900b9ff5c6 req-91875736-a690-4121-b900-e3f2ec8bd250 service nova] [instance: a3fcb849-b015-43aa-8f95-0d4a87e2cecc] Neutron deleted interface fcd8b688-1cad-4cae-a1db-2d606703425c; detaching it from the instance and deleting it from the info cache [ 757.771989] env[62070]: DEBUG nova.network.neutron [req-1501850e-62be-404e-9720-26900b9ff5c6 req-91875736-a690-4121-b900-e3f2ec8bd250 service nova] [instance: a3fcb849-b015-43aa-8f95-0d4a87e2cecc] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 757.786552] env[62070]: DEBUG oslo_vmware.api [None req-7c1b8e29-6c3f-402f-942e-08b08c8eac7d tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Task: {'id': task-1121582, 'name': PowerOnVM_Task} progress is 79%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 757.802165] env[62070]: DEBUG oslo_vmware.api [None req-884b25a6-886e-4a2f-85f7-4f7a1048c76f tempest-ServersTestBootFromVolume-622206804 tempest-ServersTestBootFromVolume-622206804-project-member] Task: {'id': task-1121583, 'name': RelocateVM_Task} progress is 42%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 757.845261] env[62070]: DEBUG oslo_concurrency.lockutils [None req-682a69a1-3ef5-481f-9200-81623493ef57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Acquiring lock "refresh_cache-13e3576e-4f4c-4541-a637-daa124cbf8dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 757.845435] env[62070]: DEBUG oslo_concurrency.lockutils [None req-682a69a1-3ef5-481f-9200-81623493ef57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Acquired lock "refresh_cache-13e3576e-4f4c-4541-a637-daa124cbf8dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 757.845659] env[62070]: DEBUG nova.network.neutron [None req-682a69a1-3ef5-481f-9200-81623493ef57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 13e3576e-4f4c-4541-a637-daa124cbf8dd] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 757.990969] env[62070]: DEBUG oslo_concurrency.lockutils [req-eb31445d-53b3-4bf0-a8df-ed840ebf358b req-060f0d76-6a3d-4ff1-bd54-15e42c65afa3 service nova] Releasing lock "refresh_cache-dd5d90e8-964a-4e1c-a98a-bcba37a1d79e" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 758.130334] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25cf13fe-bfd2-4172-bef4-d5eb3d1af391 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.141919] env[62070]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac7040dd-03d5-4300-8806-8899489bf023 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.178381] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d0e21a6-6cf2-4071-88a3-f1caad605666 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.188411] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-914c01e1-1794-4a9c-ab19-e55ebbb52afa {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.205898] env[62070]: DEBUG nova.compute.provider_tree [None req-e4f46748-c02f-46f3-a136-2698cbfaeb86 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 758.252803] env[62070]: INFO nova.compute.manager [-] [instance: a3fcb849-b015-43aa-8f95-0d4a87e2cecc] Took 1.69 seconds to deallocate network for instance. [ 758.274027] env[62070]: DEBUG oslo_vmware.api [None req-7c1b8e29-6c3f-402f-942e-08b08c8eac7d tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Task: {'id': task-1121582, 'name': PowerOnVM_Task, 'duration_secs': 0.673078} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 758.274027] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-7c1b8e29-6c3f-402f-942e-08b08c8eac7d tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: a3c42653-9a4b-42d3-bc38-8d46d95c8f64] Powered on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 758.274027] env[62070]: INFO nova.compute.manager [None req-7c1b8e29-6c3f-402f-942e-08b08c8eac7d tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: a3c42653-9a4b-42d3-bc38-8d46d95c8f64] Took 9.26 seconds to spawn the instance on the hypervisor. 
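
The recurring "Task: {'id': task-..., 'name': ...} progress is N%." records, ending in "completed successfully" with a duration_secs value (as with task-1121582 / PowerOnVM_Task just above), come from the wait_for_task/_poll_task helpers at oslo_vmware/api.py:397/434/444: the driver submits a vCenter task and then polls its state until it succeeds or faults. The following is a minimal, self-contained sketch of that poll-until-done pattern under stated assumptions; the Task class and get_task_info() below are stand-ins invented for this note, not the oslo.vmware implementation.

    import time

    class Task:
        """Fake vCenter task that reports increasing progress (illustrative only)."""
        def __init__(self, task_id, name, steps=5):
            self.id = task_id
            self.name = name
            self._progress = 0
            self._step = 100 // steps

        def get_task_info(self):
            self._progress = min(100, self._progress + self._step)
            state = 'success' if self._progress >= 100 else 'running'
            return {'id': self.id, 'name': self.name,
                    'progress': self._progress, 'state': state}

    def wait_for_task(task, poll_interval=0.5):
        """Poll a task until it reports success, logging progress like the lines above."""
        while True:
            info = task.get_task_info()
            print(f"Task: {{'id': {info['id']!r}, 'name': {info['name']!r}}} "
                  f"progress is {info['progress']}%.")
            if info['state'] == 'success':
                print(f"Task {info['id']} completed successfully.")
                return info
            time.sleep(poll_interval)

    if __name__ == '__main__':
        wait_for_task(Task('task-1121582', 'PowerOnVM_Task'))

The fixed poll interval is why progress shows up in coarse jumps (0%, 79%, done) rather than continuously in the log.
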
[ 758.274027] env[62070]: DEBUG nova.compute.manager [None req-7c1b8e29-6c3f-402f-942e-08b08c8eac7d tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: a3c42653-9a4b-42d3-bc38-8d46d95c8f64] Checking state {{(pid=62070) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 758.274784] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ecb246d8-54f9-42a2-bfea-ec8de7e5e584 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.278518] env[62070]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-aad6144b-040b-49cc-bc40-3984de24acfc {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.300059] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10544a4f-a287-4257-9f72-6c2c2f9e80db {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.320323] env[62070]: DEBUG oslo_vmware.api [None req-884b25a6-886e-4a2f-85f7-4f7a1048c76f tempest-ServersTestBootFromVolume-622206804 tempest-ServersTestBootFromVolume-622206804-project-member] Task: {'id': task-1121583, 'name': RelocateVM_Task} progress is 56%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 758.332341] env[62070]: DEBUG nova.compute.manager [req-1501850e-62be-404e-9720-26900b9ff5c6 req-91875736-a690-4121-b900-e3f2ec8bd250 service nova] [instance: a3fcb849-b015-43aa-8f95-0d4a87e2cecc] Detach interface failed, port_id=fcd8b688-1cad-4cae-a1db-2d606703425c, reason: Instance a3fcb849-b015-43aa-8f95-0d4a87e2cecc could not be found. {{(pid=62070) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 758.333337] env[62070]: DEBUG nova.compute.manager [req-1501850e-62be-404e-9720-26900b9ff5c6 req-91875736-a690-4121-b900-e3f2ec8bd250 service nova] [instance: 13e3576e-4f4c-4541-a637-daa124cbf8dd] Received event network-changed-6d2286a1-36df-47b8-93e7-424a5f04ce29 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 758.333337] env[62070]: DEBUG nova.compute.manager [req-1501850e-62be-404e-9720-26900b9ff5c6 req-91875736-a690-4121-b900-e3f2ec8bd250 service nova] [instance: 13e3576e-4f4c-4541-a637-daa124cbf8dd] Refreshing instance network info cache due to event network-changed-6d2286a1-36df-47b8-93e7-424a5f04ce29. {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 758.333337] env[62070]: DEBUG oslo_concurrency.lockutils [req-1501850e-62be-404e-9720-26900b9ff5c6 req-91875736-a690-4121-b900-e3f2ec8bd250 service nova] Acquiring lock "refresh_cache-13e3576e-4f4c-4541-a637-daa124cbf8dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 758.381732] env[62070]: DEBUG nova.network.neutron [None req-682a69a1-3ef5-481f-9200-81623493ef57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 13e3576e-4f4c-4541-a637-daa124cbf8dd] Instance cache missing network info. 
{{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 758.560964] env[62070]: DEBUG nova.network.neutron [None req-682a69a1-3ef5-481f-9200-81623493ef57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 13e3576e-4f4c-4541-a637-daa124cbf8dd] Updating instance_info_cache with network_info: [{"id": "6d2286a1-36df-47b8-93e7-424a5f04ce29", "address": "fa:16:3e:9a:13:69", "network": {"id": "5ea0fffc-372c-450e-b27b-10959077d58f", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1853458988-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9191f0e6c2ee401abca64c0780e230bf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f3c995e9-7f2f-420c-880a-d60da6e708ad", "external-id": "nsx-vlan-transportzone-166", "segmentation_id": 166, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6d2286a1-36", "ovs_interfaceid": "6d2286a1-36df-47b8-93e7-424a5f04ce29", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 758.596479] env[62070]: DEBUG nova.compute.manager [None req-c8619bac-3476-49c3-92b7-dcaac00d2762 tempest-ServersTestFqdnHostnames-1065104165 tempest-ServersTestFqdnHostnames-1065104165-project-member] [instance: 748c94c7-1233-44f4-a71a-176b26518399] Start spawning the instance on the hypervisor. 
{{(pid=62070) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 758.623731] env[62070]: DEBUG nova.virt.hardware [None req-c8619bac-3476-49c3-92b7-dcaac00d2762 tempest-ServersTestFqdnHostnames-1065104165 tempest-ServersTestFqdnHostnames-1065104165-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T09:21:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T09:21:17Z,direct_url=,disk_format='vmdk',id=43ea607c-7ece-4601-9b11-75c6a16aa7dd,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9d42cb2bbadf40d6b35f237f71234611',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T09:21:18Z,virtual_size=,visibility=), allow threads: False {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 758.623927] env[62070]: DEBUG nova.virt.hardware [None req-c8619bac-3476-49c3-92b7-dcaac00d2762 tempest-ServersTestFqdnHostnames-1065104165 tempest-ServersTestFqdnHostnames-1065104165-project-member] Flavor limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 758.624131] env[62070]: DEBUG nova.virt.hardware [None req-c8619bac-3476-49c3-92b7-dcaac00d2762 tempest-ServersTestFqdnHostnames-1065104165 tempest-ServersTestFqdnHostnames-1065104165-project-member] Image limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 758.624334] env[62070]: DEBUG nova.virt.hardware [None req-c8619bac-3476-49c3-92b7-dcaac00d2762 tempest-ServersTestFqdnHostnames-1065104165 tempest-ServersTestFqdnHostnames-1065104165-project-member] Flavor pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 758.624480] env[62070]: DEBUG nova.virt.hardware [None req-c8619bac-3476-49c3-92b7-dcaac00d2762 tempest-ServersTestFqdnHostnames-1065104165 tempest-ServersTestFqdnHostnames-1065104165-project-member] Image pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 758.624627] env[62070]: DEBUG nova.virt.hardware [None req-c8619bac-3476-49c3-92b7-dcaac00d2762 tempest-ServersTestFqdnHostnames-1065104165 tempest-ServersTestFqdnHostnames-1065104165-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 758.624829] env[62070]: DEBUG nova.virt.hardware [None req-c8619bac-3476-49c3-92b7-dcaac00d2762 tempest-ServersTestFqdnHostnames-1065104165 tempest-ServersTestFqdnHostnames-1065104165-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 758.624987] env[62070]: DEBUG nova.virt.hardware [None req-c8619bac-3476-49c3-92b7-dcaac00d2762 tempest-ServersTestFqdnHostnames-1065104165 tempest-ServersTestFqdnHostnames-1065104165-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 758.625193] 
env[62070]: DEBUG nova.virt.hardware [None req-c8619bac-3476-49c3-92b7-dcaac00d2762 tempest-ServersTestFqdnHostnames-1065104165 tempest-ServersTestFqdnHostnames-1065104165-project-member] Got 1 possible topologies {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 758.625365] env[62070]: DEBUG nova.virt.hardware [None req-c8619bac-3476-49c3-92b7-dcaac00d2762 tempest-ServersTestFqdnHostnames-1065104165 tempest-ServersTestFqdnHostnames-1065104165-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 758.625551] env[62070]: DEBUG nova.virt.hardware [None req-c8619bac-3476-49c3-92b7-dcaac00d2762 tempest-ServersTestFqdnHostnames-1065104165 tempest-ServersTestFqdnHostnames-1065104165-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 758.626477] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68d0bb93-5a09-424a-a92e-f87892ad9540 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.637750] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-347e7a43-3d93-4a8e-a883-0616fd4810d5 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.710878] env[62070]: DEBUG nova.scheduler.client.report [None req-e4f46748-c02f-46f3-a136-2698cbfaeb86 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 758.760641] env[62070]: DEBUG oslo_concurrency.lockutils [None req-693c53de-9486-417a-87eb-2668a7f83b07 tempest-ServerAddressesTestJSON-560794175 tempest-ServerAddressesTestJSON-560794175-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 758.802582] env[62070]: INFO nova.compute.manager [None req-7c1b8e29-6c3f-402f-942e-08b08c8eac7d tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: a3c42653-9a4b-42d3-bc38-8d46d95c8f64] Took 28.63 seconds to build instance. [ 758.809613] env[62070]: DEBUG oslo_vmware.api [None req-884b25a6-886e-4a2f-85f7-4f7a1048c76f tempest-ServersTestBootFromVolume-622206804 tempest-ServersTestBootFromVolume-622206804-project-member] Task: {'id': task-1121583, 'name': RelocateVM_Task} progress is 69%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 759.063706] env[62070]: DEBUG oslo_concurrency.lockutils [None req-682a69a1-3ef5-481f-9200-81623493ef57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Releasing lock "refresh_cache-13e3576e-4f4c-4541-a637-daa124cbf8dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 759.064127] env[62070]: DEBUG nova.compute.manager [None req-682a69a1-3ef5-481f-9200-81623493ef57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 13e3576e-4f4c-4541-a637-daa124cbf8dd] Instance network_info: |[{"id": "6d2286a1-36df-47b8-93e7-424a5f04ce29", "address": "fa:16:3e:9a:13:69", "network": {"id": "5ea0fffc-372c-450e-b27b-10959077d58f", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1853458988-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9191f0e6c2ee401abca64c0780e230bf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f3c995e9-7f2f-420c-880a-d60da6e708ad", "external-id": "nsx-vlan-transportzone-166", "segmentation_id": 166, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6d2286a1-36", "ovs_interfaceid": "6d2286a1-36df-47b8-93e7-424a5f04ce29", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 759.064406] env[62070]: DEBUG oslo_concurrency.lockutils [req-1501850e-62be-404e-9720-26900b9ff5c6 req-91875736-a690-4121-b900-e3f2ec8bd250 service nova] Acquired lock "refresh_cache-13e3576e-4f4c-4541-a637-daa124cbf8dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 759.064759] env[62070]: DEBUG nova.network.neutron [req-1501850e-62be-404e-9720-26900b9ff5c6 req-91875736-a690-4121-b900-e3f2ec8bd250 service nova] [instance: 13e3576e-4f4c-4541-a637-daa124cbf8dd] Refreshing network info cache for port 6d2286a1-36df-47b8-93e7-424a5f04ce29 {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 759.066116] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-682a69a1-3ef5-481f-9200-81623493ef57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 13e3576e-4f4c-4541-a637-daa124cbf8dd] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:9a:13:69', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f3c995e9-7f2f-420c-880a-d60da6e708ad', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6d2286a1-36df-47b8-93e7-424a5f04ce29', 'vif_model': 'vmxnet3'}] {{(pid=62070) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 759.074369] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-682a69a1-3ef5-481f-9200-81623493ef57 tempest-DeleteServersTestJSON-2112665073 
tempest-DeleteServersTestJSON-2112665073-project-member] Creating folder: Project (9191f0e6c2ee401abca64c0780e230bf). Parent ref: group-v245319. {{(pid=62070) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 759.075171] env[62070]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-26b9fd40-fedd-45db-956d-4abe02d2a8f9 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.088176] env[62070]: INFO nova.virt.vmwareapi.vm_util [None req-682a69a1-3ef5-481f-9200-81623493ef57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Created folder: Project (9191f0e6c2ee401abca64c0780e230bf) in parent group-v245319. [ 759.089316] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-682a69a1-3ef5-481f-9200-81623493ef57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Creating folder: Instances. Parent ref: group-v245368. {{(pid=62070) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 759.089316] env[62070]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-87491cc7-4079-4d01-9c12-31697a97ae9d {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.103029] env[62070]: INFO nova.virt.vmwareapi.vm_util [None req-682a69a1-3ef5-481f-9200-81623493ef57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Created folder: Instances in parent group-v245368. [ 759.103029] env[62070]: DEBUG oslo.service.loopingcall [None req-682a69a1-3ef5-481f-9200-81623493ef57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 759.103029] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 13e3576e-4f4c-4541-a637-daa124cbf8dd] Creating VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 759.103029] env[62070]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-03072c73-086f-4d42-b8d0-9186b9b47d91 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.127184] env[62070]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 759.127184] env[62070]: value = "task-1121586" [ 759.127184] env[62070]: _type = "Task" [ 759.127184] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 759.137947] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1121586, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 759.216865] env[62070]: DEBUG oslo_concurrency.lockutils [None req-e4f46748-c02f-46f3-a136-2698cbfaeb86 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.648s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 759.217252] env[62070]: DEBUG nova.compute.manager [None req-e4f46748-c02f-46f3-a136-2698cbfaeb86 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] [instance: 359ae9f2-a907-459e-99b9-3e043d5d015f] Start building networks asynchronously for instance. {{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 759.219957] env[62070]: DEBUG oslo_concurrency.lockutils [None req-f6ff1678-8a45-4fca-ba2d-cf78a12bb48c tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.407s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 759.224024] env[62070]: INFO nova.compute.claims [None req-f6ff1678-8a45-4fca-ba2d-cf78a12bb48c tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] [instance: 242cf24f-f9e0-49ca-9b3e-a2b88b3cdcdc] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 759.241203] env[62070]: DEBUG nova.compute.manager [req-645977e2-0495-41c2-9d09-758c93017a92 req-53f92e43-884d-40bc-b723-d8be7f13f0c9 service nova] [instance: 748c94c7-1233-44f4-a71a-176b26518399] Received event network-vif-plugged-3fd232c6-4869-4c79-8250-ae854eb69ae0 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 759.241203] env[62070]: DEBUG oslo_concurrency.lockutils [req-645977e2-0495-41c2-9d09-758c93017a92 req-53f92e43-884d-40bc-b723-d8be7f13f0c9 service nova] Acquiring lock "748c94c7-1233-44f4-a71a-176b26518399-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 759.241203] env[62070]: DEBUG oslo_concurrency.lockutils [req-645977e2-0495-41c2-9d09-758c93017a92 req-53f92e43-884d-40bc-b723-d8be7f13f0c9 service nova] Lock "748c94c7-1233-44f4-a71a-176b26518399-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 759.241203] env[62070]: DEBUG oslo_concurrency.lockutils [req-645977e2-0495-41c2-9d09-758c93017a92 req-53f92e43-884d-40bc-b723-d8be7f13f0c9 service nova] Lock "748c94c7-1233-44f4-a71a-176b26518399-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 759.241203] env[62070]: DEBUG nova.compute.manager [req-645977e2-0495-41c2-9d09-758c93017a92 req-53f92e43-884d-40bc-b723-d8be7f13f0c9 service nova] [instance: 748c94c7-1233-44f4-a71a-176b26518399] No waiting events found dispatching network-vif-plugged-3fd232c6-4869-4c79-8250-ae854eb69ae0 {{(pid=62070) 
pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 759.241421] env[62070]: WARNING nova.compute.manager [req-645977e2-0495-41c2-9d09-758c93017a92 req-53f92e43-884d-40bc-b723-d8be7f13f0c9 service nova] [instance: 748c94c7-1233-44f4-a71a-176b26518399] Received unexpected event network-vif-plugged-3fd232c6-4869-4c79-8250-ae854eb69ae0 for instance with vm_state building and task_state spawning. [ 759.310993] env[62070]: DEBUG oslo_concurrency.lockutils [None req-7c1b8e29-6c3f-402f-942e-08b08c8eac7d tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Lock "a3c42653-9a4b-42d3-bc38-8d46d95c8f64" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 111.243s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 759.312285] env[62070]: DEBUG oslo_vmware.api [None req-884b25a6-886e-4a2f-85f7-4f7a1048c76f tempest-ServersTestBootFromVolume-622206804 tempest-ServersTestBootFromVolume-622206804-project-member] Task: {'id': task-1121583, 'name': RelocateVM_Task} progress is 82%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 759.400454] env[62070]: DEBUG nova.network.neutron [None req-c8619bac-3476-49c3-92b7-dcaac00d2762 tempest-ServersTestFqdnHostnames-1065104165 tempest-ServersTestFqdnHostnames-1065104165-project-member] [instance: 748c94c7-1233-44f4-a71a-176b26518399] Successfully updated port: 3fd232c6-4869-4c79-8250-ae854eb69ae0 {{(pid=62070) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 759.645432] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1121586, 'name': CreateVM_Task} progress is 99%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 759.727046] env[62070]: DEBUG nova.compute.utils [None req-e4f46748-c02f-46f3-a136-2698cbfaeb86 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Using /dev/sd instead of None {{(pid=62070) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 759.730690] env[62070]: DEBUG nova.compute.manager [None req-e4f46748-c02f-46f3-a136-2698cbfaeb86 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] [instance: 359ae9f2-a907-459e-99b9-3e043d5d015f] Allocating IP information in the background. {{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 759.730690] env[62070]: DEBUG nova.network.neutron [None req-e4f46748-c02f-46f3-a136-2698cbfaeb86 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] [instance: 359ae9f2-a907-459e-99b9-3e043d5d015f] allocate_for_instance() {{(pid=62070) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 759.804462] env[62070]: DEBUG oslo_vmware.api [None req-884b25a6-886e-4a2f-85f7-4f7a1048c76f tempest-ServersTestBootFromVolume-622206804 tempest-ServersTestBootFromVolume-622206804-project-member] Task: {'id': task-1121583, 'name': RelocateVM_Task} progress is 97%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 759.805987] env[62070]: DEBUG nova.policy [None req-e4f46748-c02f-46f3-a136-2698cbfaeb86 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c811421a3cb34cda9709b071376ee2a9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '074c9a0a64ab4a2abb709986bb5c8e91', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62070) authorize /opt/stack/nova/nova/policy.py:201}} [ 759.815551] env[62070]: DEBUG nova.compute.manager [None req-2de264e4-258a-4ba0-b64b-88da6d460fb5 tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] [instance: 2368b649-f931-454c-92cc-971df4155d90] Starting instance... {{(pid=62070) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 759.860268] env[62070]: DEBUG nova.network.neutron [req-1501850e-62be-404e-9720-26900b9ff5c6 req-91875736-a690-4121-b900-e3f2ec8bd250 service nova] [instance: 13e3576e-4f4c-4541-a637-daa124cbf8dd] Updated VIF entry in instance network info cache for port 6d2286a1-36df-47b8-93e7-424a5f04ce29. {{(pid=62070) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 759.861257] env[62070]: DEBUG nova.network.neutron [req-1501850e-62be-404e-9720-26900b9ff5c6 req-91875736-a690-4121-b900-e3f2ec8bd250 service nova] [instance: 13e3576e-4f4c-4541-a637-daa124cbf8dd] Updating instance_info_cache with network_info: [{"id": "6d2286a1-36df-47b8-93e7-424a5f04ce29", "address": "fa:16:3e:9a:13:69", "network": {"id": "5ea0fffc-372c-450e-b27b-10959077d58f", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1853458988-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9191f0e6c2ee401abca64c0780e230bf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f3c995e9-7f2f-420c-880a-d60da6e708ad", "external-id": "nsx-vlan-transportzone-166", "segmentation_id": 166, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6d2286a1-36", "ovs_interfaceid": "6d2286a1-36df-47b8-93e7-424a5f04ce29", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 759.904261] env[62070]: DEBUG oslo_concurrency.lockutils [None req-c8619bac-3476-49c3-92b7-dcaac00d2762 tempest-ServersTestFqdnHostnames-1065104165 tempest-ServersTestFqdnHostnames-1065104165-project-member] Acquiring lock "refresh_cache-748c94c7-1233-44f4-a71a-176b26518399" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 759.904473] env[62070]: DEBUG oslo_concurrency.lockutils 
[None req-c8619bac-3476-49c3-92b7-dcaac00d2762 tempest-ServersTestFqdnHostnames-1065104165 tempest-ServersTestFqdnHostnames-1065104165-project-member] Acquired lock "refresh_cache-748c94c7-1233-44f4-a71a-176b26518399" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 759.904473] env[62070]: DEBUG nova.network.neutron [None req-c8619bac-3476-49c3-92b7-dcaac00d2762 tempest-ServersTestFqdnHostnames-1065104165 tempest-ServersTestFqdnHostnames-1065104165-project-member] [instance: 748c94c7-1233-44f4-a71a-176b26518399] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 760.146950] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1121586, 'name': CreateVM_Task, 'duration_secs': 0.658725} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 760.146950] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 13e3576e-4f4c-4541-a637-daa124cbf8dd] Created VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 760.147362] env[62070]: DEBUG oslo_concurrency.lockutils [None req-682a69a1-3ef5-481f-9200-81623493ef57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 760.151022] env[62070]: DEBUG oslo_concurrency.lockutils [None req-682a69a1-3ef5-481f-9200-81623493ef57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Acquired lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 760.151022] env[62070]: DEBUG oslo_concurrency.lockutils [None req-682a69a1-3ef5-481f-9200-81623493ef57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 760.151022] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d9b984e1-7f7c-4550-a4b1-7ad28bb8ad36 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.152822] env[62070]: DEBUG oslo_vmware.api [None req-682a69a1-3ef5-481f-9200-81623493ef57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Waiting for the task: (returnval){ [ 760.152822] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]52ba8589-7df0-4371-fe3c-19159cf1e71d" [ 760.152822] env[62070]: _type = "Task" [ 760.152822] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 760.162907] env[62070]: DEBUG oslo_vmware.api [None req-682a69a1-3ef5-481f-9200-81623493ef57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52ba8589-7df0-4371-fe3c-19159cf1e71d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 760.231383] env[62070]: DEBUG nova.compute.manager [None req-e4f46748-c02f-46f3-a136-2698cbfaeb86 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] [instance: 359ae9f2-a907-459e-99b9-3e043d5d015f] Start building block device mappings for instance. {{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 760.273685] env[62070]: DEBUG nova.network.neutron [None req-e4f46748-c02f-46f3-a136-2698cbfaeb86 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] [instance: 359ae9f2-a907-459e-99b9-3e043d5d015f] Successfully created port: 98c76f66-e1d2-463c-b1a1-7d539909e2a9 {{(pid=62070) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 760.304116] env[62070]: DEBUG oslo_vmware.api [None req-884b25a6-886e-4a2f-85f7-4f7a1048c76f tempest-ServersTestBootFromVolume-622206804 tempest-ServersTestBootFromVolume-622206804-project-member] Task: {'id': task-1121583, 'name': RelocateVM_Task} progress is 98%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 760.340689] env[62070]: DEBUG oslo_concurrency.lockutils [None req-2de264e4-258a-4ba0-b64b-88da6d460fb5 tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 760.364897] env[62070]: DEBUG oslo_concurrency.lockutils [req-1501850e-62be-404e-9720-26900b9ff5c6 req-91875736-a690-4121-b900-e3f2ec8bd250 service nova] Releasing lock "refresh_cache-13e3576e-4f4c-4541-a637-daa124cbf8dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 760.456970] env[62070]: DEBUG nova.network.neutron [None req-c8619bac-3476-49c3-92b7-dcaac00d2762 tempest-ServersTestFqdnHostnames-1065104165 tempest-ServersTestFqdnHostnames-1065104165-project-member] [instance: 748c94c7-1233-44f4-a71a-176b26518399] Instance cache missing network info. {{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 760.668943] env[62070]: DEBUG oslo_vmware.api [None req-682a69a1-3ef5-481f-9200-81623493ef57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52ba8589-7df0-4371-fe3c-19159cf1e71d, 'name': SearchDatastore_Task, 'duration_secs': 0.047611} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 760.669283] env[62070]: DEBUG oslo_concurrency.lockutils [None req-682a69a1-3ef5-481f-9200-81623493ef57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Releasing lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 760.669518] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-682a69a1-3ef5-481f-9200-81623493ef57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 13e3576e-4f4c-4541-a637-daa124cbf8dd] Processing image 43ea607c-7ece-4601-9b11-75c6a16aa7dd {{(pid=62070) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 760.669931] env[62070]: DEBUG oslo_concurrency.lockutils [None req-682a69a1-3ef5-481f-9200-81623493ef57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 760.669931] env[62070]: DEBUG oslo_concurrency.lockutils [None req-682a69a1-3ef5-481f-9200-81623493ef57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Acquired lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 760.670158] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-682a69a1-3ef5-481f-9200-81623493ef57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 760.670325] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c505af4a-221c-4b5a-a305-682f8385159b {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.684131] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-682a69a1-3ef5-481f-9200-81623493ef57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 760.684131] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-682a69a1-3ef5-481f-9200-81623493ef57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62070) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 760.684131] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9e590294-e915-43c1-8f25-8a9b9f43c597 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.689755] env[62070]: DEBUG oslo_vmware.api [None req-682a69a1-3ef5-481f-9200-81623493ef57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Waiting for the task: (returnval){ [ 760.689755] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]52edb759-8cc9-6c58-2a1a-13d32d16d7c7" [ 760.689755] env[62070]: _type = "Task" [ 760.689755] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 760.697507] env[62070]: DEBUG oslo_vmware.api [None req-682a69a1-3ef5-481f-9200-81623493ef57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52edb759-8cc9-6c58-2a1a-13d32d16d7c7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 760.698479] env[62070]: DEBUG nova.network.neutron [None req-c8619bac-3476-49c3-92b7-dcaac00d2762 tempest-ServersTestFqdnHostnames-1065104165 tempest-ServersTestFqdnHostnames-1065104165-project-member] [instance: 748c94c7-1233-44f4-a71a-176b26518399] Updating instance_info_cache with network_info: [{"id": "3fd232c6-4869-4c79-8250-ae854eb69ae0", "address": "fa:16:3e:3f:f0:95", "network": {"id": "bb187032-3d1c-4d95-9cf9-aa3d3ef0b45f", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-1186437126-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a25a88ab06e440e3b9813e276194143a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5ce1511e-1eaa-45c6-a1ef-2b714c814fa1", "external-id": "nsx-vlan-transportzone-300", "segmentation_id": 300, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3fd232c6-48", "ovs_interfaceid": "3fd232c6-4869-4c79-8250-ae854eb69ae0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 760.738133] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-255695d6-f4cb-44da-b258-1f2321c5ff47 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.750575] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a36db16-d2cb-48e0-a1c5-26e7b751dd7a {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.790197] env[62070]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b32d21c6-f008-4d72-b38a-e238b8035200 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.802260] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-943c95d0-7a97-437f-a172-467d8ea5e4f3 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.810866] env[62070]: DEBUG oslo_vmware.api [None req-884b25a6-886e-4a2f-85f7-4f7a1048c76f tempest-ServersTestBootFromVolume-622206804 tempest-ServersTestBootFromVolume-622206804-project-member] Task: {'id': task-1121583, 'name': RelocateVM_Task} progress is 98%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 760.819260] env[62070]: DEBUG nova.compute.provider_tree [None req-f6ff1678-8a45-4fca-ba2d-cf78a12bb48c tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 761.200686] env[62070]: DEBUG oslo_vmware.api [None req-682a69a1-3ef5-481f-9200-81623493ef57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52edb759-8cc9-6c58-2a1a-13d32d16d7c7, 'name': SearchDatastore_Task, 'duration_secs': 0.019839} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 761.201800] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d5370790-d9a5-4b7d-9b08-4bab0d5b3a08 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.204682] env[62070]: DEBUG oslo_concurrency.lockutils [None req-c8619bac-3476-49c3-92b7-dcaac00d2762 tempest-ServersTestFqdnHostnames-1065104165 tempest-ServersTestFqdnHostnames-1065104165-project-member] Releasing lock "refresh_cache-748c94c7-1233-44f4-a71a-176b26518399" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 761.205039] env[62070]: DEBUG nova.compute.manager [None req-c8619bac-3476-49c3-92b7-dcaac00d2762 tempest-ServersTestFqdnHostnames-1065104165 tempest-ServersTestFqdnHostnames-1065104165-project-member] [instance: 748c94c7-1233-44f4-a71a-176b26518399] Instance network_info: |[{"id": "3fd232c6-4869-4c79-8250-ae854eb69ae0", "address": "fa:16:3e:3f:f0:95", "network": {"id": "bb187032-3d1c-4d95-9cf9-aa3d3ef0b45f", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-1186437126-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a25a88ab06e440e3b9813e276194143a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5ce1511e-1eaa-45c6-a1ef-2b714c814fa1", "external-id": "nsx-vlan-transportzone-300", "segmentation_id": 300, "bound_drivers": 
{"0": "nsxv3"}}, "devname": "tap3fd232c6-48", "ovs_interfaceid": "3fd232c6-4869-4c79-8250-ae854eb69ae0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 761.205474] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-c8619bac-3476-49c3-92b7-dcaac00d2762 tempest-ServersTestFqdnHostnames-1065104165 tempest-ServersTestFqdnHostnames-1065104165-project-member] [instance: 748c94c7-1233-44f4-a71a-176b26518399] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:3f:f0:95', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '5ce1511e-1eaa-45c6-a1ef-2b714c814fa1', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3fd232c6-4869-4c79-8250-ae854eb69ae0', 'vif_model': 'vmxnet3'}] {{(pid=62070) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 761.213198] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-c8619bac-3476-49c3-92b7-dcaac00d2762 tempest-ServersTestFqdnHostnames-1065104165 tempest-ServersTestFqdnHostnames-1065104165-project-member] Creating folder: Project (a25a88ab06e440e3b9813e276194143a). Parent ref: group-v245319. {{(pid=62070) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 761.213915] env[62070]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7bc2b018-995a-4915-8abb-532da6312df0 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.217180] env[62070]: DEBUG oslo_vmware.api [None req-682a69a1-3ef5-481f-9200-81623493ef57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Waiting for the task: (returnval){ [ 761.217180] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]52ceaa38-760b-3138-a0ef-60e6f5e8fa89" [ 761.217180] env[62070]: _type = "Task" [ 761.217180] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 761.225496] env[62070]: DEBUG oslo_vmware.api [None req-682a69a1-3ef5-481f-9200-81623493ef57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52ceaa38-760b-3138-a0ef-60e6f5e8fa89, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 761.226750] env[62070]: INFO nova.virt.vmwareapi.vm_util [None req-c8619bac-3476-49c3-92b7-dcaac00d2762 tempest-ServersTestFqdnHostnames-1065104165 tempest-ServersTestFqdnHostnames-1065104165-project-member] Created folder: Project (a25a88ab06e440e3b9813e276194143a) in parent group-v245319. [ 761.226947] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-c8619bac-3476-49c3-92b7-dcaac00d2762 tempest-ServersTestFqdnHostnames-1065104165 tempest-ServersTestFqdnHostnames-1065104165-project-member] Creating folder: Instances. Parent ref: group-v245371. 
{{(pid=62070) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 761.227210] env[62070]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8078ed5a-8bde-4d5f-9a8a-90e07982dfcb {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.235513] env[62070]: INFO nova.virt.vmwareapi.vm_util [None req-c8619bac-3476-49c3-92b7-dcaac00d2762 tempest-ServersTestFqdnHostnames-1065104165 tempest-ServersTestFqdnHostnames-1065104165-project-member] Created folder: Instances in parent group-v245371. [ 761.235786] env[62070]: DEBUG oslo.service.loopingcall [None req-c8619bac-3476-49c3-92b7-dcaac00d2762 tempest-ServersTestFqdnHostnames-1065104165 tempest-ServersTestFqdnHostnames-1065104165-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 761.235987] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 748c94c7-1233-44f4-a71a-176b26518399] Creating VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 761.236204] env[62070]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a110acd9-db54-4198-bb6d-442681768fe9 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.252575] env[62070]: DEBUG nova.compute.manager [None req-e4f46748-c02f-46f3-a136-2698cbfaeb86 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] [instance: 359ae9f2-a907-459e-99b9-3e043d5d015f] Start spawning the instance on the hypervisor. {{(pid=62070) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 761.258693] env[62070]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 761.258693] env[62070]: value = "task-1121589" [ 761.258693] env[62070]: _type = "Task" [ 761.258693] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 761.263991] env[62070]: DEBUG nova.compute.manager [req-0cfc9740-68b3-4cad-a004-e47b15bc4aaf req-b214fe4f-d03f-4103-a709-995a4b1ece1f service nova] [instance: 748c94c7-1233-44f4-a71a-176b26518399] Received event network-changed-3fd232c6-4869-4c79-8250-ae854eb69ae0 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 761.264366] env[62070]: DEBUG nova.compute.manager [req-0cfc9740-68b3-4cad-a004-e47b15bc4aaf req-b214fe4f-d03f-4103-a709-995a4b1ece1f service nova] [instance: 748c94c7-1233-44f4-a71a-176b26518399] Refreshing instance network info cache due to event network-changed-3fd232c6-4869-4c79-8250-ae854eb69ae0. 
{{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 761.264460] env[62070]: DEBUG oslo_concurrency.lockutils [req-0cfc9740-68b3-4cad-a004-e47b15bc4aaf req-b214fe4f-d03f-4103-a709-995a4b1ece1f service nova] Acquiring lock "refresh_cache-748c94c7-1233-44f4-a71a-176b26518399" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 761.264571] env[62070]: DEBUG oslo_concurrency.lockutils [req-0cfc9740-68b3-4cad-a004-e47b15bc4aaf req-b214fe4f-d03f-4103-a709-995a4b1ece1f service nova] Acquired lock "refresh_cache-748c94c7-1233-44f4-a71a-176b26518399" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 761.264741] env[62070]: DEBUG nova.network.neutron [req-0cfc9740-68b3-4cad-a004-e47b15bc4aaf req-b214fe4f-d03f-4103-a709-995a4b1ece1f service nova] [instance: 748c94c7-1233-44f4-a71a-176b26518399] Refreshing network info cache for port 3fd232c6-4869-4c79-8250-ae854eb69ae0 {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 761.270857] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1121589, 'name': CreateVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 761.278520] env[62070]: DEBUG nova.virt.hardware [None req-e4f46748-c02f-46f3-a136-2698cbfaeb86 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T09:21:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T09:21:17Z,direct_url=,disk_format='vmdk',id=43ea607c-7ece-4601-9b11-75c6a16aa7dd,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9d42cb2bbadf40d6b35f237f71234611',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T09:21:18Z,virtual_size=,visibility=), allow threads: False {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 761.278752] env[62070]: DEBUG nova.virt.hardware [None req-e4f46748-c02f-46f3-a136-2698cbfaeb86 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Flavor limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 761.278905] env[62070]: DEBUG nova.virt.hardware [None req-e4f46748-c02f-46f3-a136-2698cbfaeb86 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Image limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 761.279096] env[62070]: DEBUG nova.virt.hardware [None req-e4f46748-c02f-46f3-a136-2698cbfaeb86 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Flavor pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 761.279247] env[62070]: DEBUG nova.virt.hardware [None req-e4f46748-c02f-46f3-a136-2698cbfaeb86 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Image pref 0:0:0 {{(pid=62070) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 761.279802] env[62070]: DEBUG nova.virt.hardware [None req-e4f46748-c02f-46f3-a136-2698cbfaeb86 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 761.279802] env[62070]: DEBUG nova.virt.hardware [None req-e4f46748-c02f-46f3-a136-2698cbfaeb86 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 761.279802] env[62070]: DEBUG nova.virt.hardware [None req-e4f46748-c02f-46f3-a136-2698cbfaeb86 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 761.279958] env[62070]: DEBUG nova.virt.hardware [None req-e4f46748-c02f-46f3-a136-2698cbfaeb86 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Got 1 possible topologies {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 761.280105] env[62070]: DEBUG nova.virt.hardware [None req-e4f46748-c02f-46f3-a136-2698cbfaeb86 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 761.280221] env[62070]: DEBUG nova.virt.hardware [None req-e4f46748-c02f-46f3-a136-2698cbfaeb86 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 761.281397] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04c21468-bbd0-4415-a501-eef52591dc60 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.289646] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ae5f5f3-ce7b-464e-a51e-e6dc55afbe2f {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.312477] env[62070]: DEBUG oslo_vmware.api [None req-884b25a6-886e-4a2f-85f7-4f7a1048c76f tempest-ServersTestBootFromVolume-622206804 tempest-ServersTestBootFromVolume-622206804-project-member] Task: {'id': task-1121583, 'name': RelocateVM_Task} progress is 98%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 761.322587] env[62070]: DEBUG nova.scheduler.client.report [None req-f6ff1678-8a45-4fca-ba2d-cf78a12bb48c tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 761.359558] env[62070]: DEBUG oslo_concurrency.lockutils [None req-b5d7932b-5fe7-4bae-8652-828c067cb041 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Acquiring lock "65fe3720-95cb-4620-b1c7-eae9e3bc3943" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 761.359796] env[62070]: DEBUG oslo_concurrency.lockutils [None req-b5d7932b-5fe7-4bae-8652-828c067cb041 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Lock "65fe3720-95cb-4620-b1c7-eae9e3bc3943" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 761.727523] env[62070]: DEBUG oslo_vmware.api [None req-682a69a1-3ef5-481f-9200-81623493ef57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52ceaa38-760b-3138-a0ef-60e6f5e8fa89, 'name': SearchDatastore_Task, 'duration_secs': 0.0142} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 761.727819] env[62070]: DEBUG oslo_concurrency.lockutils [None req-682a69a1-3ef5-481f-9200-81623493ef57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Releasing lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 761.728119] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-682a69a1-3ef5-481f-9200-81623493ef57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore2] 13e3576e-4f4c-4541-a637-daa124cbf8dd/13e3576e-4f4c-4541-a637-daa124cbf8dd.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 761.728410] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-fa794332-8364-4684-adb3-c9940d22b1f6 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.735905] env[62070]: DEBUG oslo_vmware.api [None req-682a69a1-3ef5-481f-9200-81623493ef57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Waiting for the task: (returnval){ [ 761.735905] env[62070]: value = "task-1121590" [ 761.735905] env[62070]: _type = "Task" [ 761.735905] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 761.743380] env[62070]: DEBUG oslo_vmware.api [None req-682a69a1-3ef5-481f-9200-81623493ef57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Task: {'id': task-1121590, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 761.769329] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1121589, 'name': CreateVM_Task} progress is 25%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 761.814042] env[62070]: DEBUG oslo_vmware.api [None req-884b25a6-886e-4a2f-85f7-4f7a1048c76f tempest-ServersTestBootFromVolume-622206804 tempest-ServersTestBootFromVolume-622206804-project-member] Task: {'id': task-1121583, 'name': RelocateVM_Task} progress is 98%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 761.827133] env[62070]: DEBUG oslo_concurrency.lockutils [None req-f6ff1678-8a45-4fca-ba2d-cf78a12bb48c tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.607s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 761.827980] env[62070]: DEBUG nova.compute.manager [None req-f6ff1678-8a45-4fca-ba2d-cf78a12bb48c tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] [instance: 242cf24f-f9e0-49ca-9b3e-a2b88b3cdcdc] Start building networks asynchronously for instance. 
{{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 761.830421] env[62070]: DEBUG oslo_concurrency.lockutils [None req-5160861d-89ab-415d-b303-ef069e25196d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 16.203s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 761.830729] env[62070]: DEBUG nova.objects.instance [None req-5160861d-89ab-415d-b303-ef069e25196d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Lazy-loading 'resources' on Instance uuid 076aed5b-4b08-4f3b-a940-d9cd95c32e57 {{(pid=62070) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 761.867773] env[62070]: DEBUG oslo_service.periodic_task [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62070) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 761.868457] env[62070]: DEBUG oslo_service.periodic_task [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62070) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 761.927664] env[62070]: DEBUG nova.compute.manager [req-1dafef8d-1b66-4017-8735-521405db8200 req-3f87f475-003e-4f7e-be33-79b15cb649b1 service nova] [instance: 359ae9f2-a907-459e-99b9-3e043d5d015f] Received event network-vif-plugged-98c76f66-e1d2-463c-b1a1-7d539909e2a9 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 761.929292] env[62070]: DEBUG oslo_concurrency.lockutils [req-1dafef8d-1b66-4017-8735-521405db8200 req-3f87f475-003e-4f7e-be33-79b15cb649b1 service nova] Acquiring lock "359ae9f2-a907-459e-99b9-3e043d5d015f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 761.929292] env[62070]: DEBUG oslo_concurrency.lockutils [req-1dafef8d-1b66-4017-8735-521405db8200 req-3f87f475-003e-4f7e-be33-79b15cb649b1 service nova] Lock "359ae9f2-a907-459e-99b9-3e043d5d015f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 761.929292] env[62070]: DEBUG oslo_concurrency.lockutils [req-1dafef8d-1b66-4017-8735-521405db8200 req-3f87f475-003e-4f7e-be33-79b15cb649b1 service nova] Lock "359ae9f2-a907-459e-99b9-3e043d5d015f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 761.929292] env[62070]: DEBUG nova.compute.manager [req-1dafef8d-1b66-4017-8735-521405db8200 req-3f87f475-003e-4f7e-be33-79b15cb649b1 service nova] [instance: 359ae9f2-a907-459e-99b9-3e043d5d015f] No waiting events found dispatching network-vif-plugged-98c76f66-e1d2-463c-b1a1-7d539909e2a9 {{(pid=62070) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 761.929292] env[62070]: WARNING nova.compute.manager [req-1dafef8d-1b66-4017-8735-521405db8200 req-3f87f475-003e-4f7e-be33-79b15cb649b1 
service nova] [instance: 359ae9f2-a907-459e-99b9-3e043d5d015f] Received unexpected event network-vif-plugged-98c76f66-e1d2-463c-b1a1-7d539909e2a9 for instance with vm_state building and task_state spawning. [ 762.020203] env[62070]: DEBUG nova.network.neutron [req-0cfc9740-68b3-4cad-a004-e47b15bc4aaf req-b214fe4f-d03f-4103-a709-995a4b1ece1f service nova] [instance: 748c94c7-1233-44f4-a71a-176b26518399] Updated VIF entry in instance network info cache for port 3fd232c6-4869-4c79-8250-ae854eb69ae0. {{(pid=62070) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 762.021353] env[62070]: DEBUG nova.network.neutron [req-0cfc9740-68b3-4cad-a004-e47b15bc4aaf req-b214fe4f-d03f-4103-a709-995a4b1ece1f service nova] [instance: 748c94c7-1233-44f4-a71a-176b26518399] Updating instance_info_cache with network_info: [{"id": "3fd232c6-4869-4c79-8250-ae854eb69ae0", "address": "fa:16:3e:3f:f0:95", "network": {"id": "bb187032-3d1c-4d95-9cf9-aa3d3ef0b45f", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-1186437126-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a25a88ab06e440e3b9813e276194143a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5ce1511e-1eaa-45c6-a1ef-2b714c814fa1", "external-id": "nsx-vlan-transportzone-300", "segmentation_id": 300, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3fd232c6-48", "ovs_interfaceid": "3fd232c6-4869-4c79-8250-ae854eb69ae0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 762.056753] env[62070]: DEBUG nova.network.neutron [None req-e4f46748-c02f-46f3-a136-2698cbfaeb86 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] [instance: 359ae9f2-a907-459e-99b9-3e043d5d015f] Successfully updated port: 98c76f66-e1d2-463c-b1a1-7d539909e2a9 {{(pid=62070) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 762.248433] env[62070]: DEBUG oslo_vmware.api [None req-682a69a1-3ef5-481f-9200-81623493ef57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Task: {'id': task-1121590, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 762.272759] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1121589, 'name': CreateVM_Task, 'duration_secs': 0.669539} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 762.272987] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 748c94c7-1233-44f4-a71a-176b26518399] Created VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 762.273730] env[62070]: DEBUG oslo_concurrency.lockutils [None req-c8619bac-3476-49c3-92b7-dcaac00d2762 tempest-ServersTestFqdnHostnames-1065104165 tempest-ServersTestFqdnHostnames-1065104165-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 762.273905] env[62070]: DEBUG oslo_concurrency.lockutils [None req-c8619bac-3476-49c3-92b7-dcaac00d2762 tempest-ServersTestFqdnHostnames-1065104165 tempest-ServersTestFqdnHostnames-1065104165-project-member] Acquired lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 762.274264] env[62070]: DEBUG oslo_concurrency.lockutils [None req-c8619bac-3476-49c3-92b7-dcaac00d2762 tempest-ServersTestFqdnHostnames-1065104165 tempest-ServersTestFqdnHostnames-1065104165-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 762.274536] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5fadc983-6843-42bf-980a-9588694cd8cb {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.280646] env[62070]: DEBUG oslo_vmware.api [None req-c8619bac-3476-49c3-92b7-dcaac00d2762 tempest-ServersTestFqdnHostnames-1065104165 tempest-ServersTestFqdnHostnames-1065104165-project-member] Waiting for the task: (returnval){ [ 762.280646] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]52d30016-dcdc-c59c-77b8-306c168769e5" [ 762.280646] env[62070]: _type = "Task" [ 762.280646] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 762.291671] env[62070]: DEBUG oslo_vmware.api [None req-c8619bac-3476-49c3-92b7-dcaac00d2762 tempest-ServersTestFqdnHostnames-1065104165 tempest-ServersTestFqdnHostnames-1065104165-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52d30016-dcdc-c59c-77b8-306c168769e5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 762.313886] env[62070]: DEBUG oslo_vmware.api [None req-884b25a6-886e-4a2f-85f7-4f7a1048c76f tempest-ServersTestBootFromVolume-622206804 tempest-ServersTestBootFromVolume-622206804-project-member] Task: {'id': task-1121583, 'name': RelocateVM_Task} progress is 98%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 762.334031] env[62070]: DEBUG nova.compute.utils [None req-f6ff1678-8a45-4fca-ba2d-cf78a12bb48c tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Using /dev/sd instead of None {{(pid=62070) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 762.335302] env[62070]: DEBUG nova.compute.manager [None req-f6ff1678-8a45-4fca-ba2d-cf78a12bb48c tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] [instance: 242cf24f-f9e0-49ca-9b3e-a2b88b3cdcdc] Allocating IP information in the background. {{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 762.335477] env[62070]: DEBUG nova.network.neutron [None req-f6ff1678-8a45-4fca-ba2d-cf78a12bb48c tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] [instance: 242cf24f-f9e0-49ca-9b3e-a2b88b3cdcdc] allocate_for_instance() {{(pid=62070) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 762.377140] env[62070]: DEBUG oslo_service.periodic_task [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62070) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 762.378125] env[62070]: DEBUG nova.compute.manager [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Starting heal instance info cache {{(pid=62070) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}} [ 762.378125] env[62070]: DEBUG nova.compute.manager [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Rebuilding the list of instances to heal {{(pid=62070) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 762.407114] env[62070]: DEBUG nova.policy [None req-f6ff1678-8a45-4fca-ba2d-cf78a12bb48c tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c811421a3cb34cda9709b071376ee2a9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '074c9a0a64ab4a2abb709986bb5c8e91', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62070) authorize /opt/stack/nova/nova/policy.py:201}} [ 762.524909] env[62070]: DEBUG oslo_concurrency.lockutils [req-0cfc9740-68b3-4cad-a004-e47b15bc4aaf req-b214fe4f-d03f-4103-a709-995a4b1ece1f service nova] Releasing lock "refresh_cache-748c94c7-1233-44f4-a71a-176b26518399" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 762.560622] env[62070]: DEBUG oslo_concurrency.lockutils [None req-e4f46748-c02f-46f3-a136-2698cbfaeb86 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Acquiring lock "refresh_cache-359ae9f2-a907-459e-99b9-3e043d5d015f" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 762.560772] env[62070]: DEBUG oslo_concurrency.lockutils [None req-e4f46748-c02f-46f3-a136-2698cbfaeb86 tempest-ListImageFiltersTestJSON-300603416 
tempest-ListImageFiltersTestJSON-300603416-project-member] Acquired lock "refresh_cache-359ae9f2-a907-459e-99b9-3e043d5d015f" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 762.560924] env[62070]: DEBUG nova.network.neutron [None req-e4f46748-c02f-46f3-a136-2698cbfaeb86 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] [instance: 359ae9f2-a907-459e-99b9-3e043d5d015f] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 762.728905] env[62070]: DEBUG nova.network.neutron [None req-f6ff1678-8a45-4fca-ba2d-cf78a12bb48c tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] [instance: 242cf24f-f9e0-49ca-9b3e-a2b88b3cdcdc] Successfully created port: 43964a23-7533-4cae-9837-85e33059c929 {{(pid=62070) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 762.746256] env[62070]: DEBUG oslo_vmware.api [None req-682a69a1-3ef5-481f-9200-81623493ef57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Task: {'id': task-1121590, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 762.791211] env[62070]: DEBUG oslo_vmware.api [None req-c8619bac-3476-49c3-92b7-dcaac00d2762 tempest-ServersTestFqdnHostnames-1065104165 tempest-ServersTestFqdnHostnames-1065104165-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52d30016-dcdc-c59c-77b8-306c168769e5, 'name': SearchDatastore_Task, 'duration_secs': 0.043747} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 762.794088] env[62070]: DEBUG oslo_concurrency.lockutils [None req-c8619bac-3476-49c3-92b7-dcaac00d2762 tempest-ServersTestFqdnHostnames-1065104165 tempest-ServersTestFqdnHostnames-1065104165-project-member] Releasing lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 762.794358] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-c8619bac-3476-49c3-92b7-dcaac00d2762 tempest-ServersTestFqdnHostnames-1065104165 tempest-ServersTestFqdnHostnames-1065104165-project-member] [instance: 748c94c7-1233-44f4-a71a-176b26518399] Processing image 43ea607c-7ece-4601-9b11-75c6a16aa7dd {{(pid=62070) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 762.794623] env[62070]: DEBUG oslo_concurrency.lockutils [None req-c8619bac-3476-49c3-92b7-dcaac00d2762 tempest-ServersTestFqdnHostnames-1065104165 tempest-ServersTestFqdnHostnames-1065104165-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 762.794798] env[62070]: DEBUG oslo_concurrency.lockutils [None req-c8619bac-3476-49c3-92b7-dcaac00d2762 tempest-ServersTestFqdnHostnames-1065104165 tempest-ServersTestFqdnHostnames-1065104165-project-member] Acquired lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} 
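[editorial note] The Acquiring/Acquired/Releasing lock records above (lockutils.py:310/313/331) trace oslo.concurrency's lockutils context manager, which Nova uses to serialize work on shared resources such as the image-cache VMDK and each instance's refresh_cache entry. The following is a minimal illustrative sketch of that pattern, not Nova source code; the function name refresh_image_cache and the empty critical section are placeholders.

from oslo_concurrency import lockutils

# Lock name mirrors the one seen in the trace above; the protected body is a
# placeholder -- real code would fetch or validate the cached image here.
IMAGE_CACHE_LOCK = ("[datastore2] devstack-image-cache_base/"
                    "43ea607c-7ece-4601-9b11-75c6a16aa7dd")

def refresh_image_cache():
    # Entering lockutils.lock() emits the "Acquiring lock"/"Acquired lock"
    # DEBUG lines; leaving the with-block emits "Releasing lock", as in the
    # records above.
    with lockutils.lock(IMAGE_CACHE_LOCK):
        pass  # placeholder for the actual cache refresh work

[end editorial note]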
[ 762.795039] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-c8619bac-3476-49c3-92b7-dcaac00d2762 tempest-ServersTestFqdnHostnames-1065104165 tempest-ServersTestFqdnHostnames-1065104165-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 762.795569] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b62e857d-b739-4021-8662-e04cfe1a4919 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.816621] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-c8619bac-3476-49c3-92b7-dcaac00d2762 tempest-ServersTestFqdnHostnames-1065104165 tempest-ServersTestFqdnHostnames-1065104165-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 762.816904] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-c8619bac-3476-49c3-92b7-dcaac00d2762 tempest-ServersTestFqdnHostnames-1065104165 tempest-ServersTestFqdnHostnames-1065104165-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62070) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 762.817683] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-639fd650-6dee-4596-ab41-df632e8a6ccd {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.823927] env[62070]: DEBUG oslo_vmware.api [None req-c8619bac-3476-49c3-92b7-dcaac00d2762 tempest-ServersTestFqdnHostnames-1065104165 tempest-ServersTestFqdnHostnames-1065104165-project-member] Waiting for the task: (returnval){ [ 762.823927] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]52558e24-74df-8f61-d2d3-c12df5fd9559" [ 762.823927] env[62070]: _type = "Task" [ 762.823927] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 762.826902] env[62070]: DEBUG oslo_vmware.api [None req-884b25a6-886e-4a2f-85f7-4f7a1048c76f tempest-ServersTestBootFromVolume-622206804 tempest-ServersTestBootFromVolume-622206804-project-member] Task: {'id': task-1121583, 'name': RelocateVM_Task, 'duration_secs': 5.364049} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 762.833050] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-884b25a6-886e-4a2f-85f7-4f7a1048c76f tempest-ServersTestBootFromVolume-622206804 tempest-ServersTestBootFromVolume-622206804-project-member] [instance: dd5d90e8-964a-4e1c-a98a-bcba37a1d79e] Volume attach. 
Driver type: vmdk {{(pid=62070) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 762.833285] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-884b25a6-886e-4a2f-85f7-4f7a1048c76f tempest-ServersTestBootFromVolume-622206804 tempest-ServersTestBootFromVolume-622206804-project-member] [instance: dd5d90e8-964a-4e1c-a98a-bcba37a1d79e] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-245326', 'volume_id': 'b5d7c242-16d7-4aff-81f3-626f9f878e94', 'name': 'volume-b5d7c242-16d7-4aff-81f3-626f9f878e94', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'dd5d90e8-964a-4e1c-a98a-bcba37a1d79e', 'attached_at': '', 'detached_at': '', 'volume_id': 'b5d7c242-16d7-4aff-81f3-626f9f878e94', 'serial': 'b5d7c242-16d7-4aff-81f3-626f9f878e94'} {{(pid=62070) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 762.834348] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84a08b8d-61cb-4cfc-8b81-dee70864406a {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.838837] env[62070]: DEBUG nova.compute.manager [None req-f6ff1678-8a45-4fca-ba2d-cf78a12bb48c tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] [instance: 242cf24f-f9e0-49ca-9b3e-a2b88b3cdcdc] Start building block device mappings for instance. {{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 762.856320] env[62070]: DEBUG oslo_vmware.api [None req-c8619bac-3476-49c3-92b7-dcaac00d2762 tempest-ServersTestFqdnHostnames-1065104165 tempest-ServersTestFqdnHostnames-1065104165-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52558e24-74df-8f61-d2d3-c12df5fd9559, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 762.859854] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8dde763c-a159-44b9-8bba-ac965d337d0b {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.886034] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-884b25a6-886e-4a2f-85f7-4f7a1048c76f tempest-ServersTestBootFromVolume-622206804 tempest-ServersTestBootFromVolume-622206804-project-member] [instance: dd5d90e8-964a-4e1c-a98a-bcba37a1d79e] Reconfiguring VM instance instance-0000002f to attach disk [datastore2] volume-b5d7c242-16d7-4aff-81f3-626f9f878e94/volume-b5d7c242-16d7-4aff-81f3-626f9f878e94.vmdk or device None with type thin {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 762.892810] env[62070]: DEBUG nova.compute.manager [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] [instance: dd5d90e8-964a-4e1c-a98a-bcba37a1d79e] Skipping network cache update for instance because it is Building. {{(pid=62070) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 762.892986] env[62070]: DEBUG nova.compute.manager [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] [instance: 13e3576e-4f4c-4541-a637-daa124cbf8dd] Skipping network cache update for instance because it is Building. 
{{(pid=62070) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 762.893141] env[62070]: DEBUG nova.compute.manager [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] [instance: 748c94c7-1233-44f4-a71a-176b26518399] Skipping network cache update for instance because it is Building. {{(pid=62070) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 762.893286] env[62070]: DEBUG nova.compute.manager [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] [instance: 359ae9f2-a907-459e-99b9-3e043d5d015f] Skipping network cache update for instance because it is Building. {{(pid=62070) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 762.893417] env[62070]: DEBUG nova.compute.manager [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] [instance: 242cf24f-f9e0-49ca-9b3e-a2b88b3cdcdc] Skipping network cache update for instance because it is Building. {{(pid=62070) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 762.893660] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Acquiring lock "refresh_cache-d148d561-3211-4f1f-965a-f2b14cd60b11" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 762.893771] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Acquired lock "refresh_cache-d148d561-3211-4f1f-965a-f2b14cd60b11" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 762.893904] env[62070]: DEBUG nova.network.neutron [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] [instance: d148d561-3211-4f1f-965a-f2b14cd60b11] Forcefully refreshing network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2004}} [ 762.894073] env[62070]: DEBUG nova.objects.instance [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Lazy-loading 'info_cache' on Instance uuid d148d561-3211-4f1f-965a-f2b14cd60b11 {{(pid=62070) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 762.895383] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cab6beb1-1bba-4e74-88fc-6c5245346ba4 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.912364] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bff60d0c-60a1-4962-b2f0-a02ea40f116d {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.921135] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-affe24f7-ffae-45a2-bb2f-a0de2e2ef735 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.925655] env[62070]: DEBUG oslo_vmware.api [None req-884b25a6-886e-4a2f-85f7-4f7a1048c76f tempest-ServersTestBootFromVolume-622206804 tempest-ServersTestBootFromVolume-622206804-project-member] Waiting for the task: (returnval){ [ 762.925655] env[62070]: value = "task-1121591" [ 762.925655] env[62070]: _type = "Task" [ 762.925655] env[62070]: } to complete. 
{{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 762.955465] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03dab8b0-3080-4d17-810d-5eafd8b60703 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.962129] env[62070]: DEBUG oslo_vmware.api [None req-884b25a6-886e-4a2f-85f7-4f7a1048c76f tempest-ServersTestBootFromVolume-622206804 tempest-ServersTestBootFromVolume-622206804-project-member] Task: {'id': task-1121591, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 762.967132] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-405f46b1-9877-4d2b-871c-aa7fb898b32c {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.982707] env[62070]: DEBUG nova.compute.provider_tree [None req-5160861d-89ab-415d-b303-ef069e25196d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 763.093444] env[62070]: DEBUG nova.network.neutron [None req-e4f46748-c02f-46f3-a136-2698cbfaeb86 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] [instance: 359ae9f2-a907-459e-99b9-3e043d5d015f] Instance cache missing network info. {{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 763.244439] env[62070]: DEBUG nova.network.neutron [None req-e4f46748-c02f-46f3-a136-2698cbfaeb86 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] [instance: 359ae9f2-a907-459e-99b9-3e043d5d015f] Updating instance_info_cache with network_info: [{"id": "98c76f66-e1d2-463c-b1a1-7d539909e2a9", "address": "fa:16:3e:83:6d:a8", "network": {"id": "df33a08d-88db-4a22-846f-5b414705fc65", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.214", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "9d42cb2bbadf40d6b35f237f71234611", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4728adca-2846-416a-91a3-deb898faf1f3", "external-id": "nsx-vlan-transportzone-823", "segmentation_id": 823, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap98c76f66-e1", "ovs_interfaceid": "98c76f66-e1d2-463c-b1a1-7d539909e2a9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 763.248650] env[62070]: DEBUG oslo_vmware.api [None req-682a69a1-3ef5-481f-9200-81623493ef57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Task: {'id': task-1121590, 'name': 
CopyVirtualDisk_Task} progress is 100%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 763.337620] env[62070]: DEBUG oslo_vmware.api [None req-c8619bac-3476-49c3-92b7-dcaac00d2762 tempest-ServersTestFqdnHostnames-1065104165 tempest-ServersTestFqdnHostnames-1065104165-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52558e24-74df-8f61-d2d3-c12df5fd9559, 'name': SearchDatastore_Task, 'duration_secs': 0.042275} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 763.338483] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9b242dc6-1c6f-463b-8cb7-b2c79759e0ca {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.344547] env[62070]: DEBUG oslo_vmware.api [None req-c8619bac-3476-49c3-92b7-dcaac00d2762 tempest-ServersTestFqdnHostnames-1065104165 tempest-ServersTestFqdnHostnames-1065104165-project-member] Waiting for the task: (returnval){ [ 763.344547] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]527a92e7-915e-9366-ccc1-b0ddd02939da" [ 763.344547] env[62070]: _type = "Task" [ 763.344547] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 763.352651] env[62070]: DEBUG oslo_vmware.api [None req-c8619bac-3476-49c3-92b7-dcaac00d2762 tempest-ServersTestFqdnHostnames-1065104165 tempest-ServersTestFqdnHostnames-1065104165-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]527a92e7-915e-9366-ccc1-b0ddd02939da, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 763.436882] env[62070]: DEBUG oslo_vmware.api [None req-884b25a6-886e-4a2f-85f7-4f7a1048c76f tempest-ServersTestBootFromVolume-622206804 tempest-ServersTestBootFromVolume-622206804-project-member] Task: {'id': task-1121591, 'name': ReconfigVM_Task, 'duration_secs': 0.290247} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 763.437049] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-884b25a6-886e-4a2f-85f7-4f7a1048c76f tempest-ServersTestBootFromVolume-622206804 tempest-ServersTestBootFromVolume-622206804-project-member] [instance: dd5d90e8-964a-4e1c-a98a-bcba37a1d79e] Reconfigured VM instance instance-0000002f to attach disk [datastore2] volume-b5d7c242-16d7-4aff-81f3-626f9f878e94/volume-b5d7c242-16d7-4aff-81f3-626f9f878e94.vmdk or device None with type thin {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 763.442292] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-568de9a5-ef5b-47c8-93a5-d973d5bd8ac0 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.457510] env[62070]: DEBUG oslo_vmware.api [None req-884b25a6-886e-4a2f-85f7-4f7a1048c76f tempest-ServersTestBootFromVolume-622206804 tempest-ServersTestBootFromVolume-622206804-project-member] Waiting for the task: (returnval){ [ 763.457510] env[62070]: value = "task-1121592" [ 763.457510] env[62070]: _type = "Task" [ 763.457510] env[62070]: } to complete. 
{{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 763.466257] env[62070]: DEBUG oslo_vmware.api [None req-884b25a6-886e-4a2f-85f7-4f7a1048c76f tempest-ServersTestBootFromVolume-622206804 tempest-ServersTestBootFromVolume-622206804-project-member] Task: {'id': task-1121592, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 763.485840] env[62070]: DEBUG nova.scheduler.client.report [None req-5160861d-89ab-415d-b303-ef069e25196d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 763.749016] env[62070]: DEBUG oslo_vmware.api [None req-682a69a1-3ef5-481f-9200-81623493ef57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Task: {'id': task-1121590, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.633322} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 763.749464] env[62070]: DEBUG oslo_concurrency.lockutils [None req-e4f46748-c02f-46f3-a136-2698cbfaeb86 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Releasing lock "refresh_cache-359ae9f2-a907-459e-99b9-3e043d5d015f" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 763.749756] env[62070]: DEBUG nova.compute.manager [None req-e4f46748-c02f-46f3-a136-2698cbfaeb86 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] [instance: 359ae9f2-a907-459e-99b9-3e043d5d015f] Instance network_info: |[{"id": "98c76f66-e1d2-463c-b1a1-7d539909e2a9", "address": "fa:16:3e:83:6d:a8", "network": {"id": "df33a08d-88db-4a22-846f-5b414705fc65", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.214", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "9d42cb2bbadf40d6b35f237f71234611", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4728adca-2846-416a-91a3-deb898faf1f3", "external-id": "nsx-vlan-transportzone-823", "segmentation_id": 823, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap98c76f66-e1", "ovs_interfaceid": "98c76f66-e1d2-463c-b1a1-7d539909e2a9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 763.750017] env[62070]: DEBUG 
nova.virt.vmwareapi.vm_util [None req-682a69a1-3ef5-481f-9200-81623493ef57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore2] 13e3576e-4f4c-4541-a637-daa124cbf8dd/13e3576e-4f4c-4541-a637-daa124cbf8dd.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 763.750230] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-682a69a1-3ef5-481f-9200-81623493ef57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 13e3576e-4f4c-4541-a637-daa124cbf8dd] Extending root virtual disk to 1048576 {{(pid=62070) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 763.750585] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-e4f46748-c02f-46f3-a136-2698cbfaeb86 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] [instance: 359ae9f2-a907-459e-99b9-3e043d5d015f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:83:6d:a8', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4728adca-2846-416a-91a3-deb898faf1f3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '98c76f66-e1d2-463c-b1a1-7d539909e2a9', 'vif_model': 'vmxnet3'}] {{(pid=62070) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 763.757801] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-e4f46748-c02f-46f3-a136-2698cbfaeb86 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Creating folder: Project (074c9a0a64ab4a2abb709986bb5c8e91). Parent ref: group-v245319. {{(pid=62070) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 763.758086] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-28950f52-3742-4137-8837-5bac9b8922fe {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.760165] env[62070]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-fe7ddd57-a432-4f70-bdb9-dbb9e1c32b26 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.768039] env[62070]: DEBUG oslo_vmware.api [None req-682a69a1-3ef5-481f-9200-81623493ef57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Waiting for the task: (returnval){ [ 763.768039] env[62070]: value = "task-1121593" [ 763.768039] env[62070]: _type = "Task" [ 763.768039] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 763.772497] env[62070]: INFO nova.virt.vmwareapi.vm_util [None req-e4f46748-c02f-46f3-a136-2698cbfaeb86 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Created folder: Project (074c9a0a64ab4a2abb709986bb5c8e91) in parent group-v245319. [ 763.772671] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-e4f46748-c02f-46f3-a136-2698cbfaeb86 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Creating folder: Instances. Parent ref: group-v245374. 
{{(pid=62070) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 763.775482] env[62070]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8004775a-10c4-4156-875f-12f6419db50a {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.776858] env[62070]: DEBUG oslo_vmware.api [None req-682a69a1-3ef5-481f-9200-81623493ef57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Task: {'id': task-1121593, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 763.787057] env[62070]: INFO nova.virt.vmwareapi.vm_util [None req-e4f46748-c02f-46f3-a136-2698cbfaeb86 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Created folder: Instances in parent group-v245374. [ 763.787057] env[62070]: DEBUG oslo.service.loopingcall [None req-e4f46748-c02f-46f3-a136-2698cbfaeb86 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 763.787057] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 359ae9f2-a907-459e-99b9-3e043d5d015f] Creating VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 763.787057] env[62070]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-101cc5e6-1cc8-423e-bd07-2467534ebf9a {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.805082] env[62070]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 763.805082] env[62070]: value = "task-1121596" [ 763.805082] env[62070]: _type = "Task" [ 763.805082] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 763.812169] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1121596, 'name': CreateVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 763.855574] env[62070]: DEBUG oslo_vmware.api [None req-c8619bac-3476-49c3-92b7-dcaac00d2762 tempest-ServersTestFqdnHostnames-1065104165 tempest-ServersTestFqdnHostnames-1065104165-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]527a92e7-915e-9366-ccc1-b0ddd02939da, 'name': SearchDatastore_Task, 'duration_secs': 0.037493} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 763.856858] env[62070]: DEBUG oslo_concurrency.lockutils [None req-c8619bac-3476-49c3-92b7-dcaac00d2762 tempest-ServersTestFqdnHostnames-1065104165 tempest-ServersTestFqdnHostnames-1065104165-project-member] Releasing lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 763.856858] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-c8619bac-3476-49c3-92b7-dcaac00d2762 tempest-ServersTestFqdnHostnames-1065104165 tempest-ServersTestFqdnHostnames-1065104165-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore2] 748c94c7-1233-44f4-a71a-176b26518399/748c94c7-1233-44f4-a71a-176b26518399.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 763.856858] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-bfeafb9b-568d-4586-ae55-a8d42659d7f3 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.862807] env[62070]: DEBUG oslo_vmware.api [None req-c8619bac-3476-49c3-92b7-dcaac00d2762 tempest-ServersTestFqdnHostnames-1065104165 tempest-ServersTestFqdnHostnames-1065104165-project-member] Waiting for the task: (returnval){ [ 763.862807] env[62070]: value = "task-1121597" [ 763.862807] env[62070]: _type = "Task" [ 763.862807] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 763.866885] env[62070]: DEBUG nova.compute.manager [None req-f6ff1678-8a45-4fca-ba2d-cf78a12bb48c tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] [instance: 242cf24f-f9e0-49ca-9b3e-a2b88b3cdcdc] Start spawning the instance on the hypervisor. {{(pid=62070) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 763.874345] env[62070]: DEBUG oslo_vmware.api [None req-c8619bac-3476-49c3-92b7-dcaac00d2762 tempest-ServersTestFqdnHostnames-1065104165 tempest-ServersTestFqdnHostnames-1065104165-project-member] Task: {'id': task-1121597, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 763.894676] env[62070]: DEBUG nova.virt.hardware [None req-f6ff1678-8a45-4fca-ba2d-cf78a12bb48c tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T09:21:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T09:21:17Z,direct_url=,disk_format='vmdk',id=43ea607c-7ece-4601-9b11-75c6a16aa7dd,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9d42cb2bbadf40d6b35f237f71234611',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T09:21:18Z,virtual_size=,visibility=), allow threads: False {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 763.895014] env[62070]: DEBUG nova.virt.hardware [None req-f6ff1678-8a45-4fca-ba2d-cf78a12bb48c tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Flavor limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 763.895243] env[62070]: DEBUG nova.virt.hardware [None req-f6ff1678-8a45-4fca-ba2d-cf78a12bb48c tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Image limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 763.895450] env[62070]: DEBUG nova.virt.hardware [None req-f6ff1678-8a45-4fca-ba2d-cf78a12bb48c tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Flavor pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 763.895610] env[62070]: DEBUG nova.virt.hardware [None req-f6ff1678-8a45-4fca-ba2d-cf78a12bb48c tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Image pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 763.895796] env[62070]: DEBUG nova.virt.hardware [None req-f6ff1678-8a45-4fca-ba2d-cf78a12bb48c tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 763.896030] env[62070]: DEBUG nova.virt.hardware [None req-f6ff1678-8a45-4fca-ba2d-cf78a12bb48c tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 763.896194] env[62070]: DEBUG nova.virt.hardware [None req-f6ff1678-8a45-4fca-ba2d-cf78a12bb48c tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 763.896386] env[62070]: 
DEBUG nova.virt.hardware [None req-f6ff1678-8a45-4fca-ba2d-cf78a12bb48c tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Got 1 possible topologies {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 763.896544] env[62070]: DEBUG nova.virt.hardware [None req-f6ff1678-8a45-4fca-ba2d-cf78a12bb48c tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 763.896713] env[62070]: DEBUG nova.virt.hardware [None req-f6ff1678-8a45-4fca-ba2d-cf78a12bb48c tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 763.897652] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24cd2017-bf47-41c0-b4f8-42f2fac6d44f {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.905427] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d375cb19-8ba8-4857-9613-c78560b7f71b {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.969303] env[62070]: DEBUG oslo_vmware.api [None req-884b25a6-886e-4a2f-85f7-4f7a1048c76f tempest-ServersTestBootFromVolume-622206804 tempest-ServersTestBootFromVolume-622206804-project-member] Task: {'id': task-1121592, 'name': ReconfigVM_Task, 'duration_secs': 0.135273} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 763.969496] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-884b25a6-886e-4a2f-85f7-4f7a1048c76f tempest-ServersTestBootFromVolume-622206804 tempest-ServersTestBootFromVolume-622206804-project-member] [instance: dd5d90e8-964a-4e1c-a98a-bcba37a1d79e] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-245326', 'volume_id': 'b5d7c242-16d7-4aff-81f3-626f9f878e94', 'name': 'volume-b5d7c242-16d7-4aff-81f3-626f9f878e94', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'dd5d90e8-964a-4e1c-a98a-bcba37a1d79e', 'attached_at': '', 'detached_at': '', 'volume_id': 'b5d7c242-16d7-4aff-81f3-626f9f878e94', 'serial': 'b5d7c242-16d7-4aff-81f3-626f9f878e94'} {{(pid=62070) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 763.970232] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-10c207e8-ca4b-4c88-8d5a-2e72236d1e28 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.977815] env[62070]: DEBUG nova.compute.manager [req-a4a948ea-c0e0-4d41-be7c-dfbd6f5b5bc4 req-0688893d-230f-4e2f-9a50-e826571a17df service nova] [instance: 359ae9f2-a907-459e-99b9-3e043d5d015f] Received event network-changed-98c76f66-e1d2-463c-b1a1-7d539909e2a9 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 763.978104] env[62070]: DEBUG nova.compute.manager [req-a4a948ea-c0e0-4d41-be7c-dfbd6f5b5bc4 req-0688893d-230f-4e2f-9a50-e826571a17df service nova] [instance: 359ae9f2-a907-459e-99b9-3e043d5d015f] Refreshing instance network info cache due to event network-changed-98c76f66-e1d2-463c-b1a1-7d539909e2a9. {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 763.978249] env[62070]: DEBUG oslo_concurrency.lockutils [req-a4a948ea-c0e0-4d41-be7c-dfbd6f5b5bc4 req-0688893d-230f-4e2f-9a50-e826571a17df service nova] Acquiring lock "refresh_cache-359ae9f2-a907-459e-99b9-3e043d5d015f" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 763.978404] env[62070]: DEBUG oslo_concurrency.lockutils [req-a4a948ea-c0e0-4d41-be7c-dfbd6f5b5bc4 req-0688893d-230f-4e2f-9a50-e826571a17df service nova] Acquired lock "refresh_cache-359ae9f2-a907-459e-99b9-3e043d5d015f" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 763.978567] env[62070]: DEBUG nova.network.neutron [req-a4a948ea-c0e0-4d41-be7c-dfbd6f5b5bc4 req-0688893d-230f-4e2f-9a50-e826571a17df service nova] [instance: 359ae9f2-a907-459e-99b9-3e043d5d015f] Refreshing network info cache for port 98c76f66-e1d2-463c-b1a1-7d539909e2a9 {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 763.983010] env[62070]: DEBUG oslo_vmware.api [None req-884b25a6-886e-4a2f-85f7-4f7a1048c76f tempest-ServersTestBootFromVolume-622206804 tempest-ServersTestBootFromVolume-622206804-project-member] Waiting for the task: (returnval){ [ 763.983010] env[62070]: value = "task-1121598" [ 763.983010] env[62070]: _type = "Task" [ 763.983010] env[62070]: } to complete. 
{{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 763.993182] env[62070]: DEBUG oslo_concurrency.lockutils [None req-5160861d-89ab-415d-b303-ef069e25196d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.163s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 763.995871] env[62070]: DEBUG oslo_vmware.api [None req-884b25a6-886e-4a2f-85f7-4f7a1048c76f tempest-ServersTestBootFromVolume-622206804 tempest-ServersTestBootFromVolume-622206804-project-member] Task: {'id': task-1121598, 'name': Rename_Task} progress is 6%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 763.997379] env[62070]: DEBUG oslo_concurrency.lockutils [None req-5a8984d4-9dbc-4d3d-b314-9d28f25f6fb9 tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 16.363s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 763.997462] env[62070]: DEBUG nova.objects.instance [None req-5a8984d4-9dbc-4d3d-b314-9d28f25f6fb9 tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Lazy-loading 'resources' on Instance uuid 1c1730e5-88af-4c7f-8bcc-d494db2cd723 {{(pid=62070) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 764.019070] env[62070]: INFO nova.scheduler.client.report [None req-5160861d-89ab-415d-b303-ef069e25196d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Deleted allocations for instance 076aed5b-4b08-4f3b-a940-d9cd95c32e57 [ 764.279518] env[62070]: DEBUG oslo_vmware.api [None req-682a69a1-3ef5-481f-9200-81623493ef57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Task: {'id': task-1121593, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 764.315594] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1121596, 'name': CreateVM_Task, 'duration_secs': 0.339572} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 764.315806] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 359ae9f2-a907-459e-99b9-3e043d5d015f] Created VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 764.316537] env[62070]: DEBUG oslo_concurrency.lockutils [None req-e4f46748-c02f-46f3-a136-2698cbfaeb86 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 764.316700] env[62070]: DEBUG oslo_concurrency.lockutils [None req-e4f46748-c02f-46f3-a136-2698cbfaeb86 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Acquired lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 764.317081] env[62070]: DEBUG oslo_concurrency.lockutils [None req-e4f46748-c02f-46f3-a136-2698cbfaeb86 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 764.317360] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e7745d03-afdc-4df3-a730-47f9d9b2b0da {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.322930] env[62070]: DEBUG oslo_vmware.api [None req-e4f46748-c02f-46f3-a136-2698cbfaeb86 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Waiting for the task: (returnval){ [ 764.322930] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]524e7a76-5563-09e1-f86b-6a5ff748cb21" [ 764.322930] env[62070]: _type = "Task" [ 764.322930] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 764.331452] env[62070]: DEBUG oslo_vmware.api [None req-e4f46748-c02f-46f3-a136-2698cbfaeb86 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]524e7a76-5563-09e1-f86b-6a5ff748cb21, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 764.371772] env[62070]: DEBUG oslo_vmware.api [None req-c8619bac-3476-49c3-92b7-dcaac00d2762 tempest-ServersTestFqdnHostnames-1065104165 tempest-ServersTestFqdnHostnames-1065104165-project-member] Task: {'id': task-1121597, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.483938} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 764.372038] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-c8619bac-3476-49c3-92b7-dcaac00d2762 tempest-ServersTestFqdnHostnames-1065104165 tempest-ServersTestFqdnHostnames-1065104165-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore2] 748c94c7-1233-44f4-a71a-176b26518399/748c94c7-1233-44f4-a71a-176b26518399.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 764.372255] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-c8619bac-3476-49c3-92b7-dcaac00d2762 tempest-ServersTestFqdnHostnames-1065104165 tempest-ServersTestFqdnHostnames-1065104165-project-member] [instance: 748c94c7-1233-44f4-a71a-176b26518399] Extending root virtual disk to 1048576 {{(pid=62070) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 764.372489] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f8ffe777-4e36-495b-9933-a722393d0358 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.381551] env[62070]: DEBUG oslo_vmware.api [None req-c8619bac-3476-49c3-92b7-dcaac00d2762 tempest-ServersTestFqdnHostnames-1065104165 tempest-ServersTestFqdnHostnames-1065104165-project-member] Waiting for the task: (returnval){ [ 764.381551] env[62070]: value = "task-1121599" [ 764.381551] env[62070]: _type = "Task" [ 764.381551] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 764.390742] env[62070]: DEBUG oslo_vmware.api [None req-c8619bac-3476-49c3-92b7-dcaac00d2762 tempest-ServersTestFqdnHostnames-1065104165 tempest-ServersTestFqdnHostnames-1065104165-project-member] Task: {'id': task-1121599, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 764.413768] env[62070]: DEBUG nova.network.neutron [None req-f6ff1678-8a45-4fca-ba2d-cf78a12bb48c tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] [instance: 242cf24f-f9e0-49ca-9b3e-a2b88b3cdcdc] Successfully updated port: 43964a23-7533-4cae-9837-85e33059c929 {{(pid=62070) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 764.499023] env[62070]: DEBUG oslo_vmware.api [None req-884b25a6-886e-4a2f-85f7-4f7a1048c76f tempest-ServersTestBootFromVolume-622206804 tempest-ServersTestBootFromVolume-622206804-project-member] Task: {'id': task-1121598, 'name': Rename_Task} progress is 14%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 764.531658] env[62070]: DEBUG oslo_concurrency.lockutils [None req-5160861d-89ab-415d-b303-ef069e25196d tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Lock "076aed5b-4b08-4f3b-a940-d9cd95c32e57" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 21.920s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 764.716175] env[62070]: DEBUG nova.network.neutron [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] [instance: d148d561-3211-4f1f-965a-f2b14cd60b11] Updating instance_info_cache with network_info: [{"id": "c5e6098a-ebbb-4eee-ba72-4ddaad679830", "address": "fa:16:3e:7b:ab:3c", "network": {"id": "df33a08d-88db-4a22-846f-5b414705fc65", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.22", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "9d42cb2bbadf40d6b35f237f71234611", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4728adca-2846-416a-91a3-deb898faf1f3", "external-id": "nsx-vlan-transportzone-823", "segmentation_id": 823, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc5e6098a-eb", "ovs_interfaceid": "c5e6098a-ebbb-4eee-ba72-4ddaad679830", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 764.778366] env[62070]: DEBUG oslo_vmware.api [None req-682a69a1-3ef5-481f-9200-81623493ef57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Task: {'id': task-1121593, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.862483} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 764.780920] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-682a69a1-3ef5-481f-9200-81623493ef57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 13e3576e-4f4c-4541-a637-daa124cbf8dd] Extended root virtual disk {{(pid=62070) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 764.781930] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c10c7b6-8547-4b34-b5eb-bc5c63291b1d {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.805716] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-682a69a1-3ef5-481f-9200-81623493ef57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 13e3576e-4f4c-4541-a637-daa124cbf8dd] Reconfiguring VM instance instance-00000030 to attach disk [datastore2] 13e3576e-4f4c-4541-a637-daa124cbf8dd/13e3576e-4f4c-4541-a637-daa124cbf8dd.vmdk or device None with type sparse {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 764.809421] env[62070]: DEBUG nova.network.neutron [req-a4a948ea-c0e0-4d41-be7c-dfbd6f5b5bc4 req-0688893d-230f-4e2f-9a50-e826571a17df service nova] [instance: 359ae9f2-a907-459e-99b9-3e043d5d015f] Updated VIF entry in instance network info cache for port 98c76f66-e1d2-463c-b1a1-7d539909e2a9. {{(pid=62070) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 764.809517] env[62070]: DEBUG nova.network.neutron [req-a4a948ea-c0e0-4d41-be7c-dfbd6f5b5bc4 req-0688893d-230f-4e2f-9a50-e826571a17df service nova] [instance: 359ae9f2-a907-459e-99b9-3e043d5d015f] Updating instance_info_cache with network_info: [{"id": "98c76f66-e1d2-463c-b1a1-7d539909e2a9", "address": "fa:16:3e:83:6d:a8", "network": {"id": "df33a08d-88db-4a22-846f-5b414705fc65", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.214", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "9d42cb2bbadf40d6b35f237f71234611", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4728adca-2846-416a-91a3-deb898faf1f3", "external-id": "nsx-vlan-transportzone-823", "segmentation_id": 823, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap98c76f66-e1", "ovs_interfaceid": "98c76f66-e1d2-463c-b1a1-7d539909e2a9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 764.810868] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-95818f82-db0e-4e33-8401-d91c30da22e0 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.828959] env[62070]: DEBUG oslo_concurrency.lockutils [req-a4a948ea-c0e0-4d41-be7c-dfbd6f5b5bc4 req-0688893d-230f-4e2f-9a50-e826571a17df service nova] 
Releasing lock "refresh_cache-359ae9f2-a907-459e-99b9-3e043d5d015f" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 764.839195] env[62070]: DEBUG oslo_vmware.api [None req-e4f46748-c02f-46f3-a136-2698cbfaeb86 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]524e7a76-5563-09e1-f86b-6a5ff748cb21, 'name': SearchDatastore_Task, 'duration_secs': 0.009345} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 764.842674] env[62070]: DEBUG oslo_concurrency.lockutils [None req-e4f46748-c02f-46f3-a136-2698cbfaeb86 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Releasing lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 764.842899] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-e4f46748-c02f-46f3-a136-2698cbfaeb86 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] [instance: 359ae9f2-a907-459e-99b9-3e043d5d015f] Processing image 43ea607c-7ece-4601-9b11-75c6a16aa7dd {{(pid=62070) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 764.843183] env[62070]: DEBUG oslo_concurrency.lockutils [None req-e4f46748-c02f-46f3-a136-2698cbfaeb86 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 764.843334] env[62070]: DEBUG oslo_concurrency.lockutils [None req-e4f46748-c02f-46f3-a136-2698cbfaeb86 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Acquired lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 764.843511] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-e4f46748-c02f-46f3-a136-2698cbfaeb86 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 764.843848] env[62070]: DEBUG oslo_vmware.api [None req-682a69a1-3ef5-481f-9200-81623493ef57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Waiting for the task: (returnval){ [ 764.843848] env[62070]: value = "task-1121600" [ 764.843848] env[62070]: _type = "Task" [ 764.843848] env[62070]: } to complete. 
{{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 764.844214] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-67cdfa0e-1d6c-4cd4-85e1-2c92dbeea359 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.855499] env[62070]: DEBUG oslo_vmware.api [None req-682a69a1-3ef5-481f-9200-81623493ef57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Task: {'id': task-1121600, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 764.859077] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-e4f46748-c02f-46f3-a136-2698cbfaeb86 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 764.859270] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-e4f46748-c02f-46f3-a136-2698cbfaeb86 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62070) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 764.861066] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0ddc638f-3563-4baa-a5c2-b78bb6c2101a {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.865144] env[62070]: DEBUG oslo_vmware.api [None req-e4f46748-c02f-46f3-a136-2698cbfaeb86 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Waiting for the task: (returnval){ [ 764.865144] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]520565d4-065b-9ee1-2d1d-3299fd3ddc8d" [ 764.865144] env[62070]: _type = "Task" [ 764.865144] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 764.873277] env[62070]: DEBUG oslo_vmware.api [None req-e4f46748-c02f-46f3-a136-2698cbfaeb86 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]520565d4-065b-9ee1-2d1d-3299fd3ddc8d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 764.893904] env[62070]: DEBUG oslo_vmware.api [None req-c8619bac-3476-49c3-92b7-dcaac00d2762 tempest-ServersTestFqdnHostnames-1065104165 tempest-ServersTestFqdnHostnames-1065104165-project-member] Task: {'id': task-1121599, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.243171} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 764.894263] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-c8619bac-3476-49c3-92b7-dcaac00d2762 tempest-ServersTestFqdnHostnames-1065104165 tempest-ServersTestFqdnHostnames-1065104165-project-member] [instance: 748c94c7-1233-44f4-a71a-176b26518399] Extended root virtual disk {{(pid=62070) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 764.895080] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-422162ba-29db-4192-b361-10d6b2b6974c {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.922262] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-c8619bac-3476-49c3-92b7-dcaac00d2762 tempest-ServersTestFqdnHostnames-1065104165 tempest-ServersTestFqdnHostnames-1065104165-project-member] [instance: 748c94c7-1233-44f4-a71a-176b26518399] Reconfiguring VM instance instance-00000031 to attach disk [datastore2] 748c94c7-1233-44f4-a71a-176b26518399/748c94c7-1233-44f4-a71a-176b26518399.vmdk or device None with type sparse {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 764.926315] env[62070]: DEBUG oslo_concurrency.lockutils [None req-f6ff1678-8a45-4fca-ba2d-cf78a12bb48c tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Acquiring lock "refresh_cache-242cf24f-f9e0-49ca-9b3e-a2b88b3cdcdc" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 764.926315] env[62070]: DEBUG oslo_concurrency.lockutils [None req-f6ff1678-8a45-4fca-ba2d-cf78a12bb48c tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Acquired lock "refresh_cache-242cf24f-f9e0-49ca-9b3e-a2b88b3cdcdc" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 764.926315] env[62070]: DEBUG nova.network.neutron [None req-f6ff1678-8a45-4fca-ba2d-cf78a12bb48c tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] [instance: 242cf24f-f9e0-49ca-9b3e-a2b88b3cdcdc] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 764.931118] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8e8397dc-3670-43d7-8cb2-f5713e08a9c8 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.952630] env[62070]: DEBUG oslo_vmware.api [None req-c8619bac-3476-49c3-92b7-dcaac00d2762 tempest-ServersTestFqdnHostnames-1065104165 tempest-ServersTestFqdnHostnames-1065104165-project-member] Waiting for the task: (returnval){ [ 764.952630] env[62070]: value = "task-1121601" [ 764.952630] env[62070]: _type = "Task" [ 764.952630] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 764.963212] env[62070]: DEBUG oslo_vmware.api [None req-c8619bac-3476-49c3-92b7-dcaac00d2762 tempest-ServersTestFqdnHostnames-1065104165 tempest-ServersTestFqdnHostnames-1065104165-project-member] Task: {'id': task-1121601, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 764.987268] env[62070]: DEBUG nova.network.neutron [None req-f6ff1678-8a45-4fca-ba2d-cf78a12bb48c tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] [instance: 242cf24f-f9e0-49ca-9b3e-a2b88b3cdcdc] Instance cache missing network info. {{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 764.997512] env[62070]: DEBUG oslo_vmware.api [None req-884b25a6-886e-4a2f-85f7-4f7a1048c76f tempest-ServersTestBootFromVolume-622206804 tempest-ServersTestBootFromVolume-622206804-project-member] Task: {'id': task-1121598, 'name': Rename_Task, 'duration_secs': 0.69652} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 764.997512] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-884b25a6-886e-4a2f-85f7-4f7a1048c76f tempest-ServersTestBootFromVolume-622206804 tempest-ServersTestBootFromVolume-622206804-project-member] [instance: dd5d90e8-964a-4e1c-a98a-bcba37a1d79e] Powering on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 764.997512] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-dd17e650-41fe-4191-bc4f-b7fe85d6d80d {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.006724] env[62070]: DEBUG oslo_vmware.api [None req-884b25a6-886e-4a2f-85f7-4f7a1048c76f tempest-ServersTestBootFromVolume-622206804 tempest-ServersTestBootFromVolume-622206804-project-member] Waiting for the task: (returnval){ [ 765.006724] env[62070]: value = "task-1121602" [ 765.006724] env[62070]: _type = "Task" [ 765.006724] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 765.018997] env[62070]: DEBUG oslo_vmware.api [None req-884b25a6-886e-4a2f-85f7-4f7a1048c76f tempest-ServersTestBootFromVolume-622206804 tempest-ServersTestBootFromVolume-622206804-project-member] Task: {'id': task-1121602, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 765.075388] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b64f342-9e47-45d8-97f5-eddd119efd6e {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.083043] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd152e9b-6a78-4e6f-a5bd-8ce4a323f7c9 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.118229] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-099c4613-fae8-4e78-8431-6b35fa32b67c {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.126301] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70fd61dc-a3ab-423d-94b0-1d59f1929736 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.139848] env[62070]: DEBUG nova.compute.provider_tree [None req-5a8984d4-9dbc-4d3d-b314-9d28f25f6fb9 tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 765.202139] env[62070]: DEBUG nova.network.neutron [None req-f6ff1678-8a45-4fca-ba2d-cf78a12bb48c tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] [instance: 242cf24f-f9e0-49ca-9b3e-a2b88b3cdcdc] Updating instance_info_cache with network_info: [{"id": "43964a23-7533-4cae-9837-85e33059c929", "address": "fa:16:3e:80:ce:af", "network": {"id": "df33a08d-88db-4a22-846f-5b414705fc65", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.226", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "9d42cb2bbadf40d6b35f237f71234611", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4728adca-2846-416a-91a3-deb898faf1f3", "external-id": "nsx-vlan-transportzone-823", "segmentation_id": 823, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap43964a23-75", "ovs_interfaceid": "43964a23-7533-4cae-9837-85e33059c929", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 765.218852] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Releasing lock "refresh_cache-d148d561-3211-4f1f-965a-f2b14cd60b11" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 765.219074] env[62070]: DEBUG nova.compute.manager [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] [instance: d148d561-3211-4f1f-965a-f2b14cd60b11] 
Updated the network info_cache for instance {{(pid=62070) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9999}} [ 765.219523] env[62070]: DEBUG oslo_service.periodic_task [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62070) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 765.219795] env[62070]: DEBUG oslo_service.periodic_task [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62070) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 765.219994] env[62070]: DEBUG oslo_service.periodic_task [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62070) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 765.220203] env[62070]: DEBUG oslo_service.periodic_task [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62070) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 765.220393] env[62070]: DEBUG oslo_service.periodic_task [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62070) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 765.220577] env[62070]: DEBUG oslo_service.periodic_task [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62070) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 765.220708] env[62070]: DEBUG nova.compute.manager [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62070) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}} [ 765.220851] env[62070]: DEBUG oslo_service.periodic_task [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62070) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 765.357202] env[62070]: DEBUG oslo_vmware.api [None req-682a69a1-3ef5-481f-9200-81623493ef57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Task: {'id': task-1121600, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 765.374303] env[62070]: DEBUG oslo_vmware.api [None req-e4f46748-c02f-46f3-a136-2698cbfaeb86 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]520565d4-065b-9ee1-2d1d-3299fd3ddc8d, 'name': SearchDatastore_Task, 'duration_secs': 0.022718} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 765.375073] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d2a5c17e-af39-42c8-97d1-421ac6415f0b {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.379901] env[62070]: DEBUG oslo_vmware.api [None req-e4f46748-c02f-46f3-a136-2698cbfaeb86 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Waiting for the task: (returnval){ [ 765.379901] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]52bd6305-fe3b-44bc-63c5-5b00d5da4464" [ 765.379901] env[62070]: _type = "Task" [ 765.379901] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 765.387197] env[62070]: DEBUG oslo_vmware.api [None req-e4f46748-c02f-46f3-a136-2698cbfaeb86 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52bd6305-fe3b-44bc-63c5-5b00d5da4464, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 765.461152] env[62070]: DEBUG oslo_vmware.api [None req-c8619bac-3476-49c3-92b7-dcaac00d2762 tempest-ServersTestFqdnHostnames-1065104165 tempest-ServersTestFqdnHostnames-1065104165-project-member] Task: {'id': task-1121601, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 765.516596] env[62070]: DEBUG oslo_vmware.api [None req-884b25a6-886e-4a2f-85f7-4f7a1048c76f tempest-ServersTestBootFromVolume-622206804 tempest-ServersTestBootFromVolume-622206804-project-member] Task: {'id': task-1121602, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 765.643037] env[62070]: DEBUG nova.scheduler.client.report [None req-5a8984d4-9dbc-4d3d-b314-9d28f25f6fb9 tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 765.705120] env[62070]: DEBUG oslo_concurrency.lockutils [None req-f6ff1678-8a45-4fca-ba2d-cf78a12bb48c tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Releasing lock "refresh_cache-242cf24f-f9e0-49ca-9b3e-a2b88b3cdcdc" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 765.705253] env[62070]: DEBUG nova.compute.manager [None req-f6ff1678-8a45-4fca-ba2d-cf78a12bb48c tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] [instance: 242cf24f-f9e0-49ca-9b3e-a2b88b3cdcdc] Instance network_info: |[{"id": "43964a23-7533-4cae-9837-85e33059c929", "address": "fa:16:3e:80:ce:af", "network": {"id": "df33a08d-88db-4a22-846f-5b414705fc65", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.226", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "9d42cb2bbadf40d6b35f237f71234611", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4728adca-2846-416a-91a3-deb898faf1f3", "external-id": "nsx-vlan-transportzone-823", "segmentation_id": 823, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap43964a23-75", "ovs_interfaceid": "43964a23-7533-4cae-9837-85e33059c929", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 765.705993] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-f6ff1678-8a45-4fca-ba2d-cf78a12bb48c tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] [instance: 242cf24f-f9e0-49ca-9b3e-a2b88b3cdcdc] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:80:ce:af', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4728adca-2846-416a-91a3-deb898faf1f3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '43964a23-7533-4cae-9837-85e33059c929', 'vif_model': 'vmxnet3'}] {{(pid=62070) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 765.713560] env[62070]: DEBUG oslo.service.loopingcall [None req-f6ff1678-8a45-4fca-ba2d-cf78a12bb48c tempest-ListImageFiltersTestJSON-300603416 
tempest-ListImageFiltersTestJSON-300603416-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 765.713794] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 242cf24f-f9e0-49ca-9b3e-a2b88b3cdcdc] Creating VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 765.714023] env[62070]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-287b36df-a3ff-493b-80cc-fcfd37ae6d72 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.732110] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 765.741158] env[62070]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 765.741158] env[62070]: value = "task-1121603" [ 765.741158] env[62070]: _type = "Task" [ 765.741158] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 765.747399] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1121603, 'name': CreateVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 765.855594] env[62070]: DEBUG oslo_vmware.api [None req-682a69a1-3ef5-481f-9200-81623493ef57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Task: {'id': task-1121600, 'name': ReconfigVM_Task, 'duration_secs': 0.80631} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 765.855891] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-682a69a1-3ef5-481f-9200-81623493ef57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 13e3576e-4f4c-4541-a637-daa124cbf8dd] Reconfigured VM instance instance-00000030 to attach disk [datastore2] 13e3576e-4f4c-4541-a637-daa124cbf8dd/13e3576e-4f4c-4541-a637-daa124cbf8dd.vmdk or device None with type sparse {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 765.856566] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3f8f4e78-7a60-4cf8-a429-027b6e1182eb {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.863290] env[62070]: DEBUG oslo_vmware.api [None req-682a69a1-3ef5-481f-9200-81623493ef57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Waiting for the task: (returnval){ [ 765.863290] env[62070]: value = "task-1121604" [ 765.863290] env[62070]: _type = "Task" [ 765.863290] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 765.871342] env[62070]: DEBUG oslo_vmware.api [None req-682a69a1-3ef5-481f-9200-81623493ef57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Task: {'id': task-1121604, 'name': Rename_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 765.891382] env[62070]: DEBUG oslo_vmware.api [None req-e4f46748-c02f-46f3-a136-2698cbfaeb86 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52bd6305-fe3b-44bc-63c5-5b00d5da4464, 'name': SearchDatastore_Task, 'duration_secs': 0.049509} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 765.891677] env[62070]: DEBUG oslo_concurrency.lockutils [None req-e4f46748-c02f-46f3-a136-2698cbfaeb86 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Releasing lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 765.891993] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-e4f46748-c02f-46f3-a136-2698cbfaeb86 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore2] 359ae9f2-a907-459e-99b9-3e043d5d015f/359ae9f2-a907-459e-99b9-3e043d5d015f.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 765.892344] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-863a5c79-09ea-4b57-ae53-90cf488690e4 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.899313] env[62070]: DEBUG oslo_vmware.api [None req-e4f46748-c02f-46f3-a136-2698cbfaeb86 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Waiting for the task: (returnval){ [ 765.899313] env[62070]: value = "task-1121605" [ 765.899313] env[62070]: _type = "Task" [ 765.899313] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 765.908799] env[62070]: DEBUG oslo_vmware.api [None req-e4f46748-c02f-46f3-a136-2698cbfaeb86 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Task: {'id': task-1121605, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 765.962446] env[62070]: DEBUG oslo_vmware.api [None req-c8619bac-3476-49c3-92b7-dcaac00d2762 tempest-ServersTestFqdnHostnames-1065104165 tempest-ServersTestFqdnHostnames-1065104165-project-member] Task: {'id': task-1121601, 'name': ReconfigVM_Task, 'duration_secs': 0.736026} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 765.962764] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-c8619bac-3476-49c3-92b7-dcaac00d2762 tempest-ServersTestFqdnHostnames-1065104165 tempest-ServersTestFqdnHostnames-1065104165-project-member] [instance: 748c94c7-1233-44f4-a71a-176b26518399] Reconfigured VM instance instance-00000031 to attach disk [datastore2] 748c94c7-1233-44f4-a71a-176b26518399/748c94c7-1233-44f4-a71a-176b26518399.vmdk or device None with type sparse {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 765.963557] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ff9696c8-5dd9-4a7a-a025-6dca1ac96f14 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.971136] env[62070]: DEBUG oslo_vmware.api [None req-c8619bac-3476-49c3-92b7-dcaac00d2762 tempest-ServersTestFqdnHostnames-1065104165 tempest-ServersTestFqdnHostnames-1065104165-project-member] Waiting for the task: (returnval){ [ 765.971136] env[62070]: value = "task-1121606" [ 765.971136] env[62070]: _type = "Task" [ 765.971136] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 765.980247] env[62070]: DEBUG oslo_vmware.api [None req-c8619bac-3476-49c3-92b7-dcaac00d2762 tempest-ServersTestFqdnHostnames-1065104165 tempest-ServersTestFqdnHostnames-1065104165-project-member] Task: {'id': task-1121606, 'name': Rename_Task} progress is 5%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 766.016985] env[62070]: DEBUG oslo_vmware.api [None req-884b25a6-886e-4a2f-85f7-4f7a1048c76f tempest-ServersTestBootFromVolume-622206804 tempest-ServersTestBootFromVolume-622206804-project-member] Task: {'id': task-1121602, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 766.042233] env[62070]: DEBUG nova.compute.manager [req-18b00e49-fa8e-42f8-8c06-4e94a1b46464 req-494392e3-b99a-499c-8900-3472d0677855 service nova] [instance: 242cf24f-f9e0-49ca-9b3e-a2b88b3cdcdc] Received event network-vif-plugged-43964a23-7533-4cae-9837-85e33059c929 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 766.042756] env[62070]: DEBUG oslo_concurrency.lockutils [req-18b00e49-fa8e-42f8-8c06-4e94a1b46464 req-494392e3-b99a-499c-8900-3472d0677855 service nova] Acquiring lock "242cf24f-f9e0-49ca-9b3e-a2b88b3cdcdc-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 766.042948] env[62070]: DEBUG oslo_concurrency.lockutils [req-18b00e49-fa8e-42f8-8c06-4e94a1b46464 req-494392e3-b99a-499c-8900-3472d0677855 service nova] Lock "242cf24f-f9e0-49ca-9b3e-a2b88b3cdcdc-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 766.043115] env[62070]: DEBUG oslo_concurrency.lockutils [req-18b00e49-fa8e-42f8-8c06-4e94a1b46464 req-494392e3-b99a-499c-8900-3472d0677855 service nova] Lock "242cf24f-f9e0-49ca-9b3e-a2b88b3cdcdc-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 766.043305] env[62070]: DEBUG nova.compute.manager [req-18b00e49-fa8e-42f8-8c06-4e94a1b46464 req-494392e3-b99a-499c-8900-3472d0677855 service nova] [instance: 242cf24f-f9e0-49ca-9b3e-a2b88b3cdcdc] No waiting events found dispatching network-vif-plugged-43964a23-7533-4cae-9837-85e33059c929 {{(pid=62070) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 766.043501] env[62070]: WARNING nova.compute.manager [req-18b00e49-fa8e-42f8-8c06-4e94a1b46464 req-494392e3-b99a-499c-8900-3472d0677855 service nova] [instance: 242cf24f-f9e0-49ca-9b3e-a2b88b3cdcdc] Received unexpected event network-vif-plugged-43964a23-7533-4cae-9837-85e33059c929 for instance with vm_state building and task_state spawning. [ 766.043686] env[62070]: DEBUG nova.compute.manager [req-18b00e49-fa8e-42f8-8c06-4e94a1b46464 req-494392e3-b99a-499c-8900-3472d0677855 service nova] [instance: 242cf24f-f9e0-49ca-9b3e-a2b88b3cdcdc] Received event network-changed-43964a23-7533-4cae-9837-85e33059c929 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 766.043922] env[62070]: DEBUG nova.compute.manager [req-18b00e49-fa8e-42f8-8c06-4e94a1b46464 req-494392e3-b99a-499c-8900-3472d0677855 service nova] [instance: 242cf24f-f9e0-49ca-9b3e-a2b88b3cdcdc] Refreshing instance network info cache due to event network-changed-43964a23-7533-4cae-9837-85e33059c929. 
{{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 766.044241] env[62070]: DEBUG oslo_concurrency.lockutils [req-18b00e49-fa8e-42f8-8c06-4e94a1b46464 req-494392e3-b99a-499c-8900-3472d0677855 service nova] Acquiring lock "refresh_cache-242cf24f-f9e0-49ca-9b3e-a2b88b3cdcdc" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 766.044332] env[62070]: DEBUG oslo_concurrency.lockutils [req-18b00e49-fa8e-42f8-8c06-4e94a1b46464 req-494392e3-b99a-499c-8900-3472d0677855 service nova] Acquired lock "refresh_cache-242cf24f-f9e0-49ca-9b3e-a2b88b3cdcdc" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 766.044492] env[62070]: DEBUG nova.network.neutron [req-18b00e49-fa8e-42f8-8c06-4e94a1b46464 req-494392e3-b99a-499c-8900-3472d0677855 service nova] [instance: 242cf24f-f9e0-49ca-9b3e-a2b88b3cdcdc] Refreshing network info cache for port 43964a23-7533-4cae-9837-85e33059c929 {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 766.148047] env[62070]: DEBUG oslo_concurrency.lockutils [None req-5a8984d4-9dbc-4d3d-b314-9d28f25f6fb9 tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.151s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 766.151085] env[62070]: DEBUG oslo_concurrency.lockutils [None req-a1a0245d-1ee3-4e1c-adaf-6c5d189bff8e tempest-ServerMetadataNegativeTestJSON-1698615623 tempest-ServerMetadataNegativeTestJSON-1698615623-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.018s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 766.152497] env[62070]: INFO nova.compute.claims [None req-a1a0245d-1ee3-4e1c-adaf-6c5d189bff8e tempest-ServerMetadataNegativeTestJSON-1698615623 tempest-ServerMetadataNegativeTestJSON-1698615623-project-member] [instance: 20e7a993-b1fb-4359-ab35-8b0f06ca0121] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 766.176619] env[62070]: INFO nova.scheduler.client.report [None req-5a8984d4-9dbc-4d3d-b314-9d28f25f6fb9 tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Deleted allocations for instance 1c1730e5-88af-4c7f-8bcc-d494db2cd723 [ 766.253215] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1121603, 'name': CreateVM_Task, 'duration_secs': 0.369016} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 766.253337] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 242cf24f-f9e0-49ca-9b3e-a2b88b3cdcdc] Created VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 766.254152] env[62070]: DEBUG oslo_concurrency.lockutils [None req-f6ff1678-8a45-4fca-ba2d-cf78a12bb48c tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 766.254400] env[62070]: DEBUG oslo_concurrency.lockutils [None req-f6ff1678-8a45-4fca-ba2d-cf78a12bb48c tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Acquired lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 766.254790] env[62070]: DEBUG oslo_concurrency.lockutils [None req-f6ff1678-8a45-4fca-ba2d-cf78a12bb48c tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 766.256051] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fcd6faa0-8c67-464a-b3ed-fa3065595ab3 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.262762] env[62070]: DEBUG oslo_vmware.api [None req-f6ff1678-8a45-4fca-ba2d-cf78a12bb48c tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Waiting for the task: (returnval){ [ 766.262762] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]52b506cb-a1d6-ad95-a675-7901ede70148" [ 766.262762] env[62070]: _type = "Task" [ 766.262762] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 766.271097] env[62070]: DEBUG oslo_vmware.api [None req-f6ff1678-8a45-4fca-ba2d-cf78a12bb48c tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52b506cb-a1d6-ad95-a675-7901ede70148, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 766.375205] env[62070]: DEBUG oslo_vmware.api [None req-682a69a1-3ef5-481f-9200-81623493ef57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Task: {'id': task-1121604, 'name': Rename_Task, 'duration_secs': 0.162435} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 766.375505] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-682a69a1-3ef5-481f-9200-81623493ef57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 13e3576e-4f4c-4541-a637-daa124cbf8dd] Powering on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 766.375723] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b7edce19-4eaf-4758-9490-f01a78cca220 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.383408] env[62070]: DEBUG oslo_vmware.api [None req-682a69a1-3ef5-481f-9200-81623493ef57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Waiting for the task: (returnval){ [ 766.383408] env[62070]: value = "task-1121607" [ 766.383408] env[62070]: _type = "Task" [ 766.383408] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 766.391878] env[62070]: DEBUG oslo_vmware.api [None req-682a69a1-3ef5-481f-9200-81623493ef57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Task: {'id': task-1121607, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 766.409819] env[62070]: DEBUG oslo_vmware.api [None req-e4f46748-c02f-46f3-a136-2698cbfaeb86 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Task: {'id': task-1121605, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 766.480433] env[62070]: DEBUG oslo_vmware.api [None req-c8619bac-3476-49c3-92b7-dcaac00d2762 tempest-ServersTestFqdnHostnames-1065104165 tempest-ServersTestFqdnHostnames-1065104165-project-member] Task: {'id': task-1121606, 'name': Rename_Task, 'duration_secs': 0.168502} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 766.480711] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-c8619bac-3476-49c3-92b7-dcaac00d2762 tempest-ServersTestFqdnHostnames-1065104165 tempest-ServersTestFqdnHostnames-1065104165-project-member] [instance: 748c94c7-1233-44f4-a71a-176b26518399] Powering on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 766.480985] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4f939564-9359-4031-8f16-f0d5c3718f0f {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.486094] env[62070]: DEBUG oslo_vmware.api [None req-c8619bac-3476-49c3-92b7-dcaac00d2762 tempest-ServersTestFqdnHostnames-1065104165 tempest-ServersTestFqdnHostnames-1065104165-project-member] Waiting for the task: (returnval){ [ 766.486094] env[62070]: value = "task-1121608" [ 766.486094] env[62070]: _type = "Task" [ 766.486094] env[62070]: } to complete. 
{{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 766.493209] env[62070]: DEBUG oslo_vmware.api [None req-c8619bac-3476-49c3-92b7-dcaac00d2762 tempest-ServersTestFqdnHostnames-1065104165 tempest-ServersTestFqdnHostnames-1065104165-project-member] Task: {'id': task-1121608, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 766.515391] env[62070]: DEBUG oslo_vmware.api [None req-884b25a6-886e-4a2f-85f7-4f7a1048c76f tempest-ServersTestBootFromVolume-622206804 tempest-ServersTestBootFromVolume-622206804-project-member] Task: {'id': task-1121602, 'name': PowerOnVM_Task, 'duration_secs': 1.259802} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 766.515648] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-884b25a6-886e-4a2f-85f7-4f7a1048c76f tempest-ServersTestBootFromVolume-622206804 tempest-ServersTestBootFromVolume-622206804-project-member] [instance: dd5d90e8-964a-4e1c-a98a-bcba37a1d79e] Powered on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 766.515874] env[62070]: INFO nova.compute.manager [None req-884b25a6-886e-4a2f-85f7-4f7a1048c76f tempest-ServersTestBootFromVolume-622206804 tempest-ServersTestBootFromVolume-622206804-project-member] [instance: dd5d90e8-964a-4e1c-a98a-bcba37a1d79e] Took 10.89 seconds to spawn the instance on the hypervisor. [ 766.516071] env[62070]: DEBUG nova.compute.manager [None req-884b25a6-886e-4a2f-85f7-4f7a1048c76f tempest-ServersTestBootFromVolume-622206804 tempest-ServersTestBootFromVolume-622206804-project-member] [instance: dd5d90e8-964a-4e1c-a98a-bcba37a1d79e] Checking state {{(pid=62070) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 766.516875] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e83ed2b1-f130-46f1-a183-7d755764bcb3 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.684048] env[62070]: DEBUG oslo_concurrency.lockutils [None req-5a8984d4-9dbc-4d3d-b314-9d28f25f6fb9 tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Lock "1c1730e5-88af-4c7f-8bcc-d494db2cd723" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 24.135s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 766.773387] env[62070]: DEBUG oslo_vmware.api [None req-f6ff1678-8a45-4fca-ba2d-cf78a12bb48c tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52b506cb-a1d6-ad95-a675-7901ede70148, 'name': SearchDatastore_Task, 'duration_secs': 0.060915} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 766.773723] env[62070]: DEBUG oslo_concurrency.lockutils [None req-f6ff1678-8a45-4fca-ba2d-cf78a12bb48c tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Releasing lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 766.773970] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-f6ff1678-8a45-4fca-ba2d-cf78a12bb48c tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] [instance: 242cf24f-f9e0-49ca-9b3e-a2b88b3cdcdc] Processing image 43ea607c-7ece-4601-9b11-75c6a16aa7dd {{(pid=62070) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 766.774520] env[62070]: DEBUG oslo_concurrency.lockutils [None req-f6ff1678-8a45-4fca-ba2d-cf78a12bb48c tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 766.774520] env[62070]: DEBUG oslo_concurrency.lockutils [None req-f6ff1678-8a45-4fca-ba2d-cf78a12bb48c tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Acquired lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 766.774520] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-f6ff1678-8a45-4fca-ba2d-cf78a12bb48c tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 766.774772] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2c5e7050-862a-4e3b-bd36-0d32bc985539 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.783351] env[62070]: DEBUG nova.network.neutron [req-18b00e49-fa8e-42f8-8c06-4e94a1b46464 req-494392e3-b99a-499c-8900-3472d0677855 service nova] [instance: 242cf24f-f9e0-49ca-9b3e-a2b88b3cdcdc] Updated VIF entry in instance network info cache for port 43964a23-7533-4cae-9837-85e33059c929. 
{{(pid=62070) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 766.783691] env[62070]: DEBUG nova.network.neutron [req-18b00e49-fa8e-42f8-8c06-4e94a1b46464 req-494392e3-b99a-499c-8900-3472d0677855 service nova] [instance: 242cf24f-f9e0-49ca-9b3e-a2b88b3cdcdc] Updating instance_info_cache with network_info: [{"id": "43964a23-7533-4cae-9837-85e33059c929", "address": "fa:16:3e:80:ce:af", "network": {"id": "df33a08d-88db-4a22-846f-5b414705fc65", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.226", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "9d42cb2bbadf40d6b35f237f71234611", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4728adca-2846-416a-91a3-deb898faf1f3", "external-id": "nsx-vlan-transportzone-823", "segmentation_id": 823, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap43964a23-75", "ovs_interfaceid": "43964a23-7533-4cae-9837-85e33059c929", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 766.787322] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-f6ff1678-8a45-4fca-ba2d-cf78a12bb48c tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 766.787498] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-f6ff1678-8a45-4fca-ba2d-cf78a12bb48c tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62070) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 766.791275] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0a8c9052-35b8-43c5-a2e7-20beadcc97f2 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.795282] env[62070]: DEBUG oslo_vmware.api [None req-f6ff1678-8a45-4fca-ba2d-cf78a12bb48c tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Waiting for the task: (returnval){ [ 766.795282] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]529daa2b-5875-71a0-f402-11b7221a3149" [ 766.795282] env[62070]: _type = "Task" [ 766.795282] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 766.804039] env[62070]: DEBUG oslo_vmware.api [None req-f6ff1678-8a45-4fca-ba2d-cf78a12bb48c tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]529daa2b-5875-71a0-f402-11b7221a3149, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 766.895255] env[62070]: DEBUG oslo_vmware.api [None req-682a69a1-3ef5-481f-9200-81623493ef57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Task: {'id': task-1121607, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 766.908951] env[62070]: DEBUG oslo_vmware.api [None req-e4f46748-c02f-46f3-a136-2698cbfaeb86 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Task: {'id': task-1121605, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.560659} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 766.909244] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-e4f46748-c02f-46f3-a136-2698cbfaeb86 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore2] 359ae9f2-a907-459e-99b9-3e043d5d015f/359ae9f2-a907-459e-99b9-3e043d5d015f.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 766.909479] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-e4f46748-c02f-46f3-a136-2698cbfaeb86 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] [instance: 359ae9f2-a907-459e-99b9-3e043d5d015f] Extending root virtual disk to 1048576 {{(pid=62070) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 766.909728] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7d86d0f8-baba-43c3-aa73-43f0f9382418 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.916863] env[62070]: DEBUG oslo_vmware.api [None req-e4f46748-c02f-46f3-a136-2698cbfaeb86 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Waiting for the task: (returnval){ [ 766.916863] env[62070]: value = "task-1121609" [ 766.916863] env[62070]: _type = "Task" [ 766.916863] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 766.924561] env[62070]: DEBUG oslo_vmware.api [None req-e4f46748-c02f-46f3-a136-2698cbfaeb86 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Task: {'id': task-1121609, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 766.995760] env[62070]: DEBUG oslo_vmware.api [None req-c8619bac-3476-49c3-92b7-dcaac00d2762 tempest-ServersTestFqdnHostnames-1065104165 tempest-ServersTestFqdnHostnames-1065104165-project-member] Task: {'id': task-1121608, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 767.040589] env[62070]: INFO nova.compute.manager [None req-884b25a6-886e-4a2f-85f7-4f7a1048c76f tempest-ServersTestBootFromVolume-622206804 tempest-ServersTestBootFromVolume-622206804-project-member] [instance: dd5d90e8-964a-4e1c-a98a-bcba37a1d79e] Took 32.08 seconds to build instance. 
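The records above walk Nova's VMware spawn path: the cached image VMDK is copied out of devstack-image-cache_base, the root virtual disk is extended, ReconfigVM_Task attaches it, Rename_Task renames the VM, and PowerOnVM_Task powers it on, with every asynchronous vCenter task submitted and then polled through oslo.vmware's wait_for_task/_poll_task loop (the "Waiting for the task ... to complete" and "progress is N%" lines). Below is a minimal Python sketch of that submit-then-poll pattern; it is not Nova's code, and the VMwareAPISession argument names plus the placeholder host and credentials are assumptions to verify against the installed oslo.vmware release.

# Hedged sketch of the submit-then-poll pattern seen in the log above; not Nova's code.
from oslo_vmware import api

def power_on(session, vm_ref):
    """Submit PowerOnVM_Task and block until vCenter reports completion.

    wait_for_task() drives the same polling loop that produces the
    "progress is N%" DEBUG lines, returning the task info on success and
    raising if the task errors out.
    """
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    return session.wait_for_task(task)

if __name__ == '__main__':
    # Placeholder endpoint and credentials (assumptions, not values from this log).
    session = api.VMwareAPISession('vcenter.example.invalid', 'user', 'password',
                                   api_retry_count=10, task_poll_interval=0.5)
    # A real caller would first resolve vm_ref with a PropertyCollector query,
    # as the RetrievePropertiesEx invocations above do, before calling power_on().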
[ 767.287449] env[62070]: DEBUG oslo_concurrency.lockutils [req-18b00e49-fa8e-42f8-8c06-4e94a1b46464 req-494392e3-b99a-499c-8900-3472d0677855 service nova] Releasing lock "refresh_cache-242cf24f-f9e0-49ca-9b3e-a2b88b3cdcdc" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 767.305265] env[62070]: DEBUG oslo_vmware.api [None req-f6ff1678-8a45-4fca-ba2d-cf78a12bb48c tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]529daa2b-5875-71a0-f402-11b7221a3149, 'name': SearchDatastore_Task, 'duration_secs': 0.01578} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 767.309318] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3b7d8813-463f-4871-ab00-b649c6181f22 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.314626] env[62070]: DEBUG oslo_vmware.api [None req-f6ff1678-8a45-4fca-ba2d-cf78a12bb48c tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Waiting for the task: (returnval){ [ 767.314626] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]52c154a2-6e7a-7791-c2c7-015b3dc9c0ea" [ 767.314626] env[62070]: _type = "Task" [ 767.314626] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 767.322610] env[62070]: DEBUG oslo_vmware.api [None req-f6ff1678-8a45-4fca-ba2d-cf78a12bb48c tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52c154a2-6e7a-7791-c2c7-015b3dc9c0ea, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 767.393521] env[62070]: DEBUG oslo_vmware.api [None req-682a69a1-3ef5-481f-9200-81623493ef57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Task: {'id': task-1121607, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 767.426505] env[62070]: DEBUG oslo_vmware.api [None req-e4f46748-c02f-46f3-a136-2698cbfaeb86 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Task: {'id': task-1121609, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.270146} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 767.426505] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-e4f46748-c02f-46f3-a136-2698cbfaeb86 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] [instance: 359ae9f2-a907-459e-99b9-3e043d5d015f] Extended root virtual disk {{(pid=62070) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 767.429606] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bac68339-4848-46fb-806f-2b1cfcad6b53 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.453938] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-e4f46748-c02f-46f3-a136-2698cbfaeb86 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] [instance: 359ae9f2-a907-459e-99b9-3e043d5d015f] Reconfiguring VM instance instance-00000032 to attach disk [datastore2] 359ae9f2-a907-459e-99b9-3e043d5d015f/359ae9f2-a907-459e-99b9-3e043d5d015f.vmdk or device None with type sparse {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 767.457213] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fb8c8211-d22e-4c0a-b018-94f20765eb90 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.478420] env[62070]: DEBUG oslo_vmware.api [None req-e4f46748-c02f-46f3-a136-2698cbfaeb86 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Waiting for the task: (returnval){ [ 767.478420] env[62070]: value = "task-1121610" [ 767.478420] env[62070]: _type = "Task" [ 767.478420] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 767.489874] env[62070]: DEBUG oslo_vmware.api [None req-e4f46748-c02f-46f3-a136-2698cbfaeb86 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Task: {'id': task-1121610, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 767.504019] env[62070]: DEBUG oslo_vmware.api [None req-c8619bac-3476-49c3-92b7-dcaac00d2762 tempest-ServersTestFqdnHostnames-1065104165 tempest-ServersTestFqdnHostnames-1065104165-project-member] Task: {'id': task-1121608, 'name': PowerOnVM_Task, 'duration_secs': 0.626184} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 767.504019] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-c8619bac-3476-49c3-92b7-dcaac00d2762 tempest-ServersTestFqdnHostnames-1065104165 tempest-ServersTestFqdnHostnames-1065104165-project-member] [instance: 748c94c7-1233-44f4-a71a-176b26518399] Powered on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 767.504019] env[62070]: INFO nova.compute.manager [None req-c8619bac-3476-49c3-92b7-dcaac00d2762 tempest-ServersTestFqdnHostnames-1065104165 tempest-ServersTestFqdnHostnames-1065104165-project-member] [instance: 748c94c7-1233-44f4-a71a-176b26518399] Took 8.91 seconds to spawn the instance on the hypervisor. 
[ 767.504019] env[62070]: DEBUG nova.compute.manager [None req-c8619bac-3476-49c3-92b7-dcaac00d2762 tempest-ServersTestFqdnHostnames-1065104165 tempest-ServersTestFqdnHostnames-1065104165-project-member] [instance: 748c94c7-1233-44f4-a71a-176b26518399] Checking state {{(pid=62070) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 767.504019] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e13c6537-074e-4b7b-9af2-d4b0f9e313cb {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.545807] env[62070]: DEBUG oslo_concurrency.lockutils [None req-884b25a6-886e-4a2f-85f7-4f7a1048c76f tempest-ServersTestBootFromVolume-622206804 tempest-ServersTestBootFromVolume-622206804-project-member] Lock "dd5d90e8-964a-4e1c-a98a-bcba37a1d79e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 117.737s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 767.641701] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49ecb607-ebdb-4272-8dd3-6b229f776bb7 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.649049] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57505807-6ca4-4477-be8f-a56216559bc9 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.685605] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c359da80-8d5d-4b8f-9688-00402a8aab9c {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.693147] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-221006b7-9e4c-484b-ab43-6d2e5cb50d74 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.706907] env[62070]: DEBUG nova.compute.provider_tree [None req-a1a0245d-1ee3-4e1c-adaf-6c5d189bff8e tempest-ServerMetadataNegativeTestJSON-1698615623 tempest-ServerMetadataNegativeTestJSON-1698615623-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 767.825125] env[62070]: DEBUG oslo_vmware.api [None req-f6ff1678-8a45-4fca-ba2d-cf78a12bb48c tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52c154a2-6e7a-7791-c2c7-015b3dc9c0ea, 'name': SearchDatastore_Task, 'duration_secs': 0.023109} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 767.825308] env[62070]: DEBUG oslo_concurrency.lockutils [None req-f6ff1678-8a45-4fca-ba2d-cf78a12bb48c tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Releasing lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 767.825561] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-f6ff1678-8a45-4fca-ba2d-cf78a12bb48c tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore2] 242cf24f-f9e0-49ca-9b3e-a2b88b3cdcdc/242cf24f-f9e0-49ca-9b3e-a2b88b3cdcdc.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 767.825837] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8c2dec06-1a1a-45dc-a391-d82ea7fb025e {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.833304] env[62070]: DEBUG oslo_vmware.api [None req-f6ff1678-8a45-4fca-ba2d-cf78a12bb48c tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Waiting for the task: (returnval){ [ 767.833304] env[62070]: value = "task-1121611" [ 767.833304] env[62070]: _type = "Task" [ 767.833304] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 767.841432] env[62070]: DEBUG oslo_vmware.api [None req-f6ff1678-8a45-4fca-ba2d-cf78a12bb48c tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Task: {'id': task-1121611, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 767.894232] env[62070]: DEBUG oslo_vmware.api [None req-682a69a1-3ef5-481f-9200-81623493ef57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Task: {'id': task-1121607, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 767.988039] env[62070]: DEBUG oslo_vmware.api [None req-e4f46748-c02f-46f3-a136-2698cbfaeb86 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Task: {'id': task-1121610, 'name': ReconfigVM_Task, 'duration_secs': 0.307662} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 767.988373] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-e4f46748-c02f-46f3-a136-2698cbfaeb86 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] [instance: 359ae9f2-a907-459e-99b9-3e043d5d015f] Reconfigured VM instance instance-00000032 to attach disk [datastore2] 359ae9f2-a907-459e-99b9-3e043d5d015f/359ae9f2-a907-459e-99b9-3e043d5d015f.vmdk or device None with type sparse {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 767.989007] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f941ce53-c815-4db6-bf91-0b18918ef380 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.994873] env[62070]: DEBUG oslo_vmware.api [None req-e4f46748-c02f-46f3-a136-2698cbfaeb86 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Waiting for the task: (returnval){ [ 767.994873] env[62070]: value = "task-1121612" [ 767.994873] env[62070]: _type = "Task" [ 767.994873] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 768.003469] env[62070]: DEBUG oslo_vmware.api [None req-e4f46748-c02f-46f3-a136-2698cbfaeb86 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Task: {'id': task-1121612, 'name': Rename_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 768.033851] env[62070]: INFO nova.compute.manager [None req-c8619bac-3476-49c3-92b7-dcaac00d2762 tempest-ServersTestFqdnHostnames-1065104165 tempest-ServersTestFqdnHostnames-1065104165-project-member] [instance: 748c94c7-1233-44f4-a71a-176b26518399] Took 30.04 seconds to build instance. [ 768.034046] env[62070]: DEBUG oslo_concurrency.lockutils [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Acquiring lock "27987ff6-77c9-4876-8b39-dcc20ce4158a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 768.034414] env[62070]: DEBUG oslo_concurrency.lockutils [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Lock "27987ff6-77c9-4876-8b39-dcc20ce4158a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 768.049082] env[62070]: DEBUG nova.compute.manager [None req-6578fefe-a6f2-433c-911f-0627322a57aa tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] [instance: f75ed36e-16c8-4a6b-bd39-eb4057ef0691] Starting instance... 
{{(pid=62070) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 768.064128] env[62070]: DEBUG oslo_concurrency.lockutils [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Acquiring lock "e5deccf6-f967-4e3c-bee0-2e1ad0bb4560" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 768.064128] env[62070]: DEBUG oslo_concurrency.lockutils [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Lock "e5deccf6-f967-4e3c-bee0-2e1ad0bb4560" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 768.210194] env[62070]: DEBUG nova.scheduler.client.report [None req-a1a0245d-1ee3-4e1c-adaf-6c5d189bff8e tempest-ServerMetadataNegativeTestJSON-1698615623 tempest-ServerMetadataNegativeTestJSON-1698615623-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 768.284844] env[62070]: DEBUG nova.compute.manager [req-afb67de9-952d-4ba2-a6a2-da892296d79d req-043a2011-ea7b-4f57-9459-4142095078bc service nova] [instance: dd5d90e8-964a-4e1c-a98a-bcba37a1d79e] Received event network-changed-f9ec7c6a-7ed9-4d9d-9e32-c182bc79b903 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 768.285148] env[62070]: DEBUG nova.compute.manager [req-afb67de9-952d-4ba2-a6a2-da892296d79d req-043a2011-ea7b-4f57-9459-4142095078bc service nova] [instance: dd5d90e8-964a-4e1c-a98a-bcba37a1d79e] Refreshing instance network info cache due to event network-changed-f9ec7c6a-7ed9-4d9d-9e32-c182bc79b903. 
{{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 768.288021] env[62070]: DEBUG oslo_concurrency.lockutils [req-afb67de9-952d-4ba2-a6a2-da892296d79d req-043a2011-ea7b-4f57-9459-4142095078bc service nova] Acquiring lock "refresh_cache-dd5d90e8-964a-4e1c-a98a-bcba37a1d79e" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 768.288021] env[62070]: DEBUG oslo_concurrency.lockutils [req-afb67de9-952d-4ba2-a6a2-da892296d79d req-043a2011-ea7b-4f57-9459-4142095078bc service nova] Acquired lock "refresh_cache-dd5d90e8-964a-4e1c-a98a-bcba37a1d79e" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 768.288021] env[62070]: DEBUG nova.network.neutron [req-afb67de9-952d-4ba2-a6a2-da892296d79d req-043a2011-ea7b-4f57-9459-4142095078bc service nova] [instance: dd5d90e8-964a-4e1c-a98a-bcba37a1d79e] Refreshing network info cache for port f9ec7c6a-7ed9-4d9d-9e32-c182bc79b903 {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 768.345012] env[62070]: DEBUG oslo_vmware.api [None req-f6ff1678-8a45-4fca-ba2d-cf78a12bb48c tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Task: {'id': task-1121611, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 768.396141] env[62070]: DEBUG oslo_vmware.api [None req-682a69a1-3ef5-481f-9200-81623493ef57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Task: {'id': task-1121607, 'name': PowerOnVM_Task, 'duration_secs': 1.574761} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 768.396879] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-682a69a1-3ef5-481f-9200-81623493ef57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 13e3576e-4f4c-4541-a637-daa124cbf8dd] Powered on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 768.397354] env[62070]: INFO nova.compute.manager [None req-682a69a1-3ef5-481f-9200-81623493ef57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 13e3576e-4f4c-4541-a637-daa124cbf8dd] Took 12.37 seconds to spawn the instance on the hypervisor. [ 768.397652] env[62070]: DEBUG nova.compute.manager [None req-682a69a1-3ef5-481f-9200-81623493ef57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 13e3576e-4f4c-4541-a637-daa124cbf8dd] Checking state {{(pid=62070) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 768.398532] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6c621f2-89ff-4fa1-9b88-0fe053e598c1 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.506333] env[62070]: DEBUG oslo_vmware.api [None req-e4f46748-c02f-46f3-a136-2698cbfaeb86 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Task: {'id': task-1121612, 'name': Rename_Task, 'duration_secs': 0.181939} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 768.506681] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-e4f46748-c02f-46f3-a136-2698cbfaeb86 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] [instance: 359ae9f2-a907-459e-99b9-3e043d5d015f] Powering on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 768.506977] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-416d6fb2-037b-400e-8c03-ff726ad1a93f {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.512899] env[62070]: DEBUG oslo_vmware.api [None req-e4f46748-c02f-46f3-a136-2698cbfaeb86 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Waiting for the task: (returnval){ [ 768.512899] env[62070]: value = "task-1121613" [ 768.512899] env[62070]: _type = "Task" [ 768.512899] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 768.526516] env[62070]: DEBUG oslo_vmware.api [None req-e4f46748-c02f-46f3-a136-2698cbfaeb86 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Task: {'id': task-1121613, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 768.536152] env[62070]: DEBUG oslo_concurrency.lockutils [None req-c8619bac-3476-49c3-92b7-dcaac00d2762 tempest-ServersTestFqdnHostnames-1065104165 tempest-ServersTestFqdnHostnames-1065104165-project-member] Lock "748c94c7-1233-44f4-a71a-176b26518399" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 114.781s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 768.572668] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6578fefe-a6f2-433c-911f-0627322a57aa tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 768.720021] env[62070]: DEBUG oslo_concurrency.lockutils [None req-a1a0245d-1ee3-4e1c-adaf-6c5d189bff8e tempest-ServerMetadataNegativeTestJSON-1698615623 tempest-ServerMetadataNegativeTestJSON-1698615623-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.567s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 768.720021] env[62070]: DEBUG nova.compute.manager [None req-a1a0245d-1ee3-4e1c-adaf-6c5d189bff8e tempest-ServerMetadataNegativeTestJSON-1698615623 tempest-ServerMetadataNegativeTestJSON-1698615623-project-member] [instance: 20e7a993-b1fb-4359-ab35-8b0f06ca0121] Start building networks asynchronously for instance. 
{{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 768.721440] env[62070]: DEBUG oslo_concurrency.lockutils [None req-174e44be-5f42-4d72-b785-d7e1a5fd4e8e tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.451s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 768.723297] env[62070]: INFO nova.compute.claims [None req-174e44be-5f42-4d72-b785-d7e1a5fd4e8e tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] [instance: 1ce155c8-9a10-4eff-b428-31889aa8f638] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 768.844794] env[62070]: DEBUG oslo_vmware.api [None req-f6ff1678-8a45-4fca-ba2d-cf78a12bb48c tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Task: {'id': task-1121611, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.5403} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 768.845165] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-f6ff1678-8a45-4fca-ba2d-cf78a12bb48c tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore2] 242cf24f-f9e0-49ca-9b3e-a2b88b3cdcdc/242cf24f-f9e0-49ca-9b3e-a2b88b3cdcdc.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 768.845430] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-f6ff1678-8a45-4fca-ba2d-cf78a12bb48c tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] [instance: 242cf24f-f9e0-49ca-9b3e-a2b88b3cdcdc] Extending root virtual disk to 1048576 {{(pid=62070) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 768.845676] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-353f5e3e-1fa3-4a51-b39c-d83c1aebbf88 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.854121] env[62070]: DEBUG oslo_vmware.api [None req-f6ff1678-8a45-4fca-ba2d-cf78a12bb48c tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Waiting for the task: (returnval){ [ 768.854121] env[62070]: value = "task-1121614" [ 768.854121] env[62070]: _type = "Task" [ 768.854121] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 768.864040] env[62070]: DEBUG oslo_vmware.api [None req-f6ff1678-8a45-4fca-ba2d-cf78a12bb48c tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Task: {'id': task-1121614, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 768.920954] env[62070]: INFO nova.compute.manager [None req-682a69a1-3ef5-481f-9200-81623493ef57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 13e3576e-4f4c-4541-a637-daa124cbf8dd] Took 31.75 seconds to build instance. 
[ 769.024446] env[62070]: DEBUG oslo_vmware.api [None req-e4f46748-c02f-46f3-a136-2698cbfaeb86 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Task: {'id': task-1121613, 'name': PowerOnVM_Task, 'duration_secs': 0.466696} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 769.024704] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-e4f46748-c02f-46f3-a136-2698cbfaeb86 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] [instance: 359ae9f2-a907-459e-99b9-3e043d5d015f] Powered on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 769.024903] env[62070]: INFO nova.compute.manager [None req-e4f46748-c02f-46f3-a136-2698cbfaeb86 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] [instance: 359ae9f2-a907-459e-99b9-3e043d5d015f] Took 7.77 seconds to spawn the instance on the hypervisor. [ 769.025330] env[62070]: DEBUG nova.compute.manager [None req-e4f46748-c02f-46f3-a136-2698cbfaeb86 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] [instance: 359ae9f2-a907-459e-99b9-3e043d5d015f] Checking state {{(pid=62070) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 769.026175] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f61500d-2373-4b77-abb2-ac8696895a08 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.039777] env[62070]: DEBUG nova.compute.manager [None req-79dfc138-9989-48ce-b920-501f82a9be5b tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: c3c6e93c-80be-4e71-87fb-2ff8db8d30fe] Starting instance... {{(pid=62070) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 769.144494] env[62070]: DEBUG nova.network.neutron [req-afb67de9-952d-4ba2-a6a2-da892296d79d req-043a2011-ea7b-4f57-9459-4142095078bc service nova] [instance: dd5d90e8-964a-4e1c-a98a-bcba37a1d79e] Updated VIF entry in instance network info cache for port f9ec7c6a-7ed9-4d9d-9e32-c182bc79b903. 
{{(pid=62070) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 769.144812] env[62070]: DEBUG nova.network.neutron [req-afb67de9-952d-4ba2-a6a2-da892296d79d req-043a2011-ea7b-4f57-9459-4142095078bc service nova] [instance: dd5d90e8-964a-4e1c-a98a-bcba37a1d79e] Updating instance_info_cache with network_info: [{"id": "f9ec7c6a-7ed9-4d9d-9e32-c182bc79b903", "address": "fa:16:3e:a5:7d:15", "network": {"id": "781d8af1-b8c8-42cb-b585-29fcaf2956c3", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-491072945-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.217", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7ded80f14852431187bd5066e4c42d71", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0ed262a3-f84f-4e1c-bbaf-c6a10e0c243e", "external-id": "nsx-vlan-transportzone-146", "segmentation_id": 146, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf9ec7c6a-7e", "ovs_interfaceid": "f9ec7c6a-7ed9-4d9d-9e32-c182bc79b903", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 769.227928] env[62070]: DEBUG nova.compute.utils [None req-a1a0245d-1ee3-4e1c-adaf-6c5d189bff8e tempest-ServerMetadataNegativeTestJSON-1698615623 tempest-ServerMetadataNegativeTestJSON-1698615623-project-member] Using /dev/sd instead of None {{(pid=62070) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 769.231558] env[62070]: DEBUG nova.compute.manager [None req-a1a0245d-1ee3-4e1c-adaf-6c5d189bff8e tempest-ServerMetadataNegativeTestJSON-1698615623 tempest-ServerMetadataNegativeTestJSON-1698615623-project-member] [instance: 20e7a993-b1fb-4359-ab35-8b0f06ca0121] Allocating IP information in the background. 
{{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 769.232120] env[62070]: DEBUG nova.network.neutron [None req-a1a0245d-1ee3-4e1c-adaf-6c5d189bff8e tempest-ServerMetadataNegativeTestJSON-1698615623 tempest-ServerMetadataNegativeTestJSON-1698615623-project-member] [instance: 20e7a993-b1fb-4359-ab35-8b0f06ca0121] allocate_for_instance() {{(pid=62070) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 769.293310] env[62070]: DEBUG nova.policy [None req-a1a0245d-1ee3-4e1c-adaf-6c5d189bff8e tempest-ServerMetadataNegativeTestJSON-1698615623 tempest-ServerMetadataNegativeTestJSON-1698615623-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '04d846d68c434d3da1defd28796dc857', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '74acdcc195b04159bb779f1c87cc8e2f', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62070) authorize /opt/stack/nova/nova/policy.py:201}} [ 769.364534] env[62070]: DEBUG oslo_vmware.api [None req-f6ff1678-8a45-4fca-ba2d-cf78a12bb48c tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Task: {'id': task-1121614, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.078344} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 769.364534] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-f6ff1678-8a45-4fca-ba2d-cf78a12bb48c tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] [instance: 242cf24f-f9e0-49ca-9b3e-a2b88b3cdcdc] Extended root virtual disk {{(pid=62070) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 769.364882] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aef559c5-6363-4922-b46f-e6a41ae5873b {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.391770] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-f6ff1678-8a45-4fca-ba2d-cf78a12bb48c tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] [instance: 242cf24f-f9e0-49ca-9b3e-a2b88b3cdcdc] Reconfiguring VM instance instance-00000033 to attach disk [datastore2] 242cf24f-f9e0-49ca-9b3e-a2b88b3cdcdc/242cf24f-f9e0-49ca-9b3e-a2b88b3cdcdc.vmdk or device None with type sparse {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 769.391917] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-dd007b33-0c2c-423f-80d7-45029cee7c61 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.413214] env[62070]: DEBUG oslo_vmware.api [None req-f6ff1678-8a45-4fca-ba2d-cf78a12bb48c tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Waiting for the task: (returnval){ [ 769.413214] env[62070]: value = "task-1121615" [ 769.413214] env[62070]: _type = "Task" [ 769.413214] env[62070]: } to complete. 
{{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 769.421968] env[62070]: DEBUG oslo_vmware.api [None req-f6ff1678-8a45-4fca-ba2d-cf78a12bb48c tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Task: {'id': task-1121615, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 769.423570] env[62070]: DEBUG oslo_concurrency.lockutils [None req-682a69a1-3ef5-481f-9200-81623493ef57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Lock "13e3576e-4f4c-4541-a637-daa124cbf8dd" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 116.754s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 769.546337] env[62070]: INFO nova.compute.manager [None req-e4f46748-c02f-46f3-a136-2698cbfaeb86 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] [instance: 359ae9f2-a907-459e-99b9-3e043d5d015f] Took 26.09 seconds to build instance. [ 769.569186] env[62070]: DEBUG oslo_concurrency.lockutils [None req-79dfc138-9989-48ce-b920-501f82a9be5b tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 769.591569] env[62070]: DEBUG nova.network.neutron [None req-a1a0245d-1ee3-4e1c-adaf-6c5d189bff8e tempest-ServerMetadataNegativeTestJSON-1698615623 tempest-ServerMetadataNegativeTestJSON-1698615623-project-member] [instance: 20e7a993-b1fb-4359-ab35-8b0f06ca0121] Successfully created port: ac60129b-8af9-4075-a1d9-9f0ecfe5aedf {{(pid=62070) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 769.648575] env[62070]: DEBUG oslo_concurrency.lockutils [req-afb67de9-952d-4ba2-a6a2-da892296d79d req-043a2011-ea7b-4f57-9459-4142095078bc service nova] Releasing lock "refresh_cache-dd5d90e8-964a-4e1c-a98a-bcba37a1d79e" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 769.734887] env[62070]: DEBUG nova.compute.manager [None req-a1a0245d-1ee3-4e1c-adaf-6c5d189bff8e tempest-ServerMetadataNegativeTestJSON-1698615623 tempest-ServerMetadataNegativeTestJSON-1698615623-project-member] [instance: 20e7a993-b1fb-4359-ab35-8b0f06ca0121] Start building block device mappings for instance. {{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 769.930437] env[62070]: DEBUG nova.compute.manager [None req-7d89b3fa-4dcc-4992-8983-de4ad7c29798 tempest-ServerMetadataTestJSON-571714377 tempest-ServerMetadataTestJSON-571714377-project-member] [instance: efef4aac-5b74-4a41-9f74-3d4cb4f80cdb] Starting instance... {{(pid=62070) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 769.933242] env[62070]: DEBUG oslo_vmware.api [None req-f6ff1678-8a45-4fca-ba2d-cf78a12bb48c tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Task: {'id': task-1121615, 'name': ReconfigVM_Task, 'duration_secs': 0.483039} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 769.933622] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-f6ff1678-8a45-4fca-ba2d-cf78a12bb48c tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] [instance: 242cf24f-f9e0-49ca-9b3e-a2b88b3cdcdc] Reconfigured VM instance instance-00000033 to attach disk [datastore2] 242cf24f-f9e0-49ca-9b3e-a2b88b3cdcdc/242cf24f-f9e0-49ca-9b3e-a2b88b3cdcdc.vmdk or device None with type sparse {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 769.938321] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-dbbabca3-edfe-46a2-852f-4ac9408fc059 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.947384] env[62070]: DEBUG oslo_vmware.api [None req-f6ff1678-8a45-4fca-ba2d-cf78a12bb48c tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Waiting for the task: (returnval){ [ 769.947384] env[62070]: value = "task-1121616" [ 769.947384] env[62070]: _type = "Task" [ 769.947384] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 769.960308] env[62070]: DEBUG oslo_vmware.api [None req-f6ff1678-8a45-4fca-ba2d-cf78a12bb48c tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Task: {'id': task-1121616, 'name': Rename_Task} progress is 6%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 770.049770] env[62070]: DEBUG oslo_concurrency.lockutils [None req-e4f46748-c02f-46f3-a136-2698cbfaeb86 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Lock "359ae9f2-a907-459e-99b9-3e043d5d015f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 111.197s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 770.291596] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fe7da16-d50b-4114-887c-766e1731d863 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.299820] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55ad7de1-5de3-48f3-86d5-2b7f0c37f213 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.331910] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfccaff3-51d8-47e9-84d7-2e80a352f2d6 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.335473] env[62070]: DEBUG nova.compute.manager [req-70c03854-0f33-4df9-81e4-f169cbf8ebf5 req-e6f35754-9d3e-4f42-aec7-0fce95a628fe service nova] [instance: 748c94c7-1233-44f4-a71a-176b26518399] Received event network-changed-3fd232c6-4869-4c79-8250-ae854eb69ae0 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 770.335899] env[62070]: DEBUG nova.compute.manager [req-70c03854-0f33-4df9-81e4-f169cbf8ebf5 req-e6f35754-9d3e-4f42-aec7-0fce95a628fe service nova] [instance: 
748c94c7-1233-44f4-a71a-176b26518399] Refreshing instance network info cache due to event network-changed-3fd232c6-4869-4c79-8250-ae854eb69ae0. {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 770.335899] env[62070]: DEBUG oslo_concurrency.lockutils [req-70c03854-0f33-4df9-81e4-f169cbf8ebf5 req-e6f35754-9d3e-4f42-aec7-0fce95a628fe service nova] Acquiring lock "refresh_cache-748c94c7-1233-44f4-a71a-176b26518399" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 770.336095] env[62070]: DEBUG oslo_concurrency.lockutils [req-70c03854-0f33-4df9-81e4-f169cbf8ebf5 req-e6f35754-9d3e-4f42-aec7-0fce95a628fe service nova] Acquired lock "refresh_cache-748c94c7-1233-44f4-a71a-176b26518399" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 770.336381] env[62070]: DEBUG nova.network.neutron [req-70c03854-0f33-4df9-81e4-f169cbf8ebf5 req-e6f35754-9d3e-4f42-aec7-0fce95a628fe service nova] [instance: 748c94c7-1233-44f4-a71a-176b26518399] Refreshing network info cache for port 3fd232c6-4869-4c79-8250-ae854eb69ae0 {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 770.342609] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6cf6702-a758-403a-8265-f3955d13ff14 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.356882] env[62070]: DEBUG nova.compute.provider_tree [None req-174e44be-5f42-4d72-b785-d7e1a5fd4e8e tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 770.456566] env[62070]: DEBUG oslo_concurrency.lockutils [None req-7d89b3fa-4dcc-4992-8983-de4ad7c29798 tempest-ServerMetadataTestJSON-571714377 tempest-ServerMetadataTestJSON-571714377-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 770.462667] env[62070]: DEBUG oslo_vmware.api [None req-f6ff1678-8a45-4fca-ba2d-cf78a12bb48c tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Task: {'id': task-1121616, 'name': Rename_Task, 'duration_secs': 0.345095} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 770.462993] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-f6ff1678-8a45-4fca-ba2d-cf78a12bb48c tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] [instance: 242cf24f-f9e0-49ca-9b3e-a2b88b3cdcdc] Powering on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 770.463397] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-16d0fe04-d9d1-4131-af0d-0e74830e5a3b {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.468073] env[62070]: DEBUG oslo_concurrency.lockutils [None req-e853c04a-59ac-41da-9df0-20098d493a59 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Acquiring lock "13e3576e-4f4c-4541-a637-daa124cbf8dd" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 770.468073] env[62070]: DEBUG oslo_concurrency.lockutils [None req-e853c04a-59ac-41da-9df0-20098d493a59 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Lock "13e3576e-4f4c-4541-a637-daa124cbf8dd" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 770.471311] env[62070]: DEBUG oslo_vmware.api [None req-f6ff1678-8a45-4fca-ba2d-cf78a12bb48c tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Waiting for the task: (returnval){ [ 770.471311] env[62070]: value = "task-1121617" [ 770.471311] env[62070]: _type = "Task" [ 770.471311] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 770.481582] env[62070]: DEBUG oslo_vmware.api [None req-f6ff1678-8a45-4fca-ba2d-cf78a12bb48c tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Task: {'id': task-1121617, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 770.551692] env[62070]: DEBUG nova.compute.manager [None req-24dc43a0-9208-4ff7-9026-97bbf9aa150b tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 328fbc92-8162-4e12-a02d-6e9cafe0c365] Starting instance... {{(pid=62070) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 770.744189] env[62070]: DEBUG nova.compute.manager [None req-a1a0245d-1ee3-4e1c-adaf-6c5d189bff8e tempest-ServerMetadataNegativeTestJSON-1698615623 tempest-ServerMetadataNegativeTestJSON-1698615623-project-member] [instance: 20e7a993-b1fb-4359-ab35-8b0f06ca0121] Start spawning the instance on the hypervisor. 
{{(pid=62070) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 770.769648] env[62070]: DEBUG nova.virt.hardware [None req-a1a0245d-1ee3-4e1c-adaf-6c5d189bff8e tempest-ServerMetadataNegativeTestJSON-1698615623 tempest-ServerMetadataNegativeTestJSON-1698615623-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T09:21:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T09:21:17Z,direct_url=,disk_format='vmdk',id=43ea607c-7ece-4601-9b11-75c6a16aa7dd,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9d42cb2bbadf40d6b35f237f71234611',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T09:21:18Z,virtual_size=,visibility=), allow threads: False {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 770.769890] env[62070]: DEBUG nova.virt.hardware [None req-a1a0245d-1ee3-4e1c-adaf-6c5d189bff8e tempest-ServerMetadataNegativeTestJSON-1698615623 tempest-ServerMetadataNegativeTestJSON-1698615623-project-member] Flavor limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 770.770083] env[62070]: DEBUG nova.virt.hardware [None req-a1a0245d-1ee3-4e1c-adaf-6c5d189bff8e tempest-ServerMetadataNegativeTestJSON-1698615623 tempest-ServerMetadataNegativeTestJSON-1698615623-project-member] Image limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 770.770292] env[62070]: DEBUG nova.virt.hardware [None req-a1a0245d-1ee3-4e1c-adaf-6c5d189bff8e tempest-ServerMetadataNegativeTestJSON-1698615623 tempest-ServerMetadataNegativeTestJSON-1698615623-project-member] Flavor pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 770.770438] env[62070]: DEBUG nova.virt.hardware [None req-a1a0245d-1ee3-4e1c-adaf-6c5d189bff8e tempest-ServerMetadataNegativeTestJSON-1698615623 tempest-ServerMetadataNegativeTestJSON-1698615623-project-member] Image pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 770.770584] env[62070]: DEBUG nova.virt.hardware [None req-a1a0245d-1ee3-4e1c-adaf-6c5d189bff8e tempest-ServerMetadataNegativeTestJSON-1698615623 tempest-ServerMetadataNegativeTestJSON-1698615623-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 770.770787] env[62070]: DEBUG nova.virt.hardware [None req-a1a0245d-1ee3-4e1c-adaf-6c5d189bff8e tempest-ServerMetadataNegativeTestJSON-1698615623 tempest-ServerMetadataNegativeTestJSON-1698615623-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 770.770942] env[62070]: DEBUG nova.virt.hardware [None req-a1a0245d-1ee3-4e1c-adaf-6c5d189bff8e tempest-ServerMetadataNegativeTestJSON-1698615623 tempest-ServerMetadataNegativeTestJSON-1698615623-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62070) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 770.771172] env[62070]: DEBUG nova.virt.hardware [None req-a1a0245d-1ee3-4e1c-adaf-6c5d189bff8e tempest-ServerMetadataNegativeTestJSON-1698615623 tempest-ServerMetadataNegativeTestJSON-1698615623-project-member] Got 1 possible topologies {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 770.771339] env[62070]: DEBUG nova.virt.hardware [None req-a1a0245d-1ee3-4e1c-adaf-6c5d189bff8e tempest-ServerMetadataNegativeTestJSON-1698615623 tempest-ServerMetadataNegativeTestJSON-1698615623-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 770.771544] env[62070]: DEBUG nova.virt.hardware [None req-a1a0245d-1ee3-4e1c-adaf-6c5d189bff8e tempest-ServerMetadataNegativeTestJSON-1698615623 tempest-ServerMetadataNegativeTestJSON-1698615623-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 770.772396] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ae87e16-9ee3-4fae-b118-3c9566e49ae8 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.780299] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57a31af4-66e0-41ae-966d-efc91d460969 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.860733] env[62070]: DEBUG nova.scheduler.client.report [None req-174e44be-5f42-4d72-b785-d7e1a5fd4e8e tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 770.973163] env[62070]: DEBUG nova.compute.utils [None req-e853c04a-59ac-41da-9df0-20098d493a59 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Using /dev/sd instead of None {{(pid=62070) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 770.987680] env[62070]: DEBUG oslo_vmware.api [None req-f6ff1678-8a45-4fca-ba2d-cf78a12bb48c tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Task: {'id': task-1121617, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 771.074541] env[62070]: DEBUG oslo_concurrency.lockutils [None req-24dc43a0-9208-4ff7-9026-97bbf9aa150b tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 771.127328] env[62070]: DEBUG nova.network.neutron [req-70c03854-0f33-4df9-81e4-f169cbf8ebf5 req-e6f35754-9d3e-4f42-aec7-0fce95a628fe service nova] [instance: 748c94c7-1233-44f4-a71a-176b26518399] Updated VIF entry in instance network info cache for port 3fd232c6-4869-4c79-8250-ae854eb69ae0. {{(pid=62070) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 771.127709] env[62070]: DEBUG nova.network.neutron [req-70c03854-0f33-4df9-81e4-f169cbf8ebf5 req-e6f35754-9d3e-4f42-aec7-0fce95a628fe service nova] [instance: 748c94c7-1233-44f4-a71a-176b26518399] Updating instance_info_cache with network_info: [{"id": "3fd232c6-4869-4c79-8250-ae854eb69ae0", "address": "fa:16:3e:3f:f0:95", "network": {"id": "bb187032-3d1c-4d95-9cf9-aa3d3ef0b45f", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-1186437126-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.243", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a25a88ab06e440e3b9813e276194143a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5ce1511e-1eaa-45c6-a1ef-2b714c814fa1", "external-id": "nsx-vlan-transportzone-300", "segmentation_id": 300, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3fd232c6-48", "ovs_interfaceid": "3fd232c6-4869-4c79-8250-ae854eb69ae0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 771.230045] env[62070]: DEBUG nova.network.neutron [None req-a1a0245d-1ee3-4e1c-adaf-6c5d189bff8e tempest-ServerMetadataNegativeTestJSON-1698615623 tempest-ServerMetadataNegativeTestJSON-1698615623-project-member] [instance: 20e7a993-b1fb-4359-ab35-8b0f06ca0121] Successfully updated port: ac60129b-8af9-4075-a1d9-9f0ecfe5aedf {{(pid=62070) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 771.365902] env[62070]: DEBUG oslo_concurrency.lockutils [None req-174e44be-5f42-4d72-b785-d7e1a5fd4e8e tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.644s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 771.366483] env[62070]: DEBUG nova.compute.manager [None req-174e44be-5f42-4d72-b785-d7e1a5fd4e8e tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] [instance: 1ce155c8-9a10-4eff-b428-31889aa8f638] Start building 
networks asynchronously for instance. {{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 771.369105] env[62070]: DEBUG oslo_concurrency.lockutils [None req-18406021-c28f-46ab-8651-f32c1395e2d1 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: waited 17.582s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 771.481241] env[62070]: DEBUG oslo_concurrency.lockutils [None req-e853c04a-59ac-41da-9df0-20098d493a59 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Lock "13e3576e-4f4c-4541-a637-daa124cbf8dd" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.013s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 771.485191] env[62070]: DEBUG oslo_vmware.api [None req-f6ff1678-8a45-4fca-ba2d-cf78a12bb48c tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Task: {'id': task-1121617, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 771.630833] env[62070]: DEBUG oslo_concurrency.lockutils [req-70c03854-0f33-4df9-81e4-f169cbf8ebf5 req-e6f35754-9d3e-4f42-aec7-0fce95a628fe service nova] Releasing lock "refresh_cache-748c94c7-1233-44f4-a71a-176b26518399" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 771.732765] env[62070]: DEBUG oslo_concurrency.lockutils [None req-a1a0245d-1ee3-4e1c-adaf-6c5d189bff8e tempest-ServerMetadataNegativeTestJSON-1698615623 tempest-ServerMetadataNegativeTestJSON-1698615623-project-member] Acquiring lock "refresh_cache-20e7a993-b1fb-4359-ab35-8b0f06ca0121" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 771.732928] env[62070]: DEBUG oslo_concurrency.lockutils [None req-a1a0245d-1ee3-4e1c-adaf-6c5d189bff8e tempest-ServerMetadataNegativeTestJSON-1698615623 tempest-ServerMetadataNegativeTestJSON-1698615623-project-member] Acquired lock "refresh_cache-20e7a993-b1fb-4359-ab35-8b0f06ca0121" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 771.733112] env[62070]: DEBUG nova.network.neutron [None req-a1a0245d-1ee3-4e1c-adaf-6c5d189bff8e tempest-ServerMetadataNegativeTestJSON-1698615623 tempest-ServerMetadataNegativeTestJSON-1698615623-project-member] [instance: 20e7a993-b1fb-4359-ab35-8b0f06ca0121] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 771.873628] env[62070]: DEBUG nova.compute.utils [None req-174e44be-5f42-4d72-b785-d7e1a5fd4e8e tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] Using /dev/sd instead of None {{(pid=62070) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 771.875199] env[62070]: DEBUG nova.objects.instance [None req-18406021-c28f-46ab-8651-f32c1395e2d1 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Lazy-loading 'migration_context' on Instance uuid d148d561-3211-4f1f-965a-f2b14cd60b11 {{(pid=62070) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 771.877620] 
env[62070]: DEBUG nova.compute.manager [None req-174e44be-5f42-4d72-b785-d7e1a5fd4e8e tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] [instance: 1ce155c8-9a10-4eff-b428-31889aa8f638] Allocating IP information in the background. {{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 771.877713] env[62070]: DEBUG nova.network.neutron [None req-174e44be-5f42-4d72-b785-d7e1a5fd4e8e tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] [instance: 1ce155c8-9a10-4eff-b428-31889aa8f638] allocate_for_instance() {{(pid=62070) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 771.926166] env[62070]: DEBUG nova.policy [None req-174e44be-5f42-4d72-b785-d7e1a5fd4e8e tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2d5c9f37a9bc41a49629dd309aa9023e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a4448e8a041f494d8faf51ef6d88c635', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62070) authorize /opt/stack/nova/nova/policy.py:201}} [ 771.988233] env[62070]: DEBUG oslo_vmware.api [None req-f6ff1678-8a45-4fca-ba2d-cf78a12bb48c tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Task: {'id': task-1121617, 'name': PowerOnVM_Task, 'duration_secs': 1.090867} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 771.988233] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-f6ff1678-8a45-4fca-ba2d-cf78a12bb48c tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] [instance: 242cf24f-f9e0-49ca-9b3e-a2b88b3cdcdc] Powered on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 771.988322] env[62070]: INFO nova.compute.manager [None req-f6ff1678-8a45-4fca-ba2d-cf78a12bb48c tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] [instance: 242cf24f-f9e0-49ca-9b3e-a2b88b3cdcdc] Took 8.12 seconds to spawn the instance on the hypervisor. 
[ 771.988817] env[62070]: DEBUG nova.compute.manager [None req-f6ff1678-8a45-4fca-ba2d-cf78a12bb48c tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] [instance: 242cf24f-f9e0-49ca-9b3e-a2b88b3cdcdc] Checking state {{(pid=62070) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 771.992018] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00d1b4d7-1a5a-488f-bf3f-27464cd47292 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.214053] env[62070]: DEBUG nova.network.neutron [None req-174e44be-5f42-4d72-b785-d7e1a5fd4e8e tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] [instance: 1ce155c8-9a10-4eff-b428-31889aa8f638] Successfully created port: 9cd48720-b8f6-4b76-ba6d-e6f0d9916a62 {{(pid=62070) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 772.269868] env[62070]: DEBUG nova.network.neutron [None req-a1a0245d-1ee3-4e1c-adaf-6c5d189bff8e tempest-ServerMetadataNegativeTestJSON-1698615623 tempest-ServerMetadataNegativeTestJSON-1698615623-project-member] [instance: 20e7a993-b1fb-4359-ab35-8b0f06ca0121] Instance cache missing network info. {{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 772.381297] env[62070]: DEBUG nova.compute.manager [None req-174e44be-5f42-4d72-b785-d7e1a5fd4e8e tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] [instance: 1ce155c8-9a10-4eff-b428-31889aa8f638] Start building block device mappings for instance. {{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 772.425100] env[62070]: DEBUG nova.compute.manager [req-7498c8c7-d1f9-42cf-9592-6ab4b3bffd90 req-069dc66a-f3f9-4a01-b103-cf484d2a1f6b service nova] [instance: 20e7a993-b1fb-4359-ab35-8b0f06ca0121] Received event network-vif-plugged-ac60129b-8af9-4075-a1d9-9f0ecfe5aedf {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 772.425100] env[62070]: DEBUG oslo_concurrency.lockutils [req-7498c8c7-d1f9-42cf-9592-6ab4b3bffd90 req-069dc66a-f3f9-4a01-b103-cf484d2a1f6b service nova] Acquiring lock "20e7a993-b1fb-4359-ab35-8b0f06ca0121-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 772.425100] env[62070]: DEBUG oslo_concurrency.lockutils [req-7498c8c7-d1f9-42cf-9592-6ab4b3bffd90 req-069dc66a-f3f9-4a01-b103-cf484d2a1f6b service nova] Lock "20e7a993-b1fb-4359-ab35-8b0f06ca0121-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 772.425100] env[62070]: DEBUG oslo_concurrency.lockutils [req-7498c8c7-d1f9-42cf-9592-6ab4b3bffd90 req-069dc66a-f3f9-4a01-b103-cf484d2a1f6b service nova] Lock "20e7a993-b1fb-4359-ab35-8b0f06ca0121-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 772.425100] env[62070]: DEBUG nova.compute.manager [req-7498c8c7-d1f9-42cf-9592-6ab4b3bffd90 req-069dc66a-f3f9-4a01-b103-cf484d2a1f6b service nova] [instance: 20e7a993-b1fb-4359-ab35-8b0f06ca0121] No waiting events found 
dispatching network-vif-plugged-ac60129b-8af9-4075-a1d9-9f0ecfe5aedf {{(pid=62070) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 772.425464] env[62070]: WARNING nova.compute.manager [req-7498c8c7-d1f9-42cf-9592-6ab4b3bffd90 req-069dc66a-f3f9-4a01-b103-cf484d2a1f6b service nova] [instance: 20e7a993-b1fb-4359-ab35-8b0f06ca0121] Received unexpected event network-vif-plugged-ac60129b-8af9-4075-a1d9-9f0ecfe5aedf for instance with vm_state building and task_state spawning. [ 772.425464] env[62070]: DEBUG nova.compute.manager [req-7498c8c7-d1f9-42cf-9592-6ab4b3bffd90 req-069dc66a-f3f9-4a01-b103-cf484d2a1f6b service nova] [instance: 20e7a993-b1fb-4359-ab35-8b0f06ca0121] Received event network-changed-ac60129b-8af9-4075-a1d9-9f0ecfe5aedf {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 772.431020] env[62070]: DEBUG nova.compute.manager [req-7498c8c7-d1f9-42cf-9592-6ab4b3bffd90 req-069dc66a-f3f9-4a01-b103-cf484d2a1f6b service nova] [instance: 20e7a993-b1fb-4359-ab35-8b0f06ca0121] Refreshing instance network info cache due to event network-changed-ac60129b-8af9-4075-a1d9-9f0ecfe5aedf. {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 772.431020] env[62070]: DEBUG oslo_concurrency.lockutils [req-7498c8c7-d1f9-42cf-9592-6ab4b3bffd90 req-069dc66a-f3f9-4a01-b103-cf484d2a1f6b service nova] Acquiring lock "refresh_cache-20e7a993-b1fb-4359-ab35-8b0f06ca0121" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 772.489016] env[62070]: DEBUG nova.network.neutron [None req-a1a0245d-1ee3-4e1c-adaf-6c5d189bff8e tempest-ServerMetadataNegativeTestJSON-1698615623 tempest-ServerMetadataNegativeTestJSON-1698615623-project-member] [instance: 20e7a993-b1fb-4359-ab35-8b0f06ca0121] Updating instance_info_cache with network_info: [{"id": "ac60129b-8af9-4075-a1d9-9f0ecfe5aedf", "address": "fa:16:3e:fa:64:ad", "network": {"id": "6f72c6d4-603a-48b0-82e9-9a781b6ae750", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-967685926-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "74acdcc195b04159bb779f1c87cc8e2f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e30245c5-78f5-48e6-b504-c6c21f5a9b45", "external-id": "nsx-vlan-transportzone-409", "segmentation_id": 409, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapac60129b-8a", "ovs_interfaceid": "ac60129b-8af9-4075-a1d9-9f0ecfe5aedf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 772.516196] env[62070]: INFO nova.compute.manager [None req-f6ff1678-8a45-4fca-ba2d-cf78a12bb48c tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] [instance: 242cf24f-f9e0-49ca-9b3e-a2b88b3cdcdc] Took 28.72 seconds to build instance. 
[ 772.558897] env[62070]: DEBUG nova.network.neutron [None req-174e44be-5f42-4d72-b785-d7e1a5fd4e8e tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] [instance: 1ce155c8-9a10-4eff-b428-31889aa8f638] Successfully created port: 266d9e51-fe0e-458d-a1b5-ecdae4dd0c6b {{(pid=62070) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 772.564224] env[62070]: DEBUG oslo_concurrency.lockutils [None req-e853c04a-59ac-41da-9df0-20098d493a59 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Acquiring lock "13e3576e-4f4c-4541-a637-daa124cbf8dd" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 772.564459] env[62070]: DEBUG oslo_concurrency.lockutils [None req-e853c04a-59ac-41da-9df0-20098d493a59 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Lock "13e3576e-4f4c-4541-a637-daa124cbf8dd" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 772.564676] env[62070]: INFO nova.compute.manager [None req-e853c04a-59ac-41da-9df0-20098d493a59 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 13e3576e-4f4c-4541-a637-daa124cbf8dd] Attaching volume 36041495-d67a-4503-9146-0c73ccad5d5f to /dev/sdb [ 772.614161] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30e5801c-0292-4c96-98a5-b465cab636c7 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.622857] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79e017bf-6a0c-4e8e-ab71-18d6108656a0 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.640754] env[62070]: DEBUG nova.virt.block_device [None req-e853c04a-59ac-41da-9df0-20098d493a59 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 13e3576e-4f4c-4541-a637-daa124cbf8dd] Updating existing volume attachment record: 72fdecd0-c7f6-4d9e-9991-c52f6bb55d99 {{(pid=62070) _volume_attach /opt/stack/nova/nova/virt/block_device.py:679}} [ 772.977137] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd66418e-21ef-4039-897c-68834830ecaf {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.985456] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95449bd7-4b55-4d46-95c6-148e7f24c19f {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.024779] env[62070]: DEBUG oslo_concurrency.lockutils [None req-a1a0245d-1ee3-4e1c-adaf-6c5d189bff8e tempest-ServerMetadataNegativeTestJSON-1698615623 tempest-ServerMetadataNegativeTestJSON-1698615623-project-member] Releasing lock "refresh_cache-20e7a993-b1fb-4359-ab35-8b0f06ca0121" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 773.025198] env[62070]: DEBUG nova.compute.manager [None 
req-a1a0245d-1ee3-4e1c-adaf-6c5d189bff8e tempest-ServerMetadataNegativeTestJSON-1698615623 tempest-ServerMetadataNegativeTestJSON-1698615623-project-member] [instance: 20e7a993-b1fb-4359-ab35-8b0f06ca0121] Instance network_info: |[{"id": "ac60129b-8af9-4075-a1d9-9f0ecfe5aedf", "address": "fa:16:3e:fa:64:ad", "network": {"id": "6f72c6d4-603a-48b0-82e9-9a781b6ae750", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-967685926-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "74acdcc195b04159bb779f1c87cc8e2f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e30245c5-78f5-48e6-b504-c6c21f5a9b45", "external-id": "nsx-vlan-transportzone-409", "segmentation_id": 409, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapac60129b-8a", "ovs_interfaceid": "ac60129b-8af9-4075-a1d9-9f0ecfe5aedf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 773.026916] env[62070]: DEBUG oslo_concurrency.lockutils [None req-f6ff1678-8a45-4fca-ba2d-cf78a12bb48c tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Lock "242cf24f-f9e0-49ca-9b3e-a2b88b3cdcdc" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 113.918s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 773.027488] env[62070]: DEBUG oslo_concurrency.lockutils [req-7498c8c7-d1f9-42cf-9592-6ab4b3bffd90 req-069dc66a-f3f9-4a01-b103-cf484d2a1f6b service nova] Acquired lock "refresh_cache-20e7a993-b1fb-4359-ab35-8b0f06ca0121" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 773.027728] env[62070]: DEBUG nova.network.neutron [req-7498c8c7-d1f9-42cf-9592-6ab4b3bffd90 req-069dc66a-f3f9-4a01-b103-cf484d2a1f6b service nova] [instance: 20e7a993-b1fb-4359-ab35-8b0f06ca0121] Refreshing network info cache for port ac60129b-8af9-4075-a1d9-9f0ecfe5aedf {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 773.029286] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-a1a0245d-1ee3-4e1c-adaf-6c5d189bff8e tempest-ServerMetadataNegativeTestJSON-1698615623 tempest-ServerMetadataNegativeTestJSON-1698615623-project-member] [instance: 20e7a993-b1fb-4359-ab35-8b0f06ca0121] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:fa:64:ad', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e30245c5-78f5-48e6-b504-c6c21f5a9b45', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ac60129b-8af9-4075-a1d9-9f0ecfe5aedf', 'vif_model': 'vmxnet3'}] {{(pid=62070) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 773.038324] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-a1a0245d-1ee3-4e1c-adaf-6c5d189bff8e tempest-ServerMetadataNegativeTestJSON-1698615623 
tempest-ServerMetadataNegativeTestJSON-1698615623-project-member] Creating folder: Project (74acdcc195b04159bb779f1c87cc8e2f). Parent ref: group-v245319. {{(pid=62070) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 773.043021] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccc57a59-d301-417c-ad9c-ef01c5a4abac {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.044178] env[62070]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ff172fac-1393-4abf-8af8-dbf2276e8c58 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.053240] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78b373c3-82f9-4f46-a97d-9f71411f5437 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.060709] env[62070]: INFO nova.virt.vmwareapi.vm_util [None req-a1a0245d-1ee3-4e1c-adaf-6c5d189bff8e tempest-ServerMetadataNegativeTestJSON-1698615623 tempest-ServerMetadataNegativeTestJSON-1698615623-project-member] Created folder: Project (74acdcc195b04159bb779f1c87cc8e2f) in parent group-v245319. [ 773.063586] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-a1a0245d-1ee3-4e1c-adaf-6c5d189bff8e tempest-ServerMetadataNegativeTestJSON-1698615623 tempest-ServerMetadataNegativeTestJSON-1698615623-project-member] Creating folder: Instances. Parent ref: group-v245380. {{(pid=62070) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 773.063586] env[62070]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-165e2d6c-c70e-44c7-819d-58664b4396d6 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.072538] env[62070]: DEBUG nova.compute.provider_tree [None req-18406021-c28f-46ab-8651-f32c1395e2d1 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 773.082341] env[62070]: INFO nova.virt.vmwareapi.vm_util [None req-a1a0245d-1ee3-4e1c-adaf-6c5d189bff8e tempest-ServerMetadataNegativeTestJSON-1698615623 tempest-ServerMetadataNegativeTestJSON-1698615623-project-member] Created folder: Instances in parent group-v245380. [ 773.082598] env[62070]: DEBUG oslo.service.loopingcall [None req-a1a0245d-1ee3-4e1c-adaf-6c5d189bff8e tempest-ServerMetadataNegativeTestJSON-1698615623 tempest-ServerMetadataNegativeTestJSON-1698615623-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 773.083061] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 20e7a993-b1fb-4359-ab35-8b0f06ca0121] Creating VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 773.083364] env[62070]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e92ac10a-eec1-49b1-a5d8-2cc1583b24c2 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.103682] env[62070]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 773.103682] env[62070]: value = "task-1121623" [ 773.103682] env[62070]: _type = "Task" [ 773.103682] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 773.114935] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1121623, 'name': CreateVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 773.397676] env[62070]: DEBUG nova.compute.manager [None req-174e44be-5f42-4d72-b785-d7e1a5fd4e8e tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] [instance: 1ce155c8-9a10-4eff-b428-31889aa8f638] Start spawning the instance on the hypervisor. {{(pid=62070) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 773.421585] env[62070]: DEBUG nova.virt.hardware [None req-174e44be-5f42-4d72-b785-d7e1a5fd4e8e tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T09:21:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T09:21:17Z,direct_url=,disk_format='vmdk',id=43ea607c-7ece-4601-9b11-75c6a16aa7dd,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9d42cb2bbadf40d6b35f237f71234611',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T09:21:18Z,virtual_size=,visibility=), allow threads: False {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 773.422367] env[62070]: DEBUG nova.virt.hardware [None req-174e44be-5f42-4d72-b785-d7e1a5fd4e8e tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] Flavor limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 773.422367] env[62070]: DEBUG nova.virt.hardware [None req-174e44be-5f42-4d72-b785-d7e1a5fd4e8e tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] Image limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 773.422367] env[62070]: DEBUG nova.virt.hardware [None req-174e44be-5f42-4d72-b785-d7e1a5fd4e8e tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] Flavor pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 773.422546] env[62070]: DEBUG nova.virt.hardware [None req-174e44be-5f42-4d72-b785-d7e1a5fd4e8e 
tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] Image pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 773.422657] env[62070]: DEBUG nova.virt.hardware [None req-174e44be-5f42-4d72-b785-d7e1a5fd4e8e tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 773.422917] env[62070]: DEBUG nova.virt.hardware [None req-174e44be-5f42-4d72-b785-d7e1a5fd4e8e tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 773.423097] env[62070]: DEBUG nova.virt.hardware [None req-174e44be-5f42-4d72-b785-d7e1a5fd4e8e tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 773.423269] env[62070]: DEBUG nova.virt.hardware [None req-174e44be-5f42-4d72-b785-d7e1a5fd4e8e tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] Got 1 possible topologies {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 773.423481] env[62070]: DEBUG nova.virt.hardware [None req-174e44be-5f42-4d72-b785-d7e1a5fd4e8e tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 773.423734] env[62070]: DEBUG nova.virt.hardware [None req-174e44be-5f42-4d72-b785-d7e1a5fd4e8e tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 773.424714] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d94f8cb-4cdf-4b38-af1e-86c3d4245464 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.433146] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-588cfb0c-dcdb-4762-895b-a256219ccf9f {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.542381] env[62070]: DEBUG nova.compute.manager [None req-9eb5f919-38d3-41ef-8aa0-3ff1772842b2 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] [instance: 3d22f50a-e1b7-48f9-a044-df64d01dfeb4] Starting instance... 
{{(pid=62070) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 773.575480] env[62070]: DEBUG nova.scheduler.client.report [None req-18406021-c28f-46ab-8651-f32c1395e2d1 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 773.615063] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1121623, 'name': CreateVM_Task} progress is 25%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 773.814084] env[62070]: DEBUG nova.network.neutron [req-7498c8c7-d1f9-42cf-9592-6ab4b3bffd90 req-069dc66a-f3f9-4a01-b103-cf484d2a1f6b service nova] [instance: 20e7a993-b1fb-4359-ab35-8b0f06ca0121] Updated VIF entry in instance network info cache for port ac60129b-8af9-4075-a1d9-9f0ecfe5aedf. {{(pid=62070) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 773.814468] env[62070]: DEBUG nova.network.neutron [req-7498c8c7-d1f9-42cf-9592-6ab4b3bffd90 req-069dc66a-f3f9-4a01-b103-cf484d2a1f6b service nova] [instance: 20e7a993-b1fb-4359-ab35-8b0f06ca0121] Updating instance_info_cache with network_info: [{"id": "ac60129b-8af9-4075-a1d9-9f0ecfe5aedf", "address": "fa:16:3e:fa:64:ad", "network": {"id": "6f72c6d4-603a-48b0-82e9-9a781b6ae750", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-967685926-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "74acdcc195b04159bb779f1c87cc8e2f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e30245c5-78f5-48e6-b504-c6c21f5a9b45", "external-id": "nsx-vlan-transportzone-409", "segmentation_id": 409, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapac60129b-8a", "ovs_interfaceid": "ac60129b-8af9-4075-a1d9-9f0ecfe5aedf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 774.002996] env[62070]: DEBUG nova.compute.manager [None req-4566d04d-4f38-497b-9c8b-991a179272b8 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] [instance: 359ae9f2-a907-459e-99b9-3e043d5d015f] Checking state {{(pid=62070) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 774.004110] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3fb52bb-7901-4324-97ea-ae0102d6c4ac {{(pid=62070) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.073284] env[62070]: DEBUG oslo_concurrency.lockutils [None req-9eb5f919-38d3-41ef-8aa0-3ff1772842b2 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 774.117636] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1121623, 'name': CreateVM_Task, 'duration_secs': 0.964863} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 774.117842] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 20e7a993-b1fb-4359-ab35-8b0f06ca0121] Created VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 774.118581] env[62070]: DEBUG oslo_concurrency.lockutils [None req-a1a0245d-1ee3-4e1c-adaf-6c5d189bff8e tempest-ServerMetadataNegativeTestJSON-1698615623 tempest-ServerMetadataNegativeTestJSON-1698615623-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 774.119139] env[62070]: DEBUG oslo_concurrency.lockutils [None req-a1a0245d-1ee3-4e1c-adaf-6c5d189bff8e tempest-ServerMetadataNegativeTestJSON-1698615623 tempest-ServerMetadataNegativeTestJSON-1698615623-project-member] Acquired lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 774.119139] env[62070]: DEBUG oslo_concurrency.lockutils [None req-a1a0245d-1ee3-4e1c-adaf-6c5d189bff8e tempest-ServerMetadataNegativeTestJSON-1698615623 tempest-ServerMetadataNegativeTestJSON-1698615623-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 774.119657] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-54c5cd6c-3f34-4ac0-ad9c-f4a0c2423079 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.124399] env[62070]: DEBUG oslo_vmware.api [None req-a1a0245d-1ee3-4e1c-adaf-6c5d189bff8e tempest-ServerMetadataNegativeTestJSON-1698615623 tempest-ServerMetadataNegativeTestJSON-1698615623-project-member] Waiting for the task: (returnval){ [ 774.124399] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]5201ee4a-cd6e-9462-a67f-6bdfee3cf798" [ 774.124399] env[62070]: _type = "Task" [ 774.124399] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 774.133325] env[62070]: DEBUG oslo_vmware.api [None req-a1a0245d-1ee3-4e1c-adaf-6c5d189bff8e tempest-ServerMetadataNegativeTestJSON-1698615623 tempest-ServerMetadataNegativeTestJSON-1698615623-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]5201ee4a-cd6e-9462-a67f-6bdfee3cf798, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 774.276498] env[62070]: DEBUG nova.network.neutron [None req-174e44be-5f42-4d72-b785-d7e1a5fd4e8e tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] [instance: 1ce155c8-9a10-4eff-b428-31889aa8f638] Successfully updated port: 9cd48720-b8f6-4b76-ba6d-e6f0d9916a62 {{(pid=62070) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 774.317303] env[62070]: DEBUG oslo_concurrency.lockutils [req-7498c8c7-d1f9-42cf-9592-6ab4b3bffd90 req-069dc66a-f3f9-4a01-b103-cf484d2a1f6b service nova] Releasing lock "refresh_cache-20e7a993-b1fb-4359-ab35-8b0f06ca0121" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 774.448325] env[62070]: DEBUG nova.compute.manager [req-ae8eade5-979f-48e9-96d0-76ccbc754a7f req-8d135595-d0cf-4f33-aa7e-faeed19a0a80 service nova] [instance: 1ce155c8-9a10-4eff-b428-31889aa8f638] Received event network-vif-plugged-9cd48720-b8f6-4b76-ba6d-e6f0d9916a62 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 774.448475] env[62070]: DEBUG oslo_concurrency.lockutils [req-ae8eade5-979f-48e9-96d0-76ccbc754a7f req-8d135595-d0cf-4f33-aa7e-faeed19a0a80 service nova] Acquiring lock "1ce155c8-9a10-4eff-b428-31889aa8f638-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 774.448818] env[62070]: DEBUG oslo_concurrency.lockutils [req-ae8eade5-979f-48e9-96d0-76ccbc754a7f req-8d135595-d0cf-4f33-aa7e-faeed19a0a80 service nova] Lock "1ce155c8-9a10-4eff-b428-31889aa8f638-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 774.448999] env[62070]: DEBUG oslo_concurrency.lockutils [req-ae8eade5-979f-48e9-96d0-76ccbc754a7f req-8d135595-d0cf-4f33-aa7e-faeed19a0a80 service nova] Lock "1ce155c8-9a10-4eff-b428-31889aa8f638-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 774.449182] env[62070]: DEBUG nova.compute.manager [req-ae8eade5-979f-48e9-96d0-76ccbc754a7f req-8d135595-d0cf-4f33-aa7e-faeed19a0a80 service nova] [instance: 1ce155c8-9a10-4eff-b428-31889aa8f638] No waiting events found dispatching network-vif-plugged-9cd48720-b8f6-4b76-ba6d-e6f0d9916a62 {{(pid=62070) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 774.449367] env[62070]: WARNING nova.compute.manager [req-ae8eade5-979f-48e9-96d0-76ccbc754a7f req-8d135595-d0cf-4f33-aa7e-faeed19a0a80 service nova] [instance: 1ce155c8-9a10-4eff-b428-31889aa8f638] Received unexpected event network-vif-plugged-9cd48720-b8f6-4b76-ba6d-e6f0d9916a62 for instance with vm_state building and task_state spawning. 
[ 774.449538] env[62070]: DEBUG nova.compute.manager [req-ae8eade5-979f-48e9-96d0-76ccbc754a7f req-8d135595-d0cf-4f33-aa7e-faeed19a0a80 service nova] [instance: 1ce155c8-9a10-4eff-b428-31889aa8f638] Received event network-changed-9cd48720-b8f6-4b76-ba6d-e6f0d9916a62 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 774.449665] env[62070]: DEBUG nova.compute.manager [req-ae8eade5-979f-48e9-96d0-76ccbc754a7f req-8d135595-d0cf-4f33-aa7e-faeed19a0a80 service nova] [instance: 1ce155c8-9a10-4eff-b428-31889aa8f638] Refreshing instance network info cache due to event network-changed-9cd48720-b8f6-4b76-ba6d-e6f0d9916a62. {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 774.449956] env[62070]: DEBUG oslo_concurrency.lockutils [req-ae8eade5-979f-48e9-96d0-76ccbc754a7f req-8d135595-d0cf-4f33-aa7e-faeed19a0a80 service nova] Acquiring lock "refresh_cache-1ce155c8-9a10-4eff-b428-31889aa8f638" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 774.450068] env[62070]: DEBUG oslo_concurrency.lockutils [req-ae8eade5-979f-48e9-96d0-76ccbc754a7f req-8d135595-d0cf-4f33-aa7e-faeed19a0a80 service nova] Acquired lock "refresh_cache-1ce155c8-9a10-4eff-b428-31889aa8f638" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 774.450151] env[62070]: DEBUG nova.network.neutron [req-ae8eade5-979f-48e9-96d0-76ccbc754a7f req-8d135595-d0cf-4f33-aa7e-faeed19a0a80 service nova] [instance: 1ce155c8-9a10-4eff-b428-31889aa8f638] Refreshing network info cache for port 9cd48720-b8f6-4b76-ba6d-e6f0d9916a62 {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 774.515350] env[62070]: INFO nova.compute.manager [None req-4566d04d-4f38-497b-9c8b-991a179272b8 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] [instance: 359ae9f2-a907-459e-99b9-3e043d5d015f] instance snapshotting [ 774.519051] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9d6de5e-0464-42c9-9948-b867663114d4 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.539955] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c7f3129-5b8c-4791-9151-5e8434d9864a {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.596097] env[62070]: DEBUG oslo_concurrency.lockutils [None req-18406021-c28f-46ab-8651-f32c1395e2d1 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: held 3.227s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 774.602032] env[62070]: DEBUG oslo_concurrency.lockutils [None req-e5d94c5b-216d-42d5-8cbd-2e80268f2a5f tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 19.770s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 774.602325] env[62070]: DEBUG nova.objects.instance [None req-e5d94c5b-216d-42d5-8cbd-2e80268f2a5f 
tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Lazy-loading 'resources' on Instance uuid fe378560-40b8-42c9-840d-b7d60de87c4d {{(pid=62070) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 774.636538] env[62070]: DEBUG oslo_vmware.api [None req-a1a0245d-1ee3-4e1c-adaf-6c5d189bff8e tempest-ServerMetadataNegativeTestJSON-1698615623 tempest-ServerMetadataNegativeTestJSON-1698615623-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]5201ee4a-cd6e-9462-a67f-6bdfee3cf798, 'name': SearchDatastore_Task, 'duration_secs': 0.011994} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 774.636827] env[62070]: DEBUG oslo_concurrency.lockutils [None req-a1a0245d-1ee3-4e1c-adaf-6c5d189bff8e tempest-ServerMetadataNegativeTestJSON-1698615623 tempest-ServerMetadataNegativeTestJSON-1698615623-project-member] Releasing lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 774.637111] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-a1a0245d-1ee3-4e1c-adaf-6c5d189bff8e tempest-ServerMetadataNegativeTestJSON-1698615623 tempest-ServerMetadataNegativeTestJSON-1698615623-project-member] [instance: 20e7a993-b1fb-4359-ab35-8b0f06ca0121] Processing image 43ea607c-7ece-4601-9b11-75c6a16aa7dd {{(pid=62070) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 774.637356] env[62070]: DEBUG oslo_concurrency.lockutils [None req-a1a0245d-1ee3-4e1c-adaf-6c5d189bff8e tempest-ServerMetadataNegativeTestJSON-1698615623 tempest-ServerMetadataNegativeTestJSON-1698615623-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 774.637587] env[62070]: DEBUG oslo_concurrency.lockutils [None req-a1a0245d-1ee3-4e1c-adaf-6c5d189bff8e tempest-ServerMetadataNegativeTestJSON-1698615623 tempest-ServerMetadataNegativeTestJSON-1698615623-project-member] Acquired lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 774.637678] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-a1a0245d-1ee3-4e1c-adaf-6c5d189bff8e tempest-ServerMetadataNegativeTestJSON-1698615623 tempest-ServerMetadataNegativeTestJSON-1698615623-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 774.637934] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ee62d2bc-8a4c-4f29-a9ba-ee43edc691a7 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.674741] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-a1a0245d-1ee3-4e1c-adaf-6c5d189bff8e tempest-ServerMetadataNegativeTestJSON-1698615623 tempest-ServerMetadataNegativeTestJSON-1698615623-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 774.674975] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None 
req-a1a0245d-1ee3-4e1c-adaf-6c5d189bff8e tempest-ServerMetadataNegativeTestJSON-1698615623 tempest-ServerMetadataNegativeTestJSON-1698615623-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62070) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 774.675795] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-63dc23f7-81a7-4284-b384-80f8533ccfde {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.681995] env[62070]: DEBUG oslo_vmware.api [None req-a1a0245d-1ee3-4e1c-adaf-6c5d189bff8e tempest-ServerMetadataNegativeTestJSON-1698615623 tempest-ServerMetadataNegativeTestJSON-1698615623-project-member] Waiting for the task: (returnval){ [ 774.681995] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]5204bcb2-d04f-dbdc-f849-19918045e0cb" [ 774.681995] env[62070]: _type = "Task" [ 774.681995] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 774.690362] env[62070]: DEBUG oslo_vmware.api [None req-a1a0245d-1ee3-4e1c-adaf-6c5d189bff8e tempest-ServerMetadataNegativeTestJSON-1698615623 tempest-ServerMetadataNegativeTestJSON-1698615623-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]5204bcb2-d04f-dbdc-f849-19918045e0cb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 775.039236] env[62070]: DEBUG nova.network.neutron [req-ae8eade5-979f-48e9-96d0-76ccbc754a7f req-8d135595-d0cf-4f33-aa7e-faeed19a0a80 service nova] [instance: 1ce155c8-9a10-4eff-b428-31889aa8f638] Instance cache missing network info. {{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 775.054384] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-4566d04d-4f38-497b-9c8b-991a179272b8 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] [instance: 359ae9f2-a907-459e-99b9-3e043d5d015f] Creating Snapshot of the VM instance {{(pid=62070) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 775.054384] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-ff564b72-35d1-4d47-8bfc-73e8c86fbc40 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.061216] env[62070]: DEBUG oslo_vmware.api [None req-4566d04d-4f38-497b-9c8b-991a179272b8 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Waiting for the task: (returnval){ [ 775.061216] env[62070]: value = "task-1121625" [ 775.061216] env[62070]: _type = "Task" [ 775.061216] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 775.071645] env[62070]: DEBUG oslo_vmware.api [None req-4566d04d-4f38-497b-9c8b-991a179272b8 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Task: {'id': task-1121625, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 775.200177] env[62070]: DEBUG oslo_vmware.api [None req-a1a0245d-1ee3-4e1c-adaf-6c5d189bff8e tempest-ServerMetadataNegativeTestJSON-1698615623 tempest-ServerMetadataNegativeTestJSON-1698615623-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]5204bcb2-d04f-dbdc-f849-19918045e0cb, 'name': SearchDatastore_Task, 'duration_secs': 0.04059} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 775.201221] env[62070]: DEBUG nova.network.neutron [req-ae8eade5-979f-48e9-96d0-76ccbc754a7f req-8d135595-d0cf-4f33-aa7e-faeed19a0a80 service nova] [instance: 1ce155c8-9a10-4eff-b428-31889aa8f638] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 775.204026] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-656fa73c-a57f-4c87-8c21-9d0a5bc3bcbf {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.209693] env[62070]: DEBUG oslo_vmware.api [None req-a1a0245d-1ee3-4e1c-adaf-6c5d189bff8e tempest-ServerMetadataNegativeTestJSON-1698615623 tempest-ServerMetadataNegativeTestJSON-1698615623-project-member] Waiting for the task: (returnval){ [ 775.209693] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]52676c67-2c8a-b128-4d89-41a14052dabe" [ 775.209693] env[62070]: _type = "Task" [ 775.209693] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 775.221094] env[62070]: DEBUG oslo_vmware.api [None req-a1a0245d-1ee3-4e1c-adaf-6c5d189bff8e tempest-ServerMetadataNegativeTestJSON-1698615623 tempest-ServerMetadataNegativeTestJSON-1698615623-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52676c67-2c8a-b128-4d89-41a14052dabe, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 775.573654] env[62070]: DEBUG oslo_vmware.api [None req-4566d04d-4f38-497b-9c8b-991a179272b8 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Task: {'id': task-1121625, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 775.612769] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8403951-e074-4c8d-8655-804a0718ea95 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.624891] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0de74549-3d08-4ce8-be2a-ed298ebb8249 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.658128] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dce0e943-e576-4b3e-9dbd-71cb04b2326a {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.666043] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-118731d1-50bf-4603-a176-551791e9a1c5 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.680341] env[62070]: DEBUG nova.compute.provider_tree [None req-e5d94c5b-216d-42d5-8cbd-2e80268f2a5f tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Updating inventory in ProviderTree for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 775.704444] env[62070]: DEBUG oslo_concurrency.lockutils [req-ae8eade5-979f-48e9-96d0-76ccbc754a7f req-8d135595-d0cf-4f33-aa7e-faeed19a0a80 service nova] Releasing lock "refresh_cache-1ce155c8-9a10-4eff-b428-31889aa8f638" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 775.720083] env[62070]: DEBUG oslo_vmware.api [None req-a1a0245d-1ee3-4e1c-adaf-6c5d189bff8e tempest-ServerMetadataNegativeTestJSON-1698615623 tempest-ServerMetadataNegativeTestJSON-1698615623-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52676c67-2c8a-b128-4d89-41a14052dabe, 'name': SearchDatastore_Task, 'duration_secs': 0.015237} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 775.720349] env[62070]: DEBUG oslo_concurrency.lockutils [None req-a1a0245d-1ee3-4e1c-adaf-6c5d189bff8e tempest-ServerMetadataNegativeTestJSON-1698615623 tempest-ServerMetadataNegativeTestJSON-1698615623-project-member] Releasing lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 775.720597] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-a1a0245d-1ee3-4e1c-adaf-6c5d189bff8e tempest-ServerMetadataNegativeTestJSON-1698615623 tempest-ServerMetadataNegativeTestJSON-1698615623-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore2] 20e7a993-b1fb-4359-ab35-8b0f06ca0121/20e7a993-b1fb-4359-ab35-8b0f06ca0121.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 775.721111] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b0b078c8-031f-4277-b15b-d438e3634906 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.728147] env[62070]: DEBUG oslo_vmware.api [None req-a1a0245d-1ee3-4e1c-adaf-6c5d189bff8e tempest-ServerMetadataNegativeTestJSON-1698615623 tempest-ServerMetadataNegativeTestJSON-1698615623-project-member] Waiting for the task: (returnval){ [ 775.728147] env[62070]: value = "task-1121626" [ 775.728147] env[62070]: _type = "Task" [ 775.728147] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 775.736058] env[62070]: DEBUG oslo_vmware.api [None req-a1a0245d-1ee3-4e1c-adaf-6c5d189bff8e tempest-ServerMetadataNegativeTestJSON-1698615623 tempest-ServerMetadataNegativeTestJSON-1698615623-project-member] Task: {'id': task-1121626, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 776.075154] env[62070]: DEBUG oslo_vmware.api [None req-4566d04d-4f38-497b-9c8b-991a179272b8 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Task: {'id': task-1121625, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 776.149295] env[62070]: INFO nova.compute.manager [None req-18406021-c28f-46ab-8651-f32c1395e2d1 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] [instance: d148d561-3211-4f1f-965a-f2b14cd60b11] Swapping old allocation on dict_keys(['21c7c111-1b69-4468-b2c4-5dd96014fbd6']) held by migration fd5d7c3d-d0ee-488f-a0a2-a1f50cfb458f for instance [ 776.178680] env[62070]: DEBUG nova.scheduler.client.report [None req-18406021-c28f-46ab-8651-f32c1395e2d1 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Overwriting current allocation {'allocations': {'21c7c111-1b69-4468-b2c4-5dd96014fbd6': {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}, 'generation': 76}}, 'project_id': '94c6fc73d5a74adb8384fd156daf3f58', 'user_id': '0e3ee2dd49154a44bcfb94832273cd52', 'consumer_generation': 1} on consumer d148d561-3211-4f1f-965a-f2b14cd60b11 {{(pid=62070) move_allocations /opt/stack/nova/nova/scheduler/client/report.py:2032}} [ 776.205660] env[62070]: ERROR nova.scheduler.client.report [None req-e5d94c5b-216d-42d5-8cbd-2e80268f2a5f tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] [req-fa357f50-af34-47c4-8464-155ca1dcc8a0] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 21c7c111-1b69-4468-b2c4-5dd96014fbd6. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-fa357f50-af34-47c4-8464-155ca1dcc8a0"}]} [ 776.228275] env[62070]: DEBUG nova.scheduler.client.report [None req-e5d94c5b-216d-42d5-8cbd-2e80268f2a5f tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Refreshing inventories for resource provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 776.243911] env[62070]: DEBUG oslo_vmware.api [None req-a1a0245d-1ee3-4e1c-adaf-6c5d189bff8e tempest-ServerMetadataNegativeTestJSON-1698615623 tempest-ServerMetadataNegativeTestJSON-1698615623-project-member] Task: {'id': task-1121626, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 776.245199] env[62070]: DEBUG nova.scheduler.client.report [None req-e5d94c5b-216d-42d5-8cbd-2e80268f2a5f tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Updating ProviderTree inventory for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 776.245396] env[62070]: DEBUG nova.compute.provider_tree [None req-e5d94c5b-216d-42d5-8cbd-2e80268f2a5f tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Updating resource provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 generation from 76 to 77 during operation: update_inventory {{(pid=62070) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 776.245556] env[62070]: DEBUG nova.compute.provider_tree [None req-e5d94c5b-216d-42d5-8cbd-2e80268f2a5f tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Updating inventory in ProviderTree for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 776.259434] env[62070]: DEBUG nova.scheduler.client.report [None req-e5d94c5b-216d-42d5-8cbd-2e80268f2a5f tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Refreshing aggregate associations for resource provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6, aggregates: None {{(pid=62070) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 776.286794] env[62070]: DEBUG nova.scheduler.client.report [None req-e5d94c5b-216d-42d5-8cbd-2e80268f2a5f tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Refreshing trait associations for resource provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6, traits: COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO {{(pid=62070) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 776.291299] env[62070]: DEBUG oslo_concurrency.lockutils [None req-18406021-c28f-46ab-8651-f32c1395e2d1 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Acquiring lock "refresh_cache-d148d561-3211-4f1f-965a-f2b14cd60b11" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 776.291464] env[62070]: DEBUG oslo_concurrency.lockutils [None req-18406021-c28f-46ab-8651-f32c1395e2d1 tempest-MigrationsAdminTest-1554847369 
tempest-MigrationsAdminTest-1554847369-project-member] Acquired lock "refresh_cache-d148d561-3211-4f1f-965a-f2b14cd60b11" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 776.293056] env[62070]: DEBUG nova.network.neutron [None req-18406021-c28f-46ab-8651-f32c1395e2d1 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] [instance: d148d561-3211-4f1f-965a-f2b14cd60b11] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 776.558075] env[62070]: DEBUG nova.compute.manager [req-99dac0ae-8e32-4e0d-b7cb-fe88f3afd80f req-aad8b00a-361b-47f8-9898-306c4a2eb9fe service nova] [instance: 1ce155c8-9a10-4eff-b428-31889aa8f638] Received event network-vif-plugged-266d9e51-fe0e-458d-a1b5-ecdae4dd0c6b {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 776.558274] env[62070]: DEBUG oslo_concurrency.lockutils [req-99dac0ae-8e32-4e0d-b7cb-fe88f3afd80f req-aad8b00a-361b-47f8-9898-306c4a2eb9fe service nova] Acquiring lock "1ce155c8-9a10-4eff-b428-31889aa8f638-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 776.558471] env[62070]: DEBUG oslo_concurrency.lockutils [req-99dac0ae-8e32-4e0d-b7cb-fe88f3afd80f req-aad8b00a-361b-47f8-9898-306c4a2eb9fe service nova] Lock "1ce155c8-9a10-4eff-b428-31889aa8f638-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 776.558659] env[62070]: DEBUG oslo_concurrency.lockutils [req-99dac0ae-8e32-4e0d-b7cb-fe88f3afd80f req-aad8b00a-361b-47f8-9898-306c4a2eb9fe service nova] Lock "1ce155c8-9a10-4eff-b428-31889aa8f638-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 776.558800] env[62070]: DEBUG nova.compute.manager [req-99dac0ae-8e32-4e0d-b7cb-fe88f3afd80f req-aad8b00a-361b-47f8-9898-306c4a2eb9fe service nova] [instance: 1ce155c8-9a10-4eff-b428-31889aa8f638] No waiting events found dispatching network-vif-plugged-266d9e51-fe0e-458d-a1b5-ecdae4dd0c6b {{(pid=62070) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 776.559449] env[62070]: WARNING nova.compute.manager [req-99dac0ae-8e32-4e0d-b7cb-fe88f3afd80f req-aad8b00a-361b-47f8-9898-306c4a2eb9fe service nova] [instance: 1ce155c8-9a10-4eff-b428-31889aa8f638] Received unexpected event network-vif-plugged-266d9e51-fe0e-458d-a1b5-ecdae4dd0c6b for instance with vm_state building and task_state spawning. [ 776.576324] env[62070]: DEBUG oslo_vmware.api [None req-4566d04d-4f38-497b-9c8b-991a179272b8 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Task: {'id': task-1121625, 'name': CreateSnapshot_Task, 'duration_secs': 1.320203} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 776.576324] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-4566d04d-4f38-497b-9c8b-991a179272b8 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] [instance: 359ae9f2-a907-459e-99b9-3e043d5d015f] Created Snapshot of the VM instance {{(pid=62070) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 776.576324] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7464c91-f335-4e3c-8122-5d4ca7cc3c31 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.578911] env[62070]: DEBUG nova.network.neutron [None req-174e44be-5f42-4d72-b785-d7e1a5fd4e8e tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] [instance: 1ce155c8-9a10-4eff-b428-31889aa8f638] Successfully updated port: 266d9e51-fe0e-458d-a1b5-ecdae4dd0c6b {{(pid=62070) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 776.742944] env[62070]: DEBUG oslo_vmware.api [None req-a1a0245d-1ee3-4e1c-adaf-6c5d189bff8e tempest-ServerMetadataNegativeTestJSON-1698615623 tempest-ServerMetadataNegativeTestJSON-1698615623-project-member] Task: {'id': task-1121626, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.698988} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 776.743254] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-a1a0245d-1ee3-4e1c-adaf-6c5d189bff8e tempest-ServerMetadataNegativeTestJSON-1698615623 tempest-ServerMetadataNegativeTestJSON-1698615623-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore2] 20e7a993-b1fb-4359-ab35-8b0f06ca0121/20e7a993-b1fb-4359-ab35-8b0f06ca0121.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 776.743435] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-a1a0245d-1ee3-4e1c-adaf-6c5d189bff8e tempest-ServerMetadataNegativeTestJSON-1698615623 tempest-ServerMetadataNegativeTestJSON-1698615623-project-member] [instance: 20e7a993-b1fb-4359-ab35-8b0f06ca0121] Extending root virtual disk to 1048576 {{(pid=62070) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 776.743686] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-caac4e27-04a1-4dd1-a713-d7981e773df5 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.747210] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d929146-d2f8-412a-9cf3-a524aa649ef8 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.756399] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-118fa84a-afe8-4eff-908d-671f201b714f {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.759579] env[62070]: DEBUG oslo_vmware.api [None req-a1a0245d-1ee3-4e1c-adaf-6c5d189bff8e tempest-ServerMetadataNegativeTestJSON-1698615623 tempest-ServerMetadataNegativeTestJSON-1698615623-project-member] Waiting for the task: (returnval){ [ 
776.759579] env[62070]: value = "task-1121627" [ 776.759579] env[62070]: _type = "Task" [ 776.759579] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 776.788799] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26ca21a7-3264-42be-9a3c-800b78ba4cd3 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.795912] env[62070]: DEBUG oslo_vmware.api [None req-a1a0245d-1ee3-4e1c-adaf-6c5d189bff8e tempest-ServerMetadataNegativeTestJSON-1698615623 tempest-ServerMetadataNegativeTestJSON-1698615623-project-member] Task: {'id': task-1121627, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 776.800910] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-979d88c6-9788-4071-9b84-54ae5a859f0a {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.814567] env[62070]: DEBUG nova.compute.provider_tree [None req-e5d94c5b-216d-42d5-8cbd-2e80268f2a5f tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Updating inventory in ProviderTree for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 776.998947] env[62070]: DEBUG nova.network.neutron [None req-18406021-c28f-46ab-8651-f32c1395e2d1 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] [instance: d148d561-3211-4f1f-965a-f2b14cd60b11] Updating instance_info_cache with network_info: [{"id": "c5e6098a-ebbb-4eee-ba72-4ddaad679830", "address": "fa:16:3e:7b:ab:3c", "network": {"id": "df33a08d-88db-4a22-846f-5b414705fc65", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.22", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "9d42cb2bbadf40d6b35f237f71234611", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4728adca-2846-416a-91a3-deb898faf1f3", "external-id": "nsx-vlan-transportzone-823", "segmentation_id": 823, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc5e6098a-eb", "ovs_interfaceid": "c5e6098a-ebbb-4eee-ba72-4ddaad679830", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 777.083378] env[62070]: DEBUG oslo_concurrency.lockutils [None req-174e44be-5f42-4d72-b785-d7e1a5fd4e8e 
tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] Acquiring lock "refresh_cache-1ce155c8-9a10-4eff-b428-31889aa8f638" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 777.083538] env[62070]: DEBUG oslo_concurrency.lockutils [None req-174e44be-5f42-4d72-b785-d7e1a5fd4e8e tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] Acquired lock "refresh_cache-1ce155c8-9a10-4eff-b428-31889aa8f638" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 777.083882] env[62070]: DEBUG nova.network.neutron [None req-174e44be-5f42-4d72-b785-d7e1a5fd4e8e tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] [instance: 1ce155c8-9a10-4eff-b428-31889aa8f638] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 777.104058] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-4566d04d-4f38-497b-9c8b-991a179272b8 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] [instance: 359ae9f2-a907-459e-99b9-3e043d5d015f] Creating linked-clone VM from snapshot {{(pid=62070) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 777.104579] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-2ae140ce-ff81-4f7e-aaf3-a7bdbbd310e9 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.113947] env[62070]: DEBUG oslo_vmware.api [None req-4566d04d-4f38-497b-9c8b-991a179272b8 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Waiting for the task: (returnval){ [ 777.113947] env[62070]: value = "task-1121628" [ 777.113947] env[62070]: _type = "Task" [ 777.113947] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 777.122773] env[62070]: DEBUG oslo_vmware.api [None req-4566d04d-4f38-497b-9c8b-991a179272b8 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Task: {'id': task-1121628, 'name': CloneVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 777.220701] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-e853c04a-59ac-41da-9df0-20098d493a59 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 13e3576e-4f4c-4541-a637-daa124cbf8dd] Volume attach. 
Driver type: vmdk {{(pid=62070) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 777.221111] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-e853c04a-59ac-41da-9df0-20098d493a59 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 13e3576e-4f4c-4541-a637-daa124cbf8dd] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-245379', 'volume_id': '36041495-d67a-4503-9146-0c73ccad5d5f', 'name': 'volume-36041495-d67a-4503-9146-0c73ccad5d5f', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '13e3576e-4f4c-4541-a637-daa124cbf8dd', 'attached_at': '', 'detached_at': '', 'volume_id': '36041495-d67a-4503-9146-0c73ccad5d5f', 'serial': '36041495-d67a-4503-9146-0c73ccad5d5f'} {{(pid=62070) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 777.222443] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a94262e1-06e4-4bd2-85d3-03fcf92930e6 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.964145] env[62070]: DEBUG oslo_concurrency.lockutils [None req-18406021-c28f-46ab-8651-f32c1395e2d1 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Releasing lock "refresh_cache-d148d561-3211-4f1f-965a-f2b14cd60b11" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 777.964843] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-18406021-c28f-46ab-8651-f32c1395e2d1 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] [instance: d148d561-3211-4f1f-965a-f2b14cd60b11] Powering off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 777.970946] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ace7c3f6-59f9-48c3-b121-ae51397d4fec {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.974237] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1cea8b91-19b3-4cd3-97c0-e03006939b72 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.982326] env[62070]: DEBUG oslo_vmware.api [None req-a1a0245d-1ee3-4e1c-adaf-6c5d189bff8e tempest-ServerMetadataNegativeTestJSON-1698615623 tempest-ServerMetadataNegativeTestJSON-1698615623-project-member] Task: {'id': task-1121627, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.111068} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 777.987938] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-a1a0245d-1ee3-4e1c-adaf-6c5d189bff8e tempest-ServerMetadataNegativeTestJSON-1698615623 tempest-ServerMetadataNegativeTestJSON-1698615623-project-member] [instance: 20e7a993-b1fb-4359-ab35-8b0f06ca0121] Extended root virtual disk {{(pid=62070) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 777.988406] env[62070]: DEBUG oslo_vmware.api [None req-4566d04d-4f38-497b-9c8b-991a179272b8 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Task: {'id': task-1121628, 'name': CloneVM_Task} progress is 94%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 777.990668] env[62070]: DEBUG oslo_vmware.api [None req-18406021-c28f-46ab-8651-f32c1395e2d1 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Waiting for the task: (returnval){ [ 777.990668] env[62070]: value = "task-1121629" [ 777.990668] env[62070]: _type = "Task" [ 777.990668] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 777.990668] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84ac8fbe-5bd8-41cf-8076-3d7c9e6fd25f {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.995799] env[62070]: DEBUG nova.scheduler.client.report [None req-e5d94c5b-216d-42d5-8cbd-2e80268f2a5f tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Updated inventory for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 with generation 77 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 777.996061] env[62070]: DEBUG nova.compute.provider_tree [None req-e5d94c5b-216d-42d5-8cbd-2e80268f2a5f tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Updating resource provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 generation from 77 to 78 during operation: update_inventory {{(pid=62070) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 777.996245] env[62070]: DEBUG nova.compute.provider_tree [None req-e5d94c5b-216d-42d5-8cbd-2e80268f2a5f tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Updating inventory in ProviderTree for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 
778.019836] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-e853c04a-59ac-41da-9df0-20098d493a59 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 13e3576e-4f4c-4541-a637-daa124cbf8dd] Reconfiguring VM instance instance-00000030 to attach disk [datastore2] volume-36041495-d67a-4503-9146-0c73ccad5d5f/volume-36041495-d67a-4503-9146-0c73ccad5d5f.vmdk or device None with type thin {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 778.021449] env[62070]: DEBUG nova.network.neutron [None req-174e44be-5f42-4d72-b785-d7e1a5fd4e8e tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] [instance: 1ce155c8-9a10-4eff-b428-31889aa8f638] Instance cache missing network info. {{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 778.023945] env[62070]: DEBUG oslo_concurrency.lockutils [None req-e5d94c5b-216d-42d5-8cbd-2e80268f2a5f tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 3.422s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 778.034725] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d90a22a2-6a98-4666-8f78-92839a44ee57 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.047576] env[62070]: DEBUG oslo_concurrency.lockutils [None req-90df06cb-f655-463e-98fa-a0efa725c8cc tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 22.821s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 778.049275] env[62070]: INFO nova.compute.claims [None req-90df06cb-f655-463e-98fa-a0efa725c8cc tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] [instance: b7fdf23e-1e39-4745-ae84-38b7fa89aa5d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 778.060009] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-a1a0245d-1ee3-4e1c-adaf-6c5d189bff8e tempest-ServerMetadataNegativeTestJSON-1698615623 tempest-ServerMetadataNegativeTestJSON-1698615623-project-member] [instance: 20e7a993-b1fb-4359-ab35-8b0f06ca0121] Reconfiguring VM instance instance-00000034 to attach disk [datastore2] 20e7a993-b1fb-4359-ab35-8b0f06ca0121/20e7a993-b1fb-4359-ab35-8b0f06ca0121.vmdk or device None with type sparse {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 778.064508] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fe1fdc06-7ef3-473f-97f0-b2af2fcfe674 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.078292] env[62070]: DEBUG oslo_vmware.api [None req-18406021-c28f-46ab-8651-f32c1395e2d1 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Task: {'id': task-1121629, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 778.085449] env[62070]: DEBUG oslo_vmware.api [None req-e853c04a-59ac-41da-9df0-20098d493a59 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Waiting for the task: (returnval){ [ 778.085449] env[62070]: value = "task-1121630" [ 778.085449] env[62070]: _type = "Task" [ 778.085449] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 778.086522] env[62070]: DEBUG oslo_vmware.api [None req-a1a0245d-1ee3-4e1c-adaf-6c5d189bff8e tempest-ServerMetadataNegativeTestJSON-1698615623 tempest-ServerMetadataNegativeTestJSON-1698615623-project-member] Waiting for the task: (returnval){ [ 778.086522] env[62070]: value = "task-1121631" [ 778.086522] env[62070]: _type = "Task" [ 778.086522] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 778.093040] env[62070]: INFO nova.scheduler.client.report [None req-e5d94c5b-216d-42d5-8cbd-2e80268f2a5f tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Deleted allocations for instance fe378560-40b8-42c9-840d-b7d60de87c4d [ 778.110516] env[62070]: DEBUG oslo_vmware.api [None req-e853c04a-59ac-41da-9df0-20098d493a59 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Task: {'id': task-1121630, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 778.110666] env[62070]: DEBUG oslo_vmware.api [None req-a1a0245d-1ee3-4e1c-adaf-6c5d189bff8e tempest-ServerMetadataNegativeTestJSON-1698615623 tempest-ServerMetadataNegativeTestJSON-1698615623-project-member] Task: {'id': task-1121631, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 778.479062] env[62070]: DEBUG oslo_vmware.api [None req-4566d04d-4f38-497b-9c8b-991a179272b8 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Task: {'id': task-1121628, 'name': CloneVM_Task} progress is 94%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 778.500495] env[62070]: DEBUG oslo_vmware.api [None req-18406021-c28f-46ab-8651-f32c1395e2d1 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Task: {'id': task-1121629, 'name': PowerOffVM_Task, 'duration_secs': 0.21203} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 778.500757] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-18406021-c28f-46ab-8651-f32c1395e2d1 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] [instance: d148d561-3211-4f1f-965a-f2b14cd60b11] Powered off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 778.501438] env[62070]: DEBUG nova.virt.hardware [None req-18406021-c28f-46ab-8651-f32c1395e2d1 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T09:23:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='56c7fbac-8f4f-47f8-9a34-b39636f74e40',id=36,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-884828689',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=43ea607c-7ece-4601-9b11-75c6a16aa7dd,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 778.501658] env[62070]: DEBUG nova.virt.hardware [None req-18406021-c28f-46ab-8651-f32c1395e2d1 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Flavor limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 778.501811] env[62070]: DEBUG nova.virt.hardware [None req-18406021-c28f-46ab-8651-f32c1395e2d1 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Image limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 778.501992] env[62070]: DEBUG nova.virt.hardware [None req-18406021-c28f-46ab-8651-f32c1395e2d1 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Flavor pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 778.502162] env[62070]: DEBUG nova.virt.hardware [None req-18406021-c28f-46ab-8651-f32c1395e2d1 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Image pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 778.502335] env[62070]: DEBUG nova.virt.hardware [None req-18406021-c28f-46ab-8651-f32c1395e2d1 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 778.502550] env[62070]: DEBUG nova.virt.hardware [None req-18406021-c28f-46ab-8651-f32c1395e2d1 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 778.502728] env[62070]: DEBUG nova.virt.hardware [None req-18406021-c28f-46ab-8651-f32c1395e2d1 tempest-MigrationsAdminTest-1554847369 
tempest-MigrationsAdminTest-1554847369-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 778.502899] env[62070]: DEBUG nova.virt.hardware [None req-18406021-c28f-46ab-8651-f32c1395e2d1 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Got 1 possible topologies {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 778.503077] env[62070]: DEBUG nova.virt.hardware [None req-18406021-c28f-46ab-8651-f32c1395e2d1 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 778.503258] env[62070]: DEBUG nova.virt.hardware [None req-18406021-c28f-46ab-8651-f32c1395e2d1 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 778.508446] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0b55901d-317c-4de3-b2f6-aed7085dfec3 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.523979] env[62070]: DEBUG oslo_vmware.api [None req-18406021-c28f-46ab-8651-f32c1395e2d1 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Waiting for the task: (returnval){ [ 778.523979] env[62070]: value = "task-1121632" [ 778.523979] env[62070]: _type = "Task" [ 778.523979] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 778.534268] env[62070]: DEBUG oslo_vmware.api [None req-18406021-c28f-46ab-8651-f32c1395e2d1 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Task: {'id': task-1121632, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 778.598579] env[62070]: DEBUG oslo_vmware.api [None req-e853c04a-59ac-41da-9df0-20098d493a59 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Task: {'id': task-1121630, 'name': ReconfigVM_Task, 'duration_secs': 0.372961} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 778.599287] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-e853c04a-59ac-41da-9df0-20098d493a59 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 13e3576e-4f4c-4541-a637-daa124cbf8dd] Reconfigured VM instance instance-00000030 to attach disk [datastore2] volume-36041495-d67a-4503-9146-0c73ccad5d5f/volume-36041495-d67a-4503-9146-0c73ccad5d5f.vmdk or device None with type thin {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 778.608920] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-05158323-f987-45d4-93ae-1548f74eec93 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.618907] env[62070]: DEBUG oslo_vmware.api [None req-a1a0245d-1ee3-4e1c-adaf-6c5d189bff8e tempest-ServerMetadataNegativeTestJSON-1698615623 tempest-ServerMetadataNegativeTestJSON-1698615623-project-member] Task: {'id': task-1121631, 'name': ReconfigVM_Task, 'duration_secs': 0.294568} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 778.619419] env[62070]: DEBUG oslo_concurrency.lockutils [None req-e5d94c5b-216d-42d5-8cbd-2e80268f2a5f tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Lock "fe378560-40b8-42c9-840d-b7d60de87c4d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 26.850s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 778.624159] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-a1a0245d-1ee3-4e1c-adaf-6c5d189bff8e tempest-ServerMetadataNegativeTestJSON-1698615623 tempest-ServerMetadataNegativeTestJSON-1698615623-project-member] [instance: 20e7a993-b1fb-4359-ab35-8b0f06ca0121] Reconfigured VM instance instance-00000034 to attach disk [datastore2] 20e7a993-b1fb-4359-ab35-8b0f06ca0121/20e7a993-b1fb-4359-ab35-8b0f06ca0121.vmdk or device None with type sparse {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 778.624159] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9ecff993-d1a3-4ef7-bd80-72e1e0a5ad33 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.627862] env[62070]: DEBUG oslo_vmware.api [None req-e853c04a-59ac-41da-9df0-20098d493a59 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Waiting for the task: (returnval){ [ 778.627862] env[62070]: value = "task-1121633" [ 778.627862] env[62070]: _type = "Task" [ 778.627862] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 778.629237] env[62070]: DEBUG oslo_vmware.api [None req-a1a0245d-1ee3-4e1c-adaf-6c5d189bff8e tempest-ServerMetadataNegativeTestJSON-1698615623 tempest-ServerMetadataNegativeTestJSON-1698615623-project-member] Waiting for the task: (returnval){ [ 778.629237] env[62070]: value = "task-1121634" [ 778.629237] env[62070]: _type = "Task" [ 778.629237] env[62070]: } to complete. 
{{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 778.630479] env[62070]: DEBUG nova.network.neutron [None req-174e44be-5f42-4d72-b785-d7e1a5fd4e8e tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] [instance: 1ce155c8-9a10-4eff-b428-31889aa8f638] Updating instance_info_cache with network_info: [{"id": "9cd48720-b8f6-4b76-ba6d-e6f0d9916a62", "address": "fa:16:3e:9b:09:b0", "network": {"id": "a2aef4d4-2576-4519-8c0b-df12fbc4dc84", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1294598029", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.243", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a4448e8a041f494d8faf51ef6d88c635", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7908211b-df93-467b-87a8-3c3d29b03de6", "external-id": "nsx-vlan-transportzone-632", "segmentation_id": 632, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9cd48720-b8", "ovs_interfaceid": "9cd48720-b8f6-4b76-ba6d-e6f0d9916a62", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "266d9e51-fe0e-458d-a1b5-ecdae4dd0c6b", "address": "fa:16:3e:29:ce:d3", "network": {"id": "9227d6f0-6640-49df-9a49-8dbf2a60d1a7", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-342690429", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.30", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "a4448e8a041f494d8faf51ef6d88c635", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ad36dd36-1d2c-4f37-a259-98ef2e440794", "external-id": "nsx-vlan-transportzone-479", "segmentation_id": 479, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap266d9e51-fe", "ovs_interfaceid": "266d9e51-fe0e-458d-a1b5-ecdae4dd0c6b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 778.644859] env[62070]: DEBUG oslo_vmware.api [None req-e853c04a-59ac-41da-9df0-20098d493a59 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Task: {'id': task-1121633, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 778.649363] env[62070]: DEBUG oslo_vmware.api [None req-a1a0245d-1ee3-4e1c-adaf-6c5d189bff8e tempest-ServerMetadataNegativeTestJSON-1698615623 tempest-ServerMetadataNegativeTestJSON-1698615623-project-member] Task: {'id': task-1121634, 'name': Rename_Task} progress is 10%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 778.658779] env[62070]: DEBUG nova.compute.manager [req-ba82664f-ea8b-4169-91e0-565224bcca4c req-f024579f-8a04-4d42-b777-3bdaf89b62f7 service nova] [instance: 1ce155c8-9a10-4eff-b428-31889aa8f638] Received event network-changed-266d9e51-fe0e-458d-a1b5-ecdae4dd0c6b {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 778.659055] env[62070]: DEBUG nova.compute.manager [req-ba82664f-ea8b-4169-91e0-565224bcca4c req-f024579f-8a04-4d42-b777-3bdaf89b62f7 service nova] [instance: 1ce155c8-9a10-4eff-b428-31889aa8f638] Refreshing instance network info cache due to event network-changed-266d9e51-fe0e-458d-a1b5-ecdae4dd0c6b. {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 778.659272] env[62070]: DEBUG oslo_concurrency.lockutils [req-ba82664f-ea8b-4169-91e0-565224bcca4c req-f024579f-8a04-4d42-b777-3bdaf89b62f7 service nova] Acquiring lock "refresh_cache-1ce155c8-9a10-4eff-b428-31889aa8f638" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 778.977026] env[62070]: DEBUG oslo_vmware.api [None req-4566d04d-4f38-497b-9c8b-991a179272b8 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Task: {'id': task-1121628, 'name': CloneVM_Task, 'duration_secs': 1.77052} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 778.977321] env[62070]: INFO nova.virt.vmwareapi.vmops [None req-4566d04d-4f38-497b-9c8b-991a179272b8 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] [instance: 359ae9f2-a907-459e-99b9-3e043d5d015f] Created linked-clone VM from snapshot [ 778.978447] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1caa6c4-82f9-42c6-bcb3-b36c904b1e36 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.985451] env[62070]: DEBUG nova.virt.vmwareapi.images [None req-4566d04d-4f38-497b-9c8b-991a179272b8 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] [instance: 359ae9f2-a907-459e-99b9-3e043d5d015f] Uploading image 4dd20ead-44bf-4233-8544-9a7e21d4f7f2 {{(pid=62070) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:442}} [ 779.012104] env[62070]: DEBUG oslo_vmware.rw_handles [None req-4566d04d-4f38-497b-9c8b-991a179272b8 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 779.012104] env[62070]: value = "vm-245384" [ 779.012104] env[62070]: _type = "VirtualMachine" [ 779.012104] env[62070]: }. 
{{(pid=62070) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 779.012509] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-12fa23b4-fc30-4fdb-bf6b-6df1690674ff {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.020090] env[62070]: DEBUG oslo_vmware.rw_handles [None req-4566d04d-4f38-497b-9c8b-991a179272b8 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Lease: (returnval){ [ 779.020090] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]5254b2d4-3f66-b173-f504-799ba3a82e62" [ 779.020090] env[62070]: _type = "HttpNfcLease" [ 779.020090] env[62070]: } obtained for exporting VM: (result){ [ 779.020090] env[62070]: value = "vm-245384" [ 779.020090] env[62070]: _type = "VirtualMachine" [ 779.020090] env[62070]: }. {{(pid=62070) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 779.020700] env[62070]: DEBUG oslo_vmware.api [None req-4566d04d-4f38-497b-9c8b-991a179272b8 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Waiting for the lease: (returnval){ [ 779.020700] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]5254b2d4-3f66-b173-f504-799ba3a82e62" [ 779.020700] env[62070]: _type = "HttpNfcLease" [ 779.020700] env[62070]: } to be ready. {{(pid=62070) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 779.029293] env[62070]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 779.029293] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]5254b2d4-3f66-b173-f504-799ba3a82e62" [ 779.029293] env[62070]: _type = "HttpNfcLease" [ 779.029293] env[62070]: } is initializing. {{(pid=62070) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 779.035187] env[62070]: DEBUG oslo_vmware.api [None req-18406021-c28f-46ab-8651-f32c1395e2d1 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Task: {'id': task-1121632, 'name': ReconfigVM_Task, 'duration_secs': 0.150152} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 779.036061] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86f2b66d-d04d-49ac-a466-f690f2fa3781 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.053705] env[62070]: DEBUG nova.virt.hardware [None req-18406021-c28f-46ab-8651-f32c1395e2d1 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T09:23:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='56c7fbac-8f4f-47f8-9a34-b39636f74e40',id=36,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-884828689',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=43ea607c-7ece-4601-9b11-75c6a16aa7dd,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 779.053909] env[62070]: DEBUG nova.virt.hardware [None req-18406021-c28f-46ab-8651-f32c1395e2d1 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Flavor limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 779.054089] env[62070]: DEBUG nova.virt.hardware [None req-18406021-c28f-46ab-8651-f32c1395e2d1 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Image limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 779.054294] env[62070]: DEBUG nova.virt.hardware [None req-18406021-c28f-46ab-8651-f32c1395e2d1 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Flavor pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 779.054444] env[62070]: DEBUG nova.virt.hardware [None req-18406021-c28f-46ab-8651-f32c1395e2d1 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Image pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 779.054592] env[62070]: DEBUG nova.virt.hardware [None req-18406021-c28f-46ab-8651-f32c1395e2d1 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 779.054793] env[62070]: DEBUG nova.virt.hardware [None req-18406021-c28f-46ab-8651-f32c1395e2d1 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 779.054952] env[62070]: DEBUG nova.virt.hardware [None req-18406021-c28f-46ab-8651-f32c1395e2d1 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Build topologies for 1 vcpu(s) 1:1:1 
{{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 779.055128] env[62070]: DEBUG nova.virt.hardware [None req-18406021-c28f-46ab-8651-f32c1395e2d1 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Got 1 possible topologies {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 779.055292] env[62070]: DEBUG nova.virt.hardware [None req-18406021-c28f-46ab-8651-f32c1395e2d1 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 779.055463] env[62070]: DEBUG nova.virt.hardware [None req-18406021-c28f-46ab-8651-f32c1395e2d1 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 779.056365] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-196ab78d-e1b5-4fb4-8f58-74624e04f37e {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.061802] env[62070]: DEBUG oslo_vmware.api [None req-18406021-c28f-46ab-8651-f32c1395e2d1 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Waiting for the task: (returnval){ [ 779.061802] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]5266ec90-b608-0c53-b912-7869430d6b5f" [ 779.061802] env[62070]: _type = "Task" [ 779.061802] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 779.073770] env[62070]: DEBUG oslo_vmware.api [None req-18406021-c28f-46ab-8651-f32c1395e2d1 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]5266ec90-b608-0c53-b912-7869430d6b5f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 779.137311] env[62070]: DEBUG oslo_concurrency.lockutils [None req-174e44be-5f42-4d72-b785-d7e1a5fd4e8e tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] Releasing lock "refresh_cache-1ce155c8-9a10-4eff-b428-31889aa8f638" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 779.137687] env[62070]: DEBUG nova.compute.manager [None req-174e44be-5f42-4d72-b785-d7e1a5fd4e8e tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] [instance: 1ce155c8-9a10-4eff-b428-31889aa8f638] Instance network_info: |[{"id": "9cd48720-b8f6-4b76-ba6d-e6f0d9916a62", "address": "fa:16:3e:9b:09:b0", "network": {"id": "a2aef4d4-2576-4519-8c0b-df12fbc4dc84", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1294598029", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.243", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a4448e8a041f494d8faf51ef6d88c635", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7908211b-df93-467b-87a8-3c3d29b03de6", "external-id": "nsx-vlan-transportzone-632", "segmentation_id": 632, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9cd48720-b8", "ovs_interfaceid": "9cd48720-b8f6-4b76-ba6d-e6f0d9916a62", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "266d9e51-fe0e-458d-a1b5-ecdae4dd0c6b", "address": "fa:16:3e:29:ce:d3", "network": {"id": "9227d6f0-6640-49df-9a49-8dbf2a60d1a7", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-342690429", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.30", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "a4448e8a041f494d8faf51ef6d88c635", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ad36dd36-1d2c-4f37-a259-98ef2e440794", "external-id": "nsx-vlan-transportzone-479", "segmentation_id": 479, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap266d9e51-fe", "ovs_interfaceid": "266d9e51-fe0e-458d-a1b5-ecdae4dd0c6b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 779.142023] env[62070]: DEBUG oslo_concurrency.lockutils [req-ba82664f-ea8b-4169-91e0-565224bcca4c req-f024579f-8a04-4d42-b777-3bdaf89b62f7 service nova] Acquired lock "refresh_cache-1ce155c8-9a10-4eff-b428-31889aa8f638" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 779.142224] env[62070]: DEBUG nova.network.neutron 
[req-ba82664f-ea8b-4169-91e0-565224bcca4c req-f024579f-8a04-4d42-b777-3bdaf89b62f7 service nova] [instance: 1ce155c8-9a10-4eff-b428-31889aa8f638] Refreshing network info cache for port 266d9e51-fe0e-458d-a1b5-ecdae4dd0c6b {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 779.143588] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-174e44be-5f42-4d72-b785-d7e1a5fd4e8e tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] [instance: 1ce155c8-9a10-4eff-b428-31889aa8f638] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:9b:09:b0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7908211b-df93-467b-87a8-3c3d29b03de6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9cd48720-b8f6-4b76-ba6d-e6f0d9916a62', 'vif_model': 'vmxnet3'}, {'network_name': 'br-int', 'mac_address': 'fa:16:3e:29:ce:d3', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ad36dd36-1d2c-4f37-a259-98ef2e440794', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '266d9e51-fe0e-458d-a1b5-ecdae4dd0c6b', 'vif_model': 'vmxnet3'}] {{(pid=62070) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 779.159050] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-174e44be-5f42-4d72-b785-d7e1a5fd4e8e tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] Creating folder: Project (a4448e8a041f494d8faf51ef6d88c635). Parent ref: group-v245319. {{(pid=62070) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 779.166783] env[62070]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-bdccc126-3a07-488d-9efe-58bbe7e76aa1 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.168511] env[62070]: DEBUG oslo_vmware.api [None req-e853c04a-59ac-41da-9df0-20098d493a59 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Task: {'id': task-1121633, 'name': ReconfigVM_Task, 'duration_secs': 0.147182} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 779.170218] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-e853c04a-59ac-41da-9df0-20098d493a59 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 13e3576e-4f4c-4541-a637-daa124cbf8dd] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-245379', 'volume_id': '36041495-d67a-4503-9146-0c73ccad5d5f', 'name': 'volume-36041495-d67a-4503-9146-0c73ccad5d5f', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '13e3576e-4f4c-4541-a637-daa124cbf8dd', 'attached_at': '', 'detached_at': '', 'volume_id': '36041495-d67a-4503-9146-0c73ccad5d5f', 'serial': '36041495-d67a-4503-9146-0c73ccad5d5f'} {{(pid=62070) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 779.175150] env[62070]: DEBUG oslo_vmware.api [None req-a1a0245d-1ee3-4e1c-adaf-6c5d189bff8e tempest-ServerMetadataNegativeTestJSON-1698615623 tempest-ServerMetadataNegativeTestJSON-1698615623-project-member] Task: {'id': task-1121634, 'name': Rename_Task, 'duration_secs': 0.184873} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 779.175577] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-a1a0245d-1ee3-4e1c-adaf-6c5d189bff8e tempest-ServerMetadataNegativeTestJSON-1698615623 tempest-ServerMetadataNegativeTestJSON-1698615623-project-member] [instance: 20e7a993-b1fb-4359-ab35-8b0f06ca0121] Powering on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 779.176113] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7b3a8bff-4142-4ab4-ac0d-82e0a8daa3f9 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.182687] env[62070]: DEBUG oslo_vmware.api [None req-a1a0245d-1ee3-4e1c-adaf-6c5d189bff8e tempest-ServerMetadataNegativeTestJSON-1698615623 tempest-ServerMetadataNegativeTestJSON-1698615623-project-member] Waiting for the task: (returnval){ [ 779.182687] env[62070]: value = "task-1121637" [ 779.182687] env[62070]: _type = "Task" [ 779.182687] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 779.183909] env[62070]: INFO nova.virt.vmwareapi.vm_util [None req-174e44be-5f42-4d72-b785-d7e1a5fd4e8e tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] Created folder: Project (a4448e8a041f494d8faf51ef6d88c635) in parent group-v245319. [ 779.184120] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-174e44be-5f42-4d72-b785-d7e1a5fd4e8e tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] Creating folder: Instances. Parent ref: group-v245385. {{(pid=62070) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 779.187558] env[62070]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c264e09c-5a74-4f93-b5b9-dd86788326c7 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.196598] env[62070]: DEBUG oslo_vmware.api [None req-a1a0245d-1ee3-4e1c-adaf-6c5d189bff8e tempest-ServerMetadataNegativeTestJSON-1698615623 tempest-ServerMetadataNegativeTestJSON-1698615623-project-member] Task: {'id': task-1121637, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 779.200240] env[62070]: INFO nova.virt.vmwareapi.vm_util [None req-174e44be-5f42-4d72-b785-d7e1a5fd4e8e tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] Created folder: Instances in parent group-v245385. [ 779.200468] env[62070]: DEBUG oslo.service.loopingcall [None req-174e44be-5f42-4d72-b785-d7e1a5fd4e8e tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 779.200656] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1ce155c8-9a10-4eff-b428-31889aa8f638] Creating VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 779.201143] env[62070]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ca258177-1ffd-45ed-b5b0-2d60822faf2c {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.225492] env[62070]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 779.225492] env[62070]: value = "task-1121639" [ 779.225492] env[62070]: _type = "Task" [ 779.225492] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 779.233203] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1121639, 'name': CreateVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 779.529945] env[62070]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 779.529945] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]5254b2d4-3f66-b173-f504-799ba3a82e62" [ 779.529945] env[62070]: _type = "HttpNfcLease" [ 779.529945] env[62070]: } is ready. {{(pid=62070) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 779.530404] env[62070]: DEBUG oslo_vmware.rw_handles [None req-4566d04d-4f38-497b-9c8b-991a179272b8 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 779.530404] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]5254b2d4-3f66-b173-f504-799ba3a82e62" [ 779.530404] env[62070]: _type = "HttpNfcLease" [ 779.530404] env[62070]: }. {{(pid=62070) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 779.531117] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39fd928b-73ae-4649-af3b-2f88d2049214 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.539592] env[62070]: DEBUG oslo_vmware.rw_handles [None req-4566d04d-4f38-497b-9c8b-991a179272b8 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/524cea7d-34c1-a27a-7e97-5e24fca93c1f/disk-0.vmdk from lease info. {{(pid=62070) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 779.539837] env[62070]: DEBUG oslo_vmware.rw_handles [None req-4566d04d-4f38-497b-9c8b-991a179272b8 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/524cea7d-34c1-a27a-7e97-5e24fca93c1f/disk-0.vmdk for reading. 
{{(pid=62070) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 779.542580] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d047c893-ad6e-47c5-b301-bd83f3546e82 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.610144] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e89ac414-da74-4454-a029-656e735bcd21 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.622417] env[62070]: DEBUG oslo_vmware.api [None req-18406021-c28f-46ab-8651-f32c1395e2d1 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]5266ec90-b608-0c53-b912-7869430d6b5f, 'name': SearchDatastore_Task, 'duration_secs': 0.025655} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 779.653755] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-18406021-c28f-46ab-8651-f32c1395e2d1 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] [instance: d148d561-3211-4f1f-965a-f2b14cd60b11] Reconfiguring VM instance instance-00000020 to detach disk 2000 {{(pid=62070) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 779.654454] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0850a6b0-72de-46ec-a791-450a7a82915e {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.669100] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-386a0f37-9659-4bab-997e-457c59765eee {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.679540] env[62070]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-1f8585c6-e13e-4f9b-9cc9-8cd22496c4b5 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.691802] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71c2d308-9d1b-4166-a3c9-8ca34658531a {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.696068] env[62070]: DEBUG oslo_vmware.api [None req-18406021-c28f-46ab-8651-f32c1395e2d1 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Waiting for the task: (returnval){ [ 779.696068] env[62070]: value = "task-1121640" [ 779.696068] env[62070]: _type = "Task" [ 779.696068] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 779.715859] env[62070]: DEBUG oslo_vmware.api [None req-a1a0245d-1ee3-4e1c-adaf-6c5d189bff8e tempest-ServerMetadataNegativeTestJSON-1698615623 tempest-ServerMetadataNegativeTestJSON-1698615623-project-member] Task: {'id': task-1121637, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 779.716452] env[62070]: DEBUG nova.compute.provider_tree [None req-90df06cb-f655-463e-98fa-a0efa725c8cc tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 779.728080] env[62070]: DEBUG oslo_vmware.api [None req-18406021-c28f-46ab-8651-f32c1395e2d1 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Task: {'id': task-1121640, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 779.745723] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1121639, 'name': CreateVM_Task} progress is 99%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 780.053397] env[62070]: DEBUG nova.network.neutron [req-ba82664f-ea8b-4169-91e0-565224bcca4c req-f024579f-8a04-4d42-b777-3bdaf89b62f7 service nova] [instance: 1ce155c8-9a10-4eff-b428-31889aa8f638] Updated VIF entry in instance network info cache for port 266d9e51-fe0e-458d-a1b5-ecdae4dd0c6b. {{(pid=62070) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 780.053397] env[62070]: DEBUG nova.network.neutron [req-ba82664f-ea8b-4169-91e0-565224bcca4c req-f024579f-8a04-4d42-b777-3bdaf89b62f7 service nova] [instance: 1ce155c8-9a10-4eff-b428-31889aa8f638] Updating instance_info_cache with network_info: [{"id": "9cd48720-b8f6-4b76-ba6d-e6f0d9916a62", "address": "fa:16:3e:9b:09:b0", "network": {"id": "a2aef4d4-2576-4519-8c0b-df12fbc4dc84", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1294598029", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.243", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a4448e8a041f494d8faf51ef6d88c635", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7908211b-df93-467b-87a8-3c3d29b03de6", "external-id": "nsx-vlan-transportzone-632", "segmentation_id": 632, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9cd48720-b8", "ovs_interfaceid": "9cd48720-b8f6-4b76-ba6d-e6f0d9916a62", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "266d9e51-fe0e-458d-a1b5-ecdae4dd0c6b", "address": "fa:16:3e:29:ce:d3", "network": {"id": "9227d6f0-6640-49df-9a49-8dbf2a60d1a7", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-342690429", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.30", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "a4448e8a041f494d8faf51ef6d88c635", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": 
"ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ad36dd36-1d2c-4f37-a259-98ef2e440794", "external-id": "nsx-vlan-transportzone-479", "segmentation_id": 479, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap266d9e51-fe", "ovs_interfaceid": "266d9e51-fe0e-458d-a1b5-ecdae4dd0c6b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 780.195860] env[62070]: DEBUG oslo_vmware.api [None req-a1a0245d-1ee3-4e1c-adaf-6c5d189bff8e tempest-ServerMetadataNegativeTestJSON-1698615623 tempest-ServerMetadataNegativeTestJSON-1698615623-project-member] Task: {'id': task-1121637, 'name': PowerOnVM_Task, 'duration_secs': 0.660411} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 780.196293] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-a1a0245d-1ee3-4e1c-adaf-6c5d189bff8e tempest-ServerMetadataNegativeTestJSON-1698615623 tempest-ServerMetadataNegativeTestJSON-1698615623-project-member] [instance: 20e7a993-b1fb-4359-ab35-8b0f06ca0121] Powered on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 780.197421] env[62070]: INFO nova.compute.manager [None req-a1a0245d-1ee3-4e1c-adaf-6c5d189bff8e tempest-ServerMetadataNegativeTestJSON-1698615623 tempest-ServerMetadataNegativeTestJSON-1698615623-project-member] [instance: 20e7a993-b1fb-4359-ab35-8b0f06ca0121] Took 9.45 seconds to spawn the instance on the hypervisor. [ 780.198955] env[62070]: DEBUG nova.compute.manager [None req-a1a0245d-1ee3-4e1c-adaf-6c5d189bff8e tempest-ServerMetadataNegativeTestJSON-1698615623 tempest-ServerMetadataNegativeTestJSON-1698615623-project-member] [instance: 20e7a993-b1fb-4359-ab35-8b0f06ca0121] Checking state {{(pid=62070) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 780.198955] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d2006fa-1906-4729-9808-15175ef36c33 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.210760] env[62070]: DEBUG oslo_vmware.api [None req-18406021-c28f-46ab-8651-f32c1395e2d1 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Task: {'id': task-1121640, 'name': ReconfigVM_Task, 'duration_secs': 0.219309} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 780.212987] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-18406021-c28f-46ab-8651-f32c1395e2d1 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] [instance: d148d561-3211-4f1f-965a-f2b14cd60b11] Reconfigured VM instance instance-00000020 to detach disk 2000 {{(pid=62070) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 780.216868] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1a55d5b-5650-4468-81f1-6011549e20b8 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.234428] env[62070]: DEBUG nova.scheduler.client.report [None req-90df06cb-f655-463e-98fa-a0efa725c8cc tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 780.248046] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-18406021-c28f-46ab-8651-f32c1395e2d1 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] [instance: d148d561-3211-4f1f-965a-f2b14cd60b11] Reconfiguring VM instance instance-00000020 to attach disk [datastore1] d148d561-3211-4f1f-965a-f2b14cd60b11/d148d561-3211-4f1f-965a-f2b14cd60b11.vmdk or device None with type thin {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 780.254747] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-688e7a24-4a59-4ec9-9bcf-06dacc20eeb7 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.270027] env[62070]: DEBUG nova.objects.instance [None req-e853c04a-59ac-41da-9df0-20098d493a59 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Lazy-loading 'flavor' on Instance uuid 13e3576e-4f4c-4541-a637-daa124cbf8dd {{(pid=62070) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 780.279608] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1121639, 'name': CreateVM_Task, 'duration_secs': 0.54428} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 780.281026] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1ce155c8-9a10-4eff-b428-31889aa8f638] Created VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 780.282026] env[62070]: DEBUG oslo_vmware.api [None req-18406021-c28f-46ab-8651-f32c1395e2d1 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Waiting for the task: (returnval){ [ 780.282026] env[62070]: value = "task-1121641" [ 780.282026] env[62070]: _type = "Task" [ 780.282026] env[62070]: } to complete. 
{{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 780.282219] env[62070]: DEBUG oslo_concurrency.lockutils [None req-174e44be-5f42-4d72-b785-d7e1a5fd4e8e tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 780.282500] env[62070]: DEBUG oslo_concurrency.lockutils [None req-174e44be-5f42-4d72-b785-d7e1a5fd4e8e tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] Acquired lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 780.282830] env[62070]: DEBUG oslo_concurrency.lockutils [None req-174e44be-5f42-4d72-b785-d7e1a5fd4e8e tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 780.283319] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e266f414-0ce6-4064-9016-c69decded5a2 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.304536] env[62070]: DEBUG oslo_vmware.api [None req-18406021-c28f-46ab-8651-f32c1395e2d1 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Task: {'id': task-1121641, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 780.304536] env[62070]: DEBUG oslo_vmware.api [None req-174e44be-5f42-4d72-b785-d7e1a5fd4e8e tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] Waiting for the task: (returnval){ [ 780.304536] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]52ee5697-f178-4c8a-af5e-279ae8aa8562" [ 780.304536] env[62070]: _type = "Task" [ 780.304536] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 780.314727] env[62070]: DEBUG oslo_vmware.api [None req-174e44be-5f42-4d72-b785-d7e1a5fd4e8e tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52ee5697-f178-4c8a-af5e-279ae8aa8562, 'name': SearchDatastore_Task, 'duration_secs': 0.013582} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 780.315054] env[62070]: DEBUG oslo_concurrency.lockutils [None req-174e44be-5f42-4d72-b785-d7e1a5fd4e8e tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] Releasing lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 780.315306] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-174e44be-5f42-4d72-b785-d7e1a5fd4e8e tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] [instance: 1ce155c8-9a10-4eff-b428-31889aa8f638] Processing image 43ea607c-7ece-4601-9b11-75c6a16aa7dd {{(pid=62070) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 780.315584] env[62070]: DEBUG oslo_concurrency.lockutils [None req-174e44be-5f42-4d72-b785-d7e1a5fd4e8e tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 780.315684] env[62070]: DEBUG oslo_concurrency.lockutils [None req-174e44be-5f42-4d72-b785-d7e1a5fd4e8e tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] Acquired lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 780.315864] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-174e44be-5f42-4d72-b785-d7e1a5fd4e8e tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 780.316412] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8a91be11-f428-404a-a93c-d9d73ec35f4a {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.324738] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-174e44be-5f42-4d72-b785-d7e1a5fd4e8e tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 780.324933] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-174e44be-5f42-4d72-b785-d7e1a5fd4e8e tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62070) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 780.325685] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6ca1a23f-7d00-4fe0-80b1-b26fc0aacefb {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.332037] env[62070]: DEBUG oslo_vmware.api [None req-174e44be-5f42-4d72-b785-d7e1a5fd4e8e tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] Waiting for the task: (returnval){ [ 780.332037] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]522554e0-7621-2407-68ab-b3b6e5dd69b7" [ 780.332037] env[62070]: _type = "Task" [ 780.332037] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 780.343300] env[62070]: DEBUG oslo_vmware.api [None req-174e44be-5f42-4d72-b785-d7e1a5fd4e8e tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]522554e0-7621-2407-68ab-b3b6e5dd69b7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 780.555410] env[62070]: DEBUG oslo_concurrency.lockutils [req-ba82664f-ea8b-4169-91e0-565224bcca4c req-f024579f-8a04-4d42-b777-3bdaf89b62f7 service nova] Releasing lock "refresh_cache-1ce155c8-9a10-4eff-b428-31889aa8f638" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 780.728683] env[62070]: INFO nova.compute.manager [None req-a1a0245d-1ee3-4e1c-adaf-6c5d189bff8e tempest-ServerMetadataNegativeTestJSON-1698615623 tempest-ServerMetadataNegativeTestJSON-1698615623-project-member] [instance: 20e7a993-b1fb-4359-ab35-8b0f06ca0121] Took 31.62 seconds to build instance. [ 780.751366] env[62070]: DEBUG oslo_concurrency.lockutils [None req-90df06cb-f655-463e-98fa-a0efa725c8cc tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.704s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 780.751937] env[62070]: DEBUG nova.compute.manager [None req-90df06cb-f655-463e-98fa-a0efa725c8cc tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] [instance: b7fdf23e-1e39-4745-ae84-38b7fa89aa5d] Start building networks asynchronously for instance. 
{{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 780.757048] env[62070]: DEBUG oslo_concurrency.lockutils [None req-9f976259-6d6a-4631-9026-5216a519ae6c tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 24.665s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 780.757048] env[62070]: INFO nova.compute.claims [None req-9f976259-6d6a-4631-9026-5216a519ae6c tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] [instance: 10672096-00ba-4481-8ab3-085a185076db] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 780.776119] env[62070]: DEBUG oslo_concurrency.lockutils [None req-e853c04a-59ac-41da-9df0-20098d493a59 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Lock "13e3576e-4f4c-4541-a637-daa124cbf8dd" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 8.211s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 780.794217] env[62070]: DEBUG oslo_vmware.api [None req-18406021-c28f-46ab-8651-f32c1395e2d1 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Task: {'id': task-1121641, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 780.804066] env[62070]: DEBUG oslo_concurrency.lockutils [None req-bec45d6d-b2f1-4547-8703-b179a072d73f tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Acquiring lock "bcafa04d-904b-4eab-aba1-35180c2d4b22" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 780.804394] env[62070]: DEBUG oslo_concurrency.lockutils [None req-bec45d6d-b2f1-4547-8703-b179a072d73f tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Lock "bcafa04d-904b-4eab-aba1-35180c2d4b22" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 780.804655] env[62070]: DEBUG oslo_concurrency.lockutils [None req-bec45d6d-b2f1-4547-8703-b179a072d73f tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Acquiring lock "bcafa04d-904b-4eab-aba1-35180c2d4b22-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 780.804819] env[62070]: DEBUG oslo_concurrency.lockutils [None req-bec45d6d-b2f1-4547-8703-b179a072d73f tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Lock "bcafa04d-904b-4eab-aba1-35180c2d4b22-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 780.805248] env[62070]: DEBUG 
oslo_concurrency.lockutils [None req-bec45d6d-b2f1-4547-8703-b179a072d73f tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Lock "bcafa04d-904b-4eab-aba1-35180c2d4b22-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 780.807767] env[62070]: INFO nova.compute.manager [None req-bec45d6d-b2f1-4547-8703-b179a072d73f tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] [instance: bcafa04d-904b-4eab-aba1-35180c2d4b22] Terminating instance [ 780.809793] env[62070]: DEBUG nova.compute.manager [None req-bec45d6d-b2f1-4547-8703-b179a072d73f tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] [instance: bcafa04d-904b-4eab-aba1-35180c2d4b22] Start destroying the instance on the hypervisor. {{(pid=62070) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 780.810069] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-bec45d6d-b2f1-4547-8703-b179a072d73f tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] [instance: bcafa04d-904b-4eab-aba1-35180c2d4b22] Destroying instance {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 780.811008] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55da968c-4847-4685-bc5c-4d0a7f9e611c {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.820994] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-bec45d6d-b2f1-4547-8703-b179a072d73f tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] [instance: bcafa04d-904b-4eab-aba1-35180c2d4b22] Powering off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 780.821464] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1074c85a-edea-43e0-8b7b-dd58215d4f4b {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.828110] env[62070]: DEBUG oslo_vmware.api [None req-bec45d6d-b2f1-4547-8703-b179a072d73f tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Waiting for the task: (returnval){ [ 780.828110] env[62070]: value = "task-1121642" [ 780.828110] env[62070]: _type = "Task" [ 780.828110] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 780.839860] env[62070]: DEBUG oslo_vmware.api [None req-bec45d6d-b2f1-4547-8703-b179a072d73f tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Task: {'id': task-1121642, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 780.843640] env[62070]: DEBUG oslo_vmware.api [None req-174e44be-5f42-4d72-b785-d7e1a5fd4e8e tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]522554e0-7621-2407-68ab-b3b6e5dd69b7, 'name': SearchDatastore_Task, 'duration_secs': 0.010216} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 780.844563] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-afe339b4-e403-4219-9417-0d129384ed57 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.850739] env[62070]: DEBUG oslo_vmware.api [None req-174e44be-5f42-4d72-b785-d7e1a5fd4e8e tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] Waiting for the task: (returnval){ [ 780.850739] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]52c71cfb-c672-f109-7f2a-63407289e396" [ 780.850739] env[62070]: _type = "Task" [ 780.850739] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 780.858701] env[62070]: DEBUG oslo_vmware.api [None req-174e44be-5f42-4d72-b785-d7e1a5fd4e8e tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52c71cfb-c672-f109-7f2a-63407289e396, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 780.927611] env[62070]: DEBUG oslo_concurrency.lockutils [None req-a6b38dca-80cd-4c17-b569-dea27e709a06 tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Acquiring lock "5a146d8f-6921-4b3e-8696-d2804fb855ba" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 780.927993] env[62070]: DEBUG oslo_concurrency.lockutils [None req-a6b38dca-80cd-4c17-b569-dea27e709a06 tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Lock "5a146d8f-6921-4b3e-8696-d2804fb855ba" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 780.930218] env[62070]: DEBUG oslo_concurrency.lockutils [None req-a6b38dca-80cd-4c17-b569-dea27e709a06 tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Acquiring lock "5a146d8f-6921-4b3e-8696-d2804fb855ba-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 780.930218] env[62070]: DEBUG oslo_concurrency.lockutils [None req-a6b38dca-80cd-4c17-b569-dea27e709a06 tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Lock "5a146d8f-6921-4b3e-8696-d2804fb855ba-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s 
{{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 780.930218] env[62070]: DEBUG oslo_concurrency.lockutils [None req-a6b38dca-80cd-4c17-b569-dea27e709a06 tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Lock "5a146d8f-6921-4b3e-8696-d2804fb855ba-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 780.931534] env[62070]: INFO nova.compute.manager [None req-a6b38dca-80cd-4c17-b569-dea27e709a06 tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] [instance: 5a146d8f-6921-4b3e-8696-d2804fb855ba] Terminating instance [ 780.935530] env[62070]: DEBUG nova.compute.manager [None req-a6b38dca-80cd-4c17-b569-dea27e709a06 tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] [instance: 5a146d8f-6921-4b3e-8696-d2804fb855ba] Start destroying the instance on the hypervisor. {{(pid=62070) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 780.935530] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-a6b38dca-80cd-4c17-b569-dea27e709a06 tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] [instance: 5a146d8f-6921-4b3e-8696-d2804fb855ba] Destroying instance {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 780.936485] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-293bb53c-c031-480d-9d51-04b5d5b8b24d {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.944239] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-a6b38dca-80cd-4c17-b569-dea27e709a06 tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] [instance: 5a146d8f-6921-4b3e-8696-d2804fb855ba] Powering off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 780.945176] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a3f1897c-7e74-4cee-915b-c357a8419a1c {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.954463] env[62070]: DEBUG oslo_vmware.api [None req-a6b38dca-80cd-4c17-b569-dea27e709a06 tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Waiting for the task: (returnval){ [ 780.954463] env[62070]: value = "task-1121643" [ 780.954463] env[62070]: _type = "Task" [ 780.954463] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 780.963285] env[62070]: DEBUG oslo_vmware.api [None req-a6b38dca-80cd-4c17-b569-dea27e709a06 tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Task: {'id': task-1121643, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 781.093389] env[62070]: DEBUG oslo_concurrency.lockutils [None req-1f25b52c-bc5b-4328-ac52-d99c50d50e57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Acquiring lock "13e3576e-4f4c-4541-a637-daa124cbf8dd" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 781.093847] env[62070]: DEBUG oslo_concurrency.lockutils [None req-1f25b52c-bc5b-4328-ac52-d99c50d50e57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Lock "13e3576e-4f4c-4541-a637-daa124cbf8dd" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 781.094288] env[62070]: DEBUG oslo_concurrency.lockutils [None req-1f25b52c-bc5b-4328-ac52-d99c50d50e57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Acquiring lock "13e3576e-4f4c-4541-a637-daa124cbf8dd-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 781.094600] env[62070]: DEBUG oslo_concurrency.lockutils [None req-1f25b52c-bc5b-4328-ac52-d99c50d50e57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Lock "13e3576e-4f4c-4541-a637-daa124cbf8dd-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 781.094830] env[62070]: DEBUG oslo_concurrency.lockutils [None req-1f25b52c-bc5b-4328-ac52-d99c50d50e57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Lock "13e3576e-4f4c-4541-a637-daa124cbf8dd-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 781.097768] env[62070]: INFO nova.compute.manager [None req-1f25b52c-bc5b-4328-ac52-d99c50d50e57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 13e3576e-4f4c-4541-a637-daa124cbf8dd] Terminating instance [ 781.100736] env[62070]: DEBUG nova.compute.manager [None req-1f25b52c-bc5b-4328-ac52-d99c50d50e57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 13e3576e-4f4c-4541-a637-daa124cbf8dd] Start destroying the instance on the hypervisor. 
{{(pid=62070) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 781.101060] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-1f25b52c-bc5b-4328-ac52-d99c50d50e57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 13e3576e-4f4c-4541-a637-daa124cbf8dd] Powering off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 781.101401] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a524d222-d767-4293-9a40-e42fdf2724fa {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.109650] env[62070]: DEBUG oslo_vmware.api [None req-1f25b52c-bc5b-4328-ac52-d99c50d50e57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Waiting for the task: (returnval){ [ 781.109650] env[62070]: value = "task-1121644" [ 781.109650] env[62070]: _type = "Task" [ 781.109650] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 781.120424] env[62070]: DEBUG oslo_vmware.api [None req-1f25b52c-bc5b-4328-ac52-d99c50d50e57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Task: {'id': task-1121644, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 781.231468] env[62070]: DEBUG oslo_concurrency.lockutils [None req-a1a0245d-1ee3-4e1c-adaf-6c5d189bff8e tempest-ServerMetadataNegativeTestJSON-1698615623 tempest-ServerMetadataNegativeTestJSON-1698615623-project-member] Lock "20e7a993-b1fb-4359-ab35-8b0f06ca0121" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 115.975s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 781.261826] env[62070]: DEBUG nova.compute.utils [None req-90df06cb-f655-463e-98fa-a0efa725c8cc tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] Using /dev/sd instead of None {{(pid=62070) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 781.266049] env[62070]: DEBUG nova.compute.manager [None req-90df06cb-f655-463e-98fa-a0efa725c8cc tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] [instance: b7fdf23e-1e39-4745-ae84-38b7fa89aa5d] Allocating IP information in the background. {{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 781.266049] env[62070]: DEBUG nova.network.neutron [None req-90df06cb-f655-463e-98fa-a0efa725c8cc tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] [instance: b7fdf23e-1e39-4745-ae84-38b7fa89aa5d] allocate_for_instance() {{(pid=62070) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 781.295676] env[62070]: DEBUG oslo_vmware.api [None req-18406021-c28f-46ab-8651-f32c1395e2d1 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Task: {'id': task-1121641, 'name': ReconfigVM_Task, 'duration_secs': 0.531213} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 781.296269] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-18406021-c28f-46ab-8651-f32c1395e2d1 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] [instance: d148d561-3211-4f1f-965a-f2b14cd60b11] Reconfigured VM instance instance-00000020 to attach disk [datastore1] d148d561-3211-4f1f-965a-f2b14cd60b11/d148d561-3211-4f1f-965a-f2b14cd60b11.vmdk or device None with type thin {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 781.297602] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e075d35-4128-45ca-8cd1-672680cf1ee9 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.323799] env[62070]: DEBUG nova.policy [None req-90df06cb-f655-463e-98fa-a0efa725c8cc tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8733fd804aff428eb12dab1fade3597f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '99567c9d305e486f9559797458c9a0e5', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62070) authorize /opt/stack/nova/nova/policy.py:201}} [ 781.326598] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6350288b-d5df-4da8-891f-ca76512e1650 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.356102] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b0fa924-e7de-4a35-9937-f48c27d91cb9 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.357832] env[62070]: DEBUG oslo_vmware.api [None req-bec45d6d-b2f1-4547-8703-b179a072d73f tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Task: {'id': task-1121642, 'name': PowerOffVM_Task, 'duration_secs': 0.216516} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 781.361406] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-bec45d6d-b2f1-4547-8703-b179a072d73f tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] [instance: bcafa04d-904b-4eab-aba1-35180c2d4b22] Powered off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 781.361864] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-bec45d6d-b2f1-4547-8703-b179a072d73f tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] [instance: bcafa04d-904b-4eab-aba1-35180c2d4b22] Unregistering the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 781.362897] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b04eaabc-b36f-4d3f-9d89-f21e12963b85 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.384343] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bcb3660-8198-4214-9926-80463038d936 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.391144] env[62070]: DEBUG oslo_vmware.api [None req-174e44be-5f42-4d72-b785-d7e1a5fd4e8e tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52c71cfb-c672-f109-7f2a-63407289e396, 'name': SearchDatastore_Task, 'duration_secs': 0.017124} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 781.391870] env[62070]: DEBUG oslo_concurrency.lockutils [None req-174e44be-5f42-4d72-b785-d7e1a5fd4e8e tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] Releasing lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 781.392217] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-174e44be-5f42-4d72-b785-d7e1a5fd4e8e tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore2] 1ce155c8-9a10-4eff-b428-31889aa8f638/1ce155c8-9a10-4eff-b428-31889aa8f638.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 781.392543] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0426941d-be8a-439f-8140-480787fed954 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.397251] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-18406021-c28f-46ab-8651-f32c1395e2d1 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] [instance: d148d561-3211-4f1f-965a-f2b14cd60b11] Powering on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 781.397539] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with 
opID=oslo.vmware-d9218f69-2069-4922-a8c0-734e921e0535 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.401858] env[62070]: DEBUG oslo_vmware.api [None req-174e44be-5f42-4d72-b785-d7e1a5fd4e8e tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] Waiting for the task: (returnval){ [ 781.401858] env[62070]: value = "task-1121646" [ 781.401858] env[62070]: _type = "Task" [ 781.401858] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 781.406666] env[62070]: DEBUG oslo_vmware.api [None req-18406021-c28f-46ab-8651-f32c1395e2d1 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Waiting for the task: (returnval){ [ 781.406666] env[62070]: value = "task-1121647" [ 781.406666] env[62070]: _type = "Task" [ 781.406666] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 781.413371] env[62070]: DEBUG oslo_vmware.api [None req-174e44be-5f42-4d72-b785-d7e1a5fd4e8e tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] Task: {'id': task-1121646, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 781.418634] env[62070]: DEBUG oslo_vmware.api [None req-18406021-c28f-46ab-8651-f32c1395e2d1 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Task: {'id': task-1121647, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 781.461140] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-bec45d6d-b2f1-4547-8703-b179a072d73f tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] [instance: bcafa04d-904b-4eab-aba1-35180c2d4b22] Unregistered the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 781.462123] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-bec45d6d-b2f1-4547-8703-b179a072d73f tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] [instance: bcafa04d-904b-4eab-aba1-35180c2d4b22] Deleting contents of the VM from datastore datastore2 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 781.462123] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-bec45d6d-b2f1-4547-8703-b179a072d73f tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Deleting the datastore file [datastore2] bcafa04d-904b-4eab-aba1-35180c2d4b22 {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 781.465228] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a83ff82b-8bb2-41ac-8d5e-7b256ea418e1 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.467164] env[62070]: DEBUG oslo_vmware.api [None req-a6b38dca-80cd-4c17-b569-dea27e709a06 tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Task: {'id': task-1121643, 'name': PowerOffVM_Task, 'duration_secs': 0.279603} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 781.468480] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-a6b38dca-80cd-4c17-b569-dea27e709a06 tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] [instance: 5a146d8f-6921-4b3e-8696-d2804fb855ba] Powered off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 781.468660] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-a6b38dca-80cd-4c17-b569-dea27e709a06 tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] [instance: 5a146d8f-6921-4b3e-8696-d2804fb855ba] Unregistering the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 781.471462] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ee896079-3be2-4671-b8a6-24e8507fa591 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.475924] env[62070]: DEBUG oslo_vmware.api [None req-bec45d6d-b2f1-4547-8703-b179a072d73f tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Waiting for the task: (returnval){ [ 781.475924] env[62070]: value = "task-1121648" [ 781.475924] env[62070]: _type = "Task" [ 781.475924] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 781.484539] env[62070]: DEBUG oslo_vmware.api [None req-bec45d6d-b2f1-4547-8703-b179a072d73f tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Task: {'id': task-1121648, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 781.539028] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-a6b38dca-80cd-4c17-b569-dea27e709a06 tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] [instance: 5a146d8f-6921-4b3e-8696-d2804fb855ba] Unregistered the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 781.539265] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-a6b38dca-80cd-4c17-b569-dea27e709a06 tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] [instance: 5a146d8f-6921-4b3e-8696-d2804fb855ba] Deleting contents of the VM from datastore datastore1 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 781.539456] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-a6b38dca-80cd-4c17-b569-dea27e709a06 tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Deleting the datastore file [datastore1] 5a146d8f-6921-4b3e-8696-d2804fb855ba {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 781.539722] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-bcac9f8d-0c06-41ec-b8c8-c4d7cf5cfb83 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.546370] env[62070]: DEBUG oslo_vmware.api [None req-a6b38dca-80cd-4c17-b569-dea27e709a06 tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Waiting for the task: (returnval){ [ 781.546370] env[62070]: value = "task-1121650" [ 781.546370] env[62070]: _type = "Task" [ 781.546370] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 781.557542] env[62070]: DEBUG oslo_vmware.api [None req-a6b38dca-80cd-4c17-b569-dea27e709a06 tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Task: {'id': task-1121650, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 781.620785] env[62070]: DEBUG oslo_vmware.api [None req-1f25b52c-bc5b-4328-ac52-d99c50d50e57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Task: {'id': task-1121644, 'name': PowerOffVM_Task, 'duration_secs': 0.252093} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 781.621278] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-1f25b52c-bc5b-4328-ac52-d99c50d50e57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 13e3576e-4f4c-4541-a637-daa124cbf8dd] Powered off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 781.621548] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-1f25b52c-bc5b-4328-ac52-d99c50d50e57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 13e3576e-4f4c-4541-a637-daa124cbf8dd] Volume detach. 
Driver type: vmdk {{(pid=62070) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 781.621795] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-1f25b52c-bc5b-4328-ac52-d99c50d50e57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 13e3576e-4f4c-4541-a637-daa124cbf8dd] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-245379', 'volume_id': '36041495-d67a-4503-9146-0c73ccad5d5f', 'name': 'volume-36041495-d67a-4503-9146-0c73ccad5d5f', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '13e3576e-4f4c-4541-a637-daa124cbf8dd', 'attached_at': '', 'detached_at': '', 'volume_id': '36041495-d67a-4503-9146-0c73ccad5d5f', 'serial': '36041495-d67a-4503-9146-0c73ccad5d5f'} {{(pid=62070) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 781.622650] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-826f8b46-6fae-4944-98c4-50232ee3c567 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.646406] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8114a888-a401-4a3f-9305-de78cd99807c {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.654452] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45b8edea-c181-4de2-8697-eb5c915deb53 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.698927] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63e3868c-eab8-4b3d-a1ab-96222a905761 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.724116] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-1f25b52c-bc5b-4328-ac52-d99c50d50e57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] The volume has not been displaced from its original location: [datastore2] volume-36041495-d67a-4503-9146-0c73ccad5d5f/volume-36041495-d67a-4503-9146-0c73ccad5d5f.vmdk. No consolidation needed. {{(pid=62070) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 781.733331] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-1f25b52c-bc5b-4328-ac52-d99c50d50e57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 13e3576e-4f4c-4541-a637-daa124cbf8dd] Reconfiguring VM instance instance-00000030 to detach disk 2001 {{(pid=62070) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 781.734645] env[62070]: DEBUG nova.compute.manager [None req-93ddb69e-2ee1-4bdc-9fb9-c0932c81d871 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] [instance: 5ec9074b-1237-4404-b13c-a7ca0dbe1d43] Starting instance... 
{{(pid=62070) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 781.740655] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f86fc4e1-ef78-4b73-aec4-a0b9ed61944d {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.764772] env[62070]: DEBUG nova.network.neutron [None req-90df06cb-f655-463e-98fa-a0efa725c8cc tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] [instance: b7fdf23e-1e39-4745-ae84-38b7fa89aa5d] Successfully created port: f5d6c6b2-70c6-484b-8c25-b15a991f1434 {{(pid=62070) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 781.774018] env[62070]: DEBUG nova.compute.manager [None req-90df06cb-f655-463e-98fa-a0efa725c8cc tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] [instance: b7fdf23e-1e39-4745-ae84-38b7fa89aa5d] Start building block device mappings for instance. {{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 781.778840] env[62070]: DEBUG oslo_vmware.api [None req-1f25b52c-bc5b-4328-ac52-d99c50d50e57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Waiting for the task: (returnval){ [ 781.778840] env[62070]: value = "task-1121651" [ 781.778840] env[62070]: _type = "Task" [ 781.778840] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 781.793464] env[62070]: DEBUG oslo_vmware.api [None req-1f25b52c-bc5b-4328-ac52-d99c50d50e57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Task: {'id': task-1121651, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 781.920775] env[62070]: DEBUG oslo_vmware.api [None req-174e44be-5f42-4d72-b785-d7e1a5fd4e8e tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] Task: {'id': task-1121646, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 781.925961] env[62070]: DEBUG oslo_vmware.api [None req-18406021-c28f-46ab-8651-f32c1395e2d1 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Task: {'id': task-1121647, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 781.975511] env[62070]: DEBUG oslo_concurrency.lockutils [None req-79fef9f0-fe80-4c90-acae-ea6e355c29d5 tempest-ServerMetadataNegativeTestJSON-1698615623 tempest-ServerMetadataNegativeTestJSON-1698615623-project-member] Acquiring lock "20e7a993-b1fb-4359-ab35-8b0f06ca0121" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 781.975511] env[62070]: DEBUG oslo_concurrency.lockutils [None req-79fef9f0-fe80-4c90-acae-ea6e355c29d5 tempest-ServerMetadataNegativeTestJSON-1698615623 tempest-ServerMetadataNegativeTestJSON-1698615623-project-member] Lock "20e7a993-b1fb-4359-ab35-8b0f06ca0121" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 781.975511] env[62070]: DEBUG oslo_concurrency.lockutils [None req-79fef9f0-fe80-4c90-acae-ea6e355c29d5 tempest-ServerMetadataNegativeTestJSON-1698615623 tempest-ServerMetadataNegativeTestJSON-1698615623-project-member] Acquiring lock "20e7a993-b1fb-4359-ab35-8b0f06ca0121-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 781.975853] env[62070]: DEBUG oslo_concurrency.lockutils [None req-79fef9f0-fe80-4c90-acae-ea6e355c29d5 tempest-ServerMetadataNegativeTestJSON-1698615623 tempest-ServerMetadataNegativeTestJSON-1698615623-project-member] Lock "20e7a993-b1fb-4359-ab35-8b0f06ca0121-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 781.975853] env[62070]: DEBUG oslo_concurrency.lockutils [None req-79fef9f0-fe80-4c90-acae-ea6e355c29d5 tempest-ServerMetadataNegativeTestJSON-1698615623 tempest-ServerMetadataNegativeTestJSON-1698615623-project-member] Lock "20e7a993-b1fb-4359-ab35-8b0f06ca0121-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 781.983793] env[62070]: INFO nova.compute.manager [None req-79fef9f0-fe80-4c90-acae-ea6e355c29d5 tempest-ServerMetadataNegativeTestJSON-1698615623 tempest-ServerMetadataNegativeTestJSON-1698615623-project-member] [instance: 20e7a993-b1fb-4359-ab35-8b0f06ca0121] Terminating instance [ 781.986294] env[62070]: DEBUG nova.compute.manager [None req-79fef9f0-fe80-4c90-acae-ea6e355c29d5 tempest-ServerMetadataNegativeTestJSON-1698615623 tempest-ServerMetadataNegativeTestJSON-1698615623-project-member] [instance: 20e7a993-b1fb-4359-ab35-8b0f06ca0121] Start destroying the instance on the hypervisor. 
{{(pid=62070) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 781.986523] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-79fef9f0-fe80-4c90-acae-ea6e355c29d5 tempest-ServerMetadataNegativeTestJSON-1698615623 tempest-ServerMetadataNegativeTestJSON-1698615623-project-member] [instance: 20e7a993-b1fb-4359-ab35-8b0f06ca0121] Destroying instance {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 781.987346] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63cae143-012a-4732-b55b-d07c45b3847b {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.993628] env[62070]: DEBUG oslo_vmware.api [None req-bec45d6d-b2f1-4547-8703-b179a072d73f tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Task: {'id': task-1121648, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.404919} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 781.994364] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-bec45d6d-b2f1-4547-8703-b179a072d73f tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Deleted the datastore file {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 781.994619] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-bec45d6d-b2f1-4547-8703-b179a072d73f tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] [instance: bcafa04d-904b-4eab-aba1-35180c2d4b22] Deleted contents of the VM from datastore datastore2 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 781.994855] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-bec45d6d-b2f1-4547-8703-b179a072d73f tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] [instance: bcafa04d-904b-4eab-aba1-35180c2d4b22] Instance destroyed {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 781.995288] env[62070]: INFO nova.compute.manager [None req-bec45d6d-b2f1-4547-8703-b179a072d73f tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] [instance: bcafa04d-904b-4eab-aba1-35180c2d4b22] Took 1.19 seconds to destroy the instance on the hypervisor. [ 781.995357] env[62070]: DEBUG oslo.service.loopingcall [None req-bec45d6d-b2f1-4547-8703-b179a072d73f tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 781.995519] env[62070]: DEBUG nova.compute.manager [-] [instance: bcafa04d-904b-4eab-aba1-35180c2d4b22] Deallocating network for instance {{(pid=62070) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 781.995679] env[62070]: DEBUG nova.network.neutron [-] [instance: bcafa04d-904b-4eab-aba1-35180c2d4b22] deallocate_for_instance() {{(pid=62070) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 782.000224] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-79fef9f0-fe80-4c90-acae-ea6e355c29d5 tempest-ServerMetadataNegativeTestJSON-1698615623 tempest-ServerMetadataNegativeTestJSON-1698615623-project-member] [instance: 20e7a993-b1fb-4359-ab35-8b0f06ca0121] Powering off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 782.003132] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-729d9c15-9646-4d03-8123-e995ebde6ef0 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.009708] env[62070]: DEBUG oslo_vmware.api [None req-79fef9f0-fe80-4c90-acae-ea6e355c29d5 tempest-ServerMetadataNegativeTestJSON-1698615623 tempest-ServerMetadataNegativeTestJSON-1698615623-project-member] Waiting for the task: (returnval){ [ 782.009708] env[62070]: value = "task-1121652" [ 782.009708] env[62070]: _type = "Task" [ 782.009708] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 782.019409] env[62070]: DEBUG oslo_vmware.api [None req-79fef9f0-fe80-4c90-acae-ea6e355c29d5 tempest-ServerMetadataNegativeTestJSON-1698615623 tempest-ServerMetadataNegativeTestJSON-1698615623-project-member] Task: {'id': task-1121652, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 782.059088] env[62070]: DEBUG oslo_vmware.api [None req-a6b38dca-80cd-4c17-b569-dea27e709a06 tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Task: {'id': task-1121650, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.312892} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 782.059813] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-a6b38dca-80cd-4c17-b569-dea27e709a06 tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Deleted the datastore file {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 782.059813] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-a6b38dca-80cd-4c17-b569-dea27e709a06 tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] [instance: 5a146d8f-6921-4b3e-8696-d2804fb855ba] Deleted contents of the VM from datastore datastore1 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 782.059813] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-a6b38dca-80cd-4c17-b569-dea27e709a06 tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] [instance: 5a146d8f-6921-4b3e-8696-d2804fb855ba] Instance destroyed {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 782.060487] env[62070]: INFO nova.compute.manager [None req-a6b38dca-80cd-4c17-b569-dea27e709a06 tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] [instance: 5a146d8f-6921-4b3e-8696-d2804fb855ba] Took 1.13 seconds to destroy the instance on the hypervisor. [ 782.060487] env[62070]: DEBUG oslo.service.loopingcall [None req-a6b38dca-80cd-4c17-b569-dea27e709a06 tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 782.060487] env[62070]: DEBUG nova.compute.manager [-] [instance: 5a146d8f-6921-4b3e-8696-d2804fb855ba] Deallocating network for instance {{(pid=62070) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 782.060642] env[62070]: DEBUG nova.network.neutron [-] [instance: 5a146d8f-6921-4b3e-8696-d2804fb855ba] deallocate_for_instance() {{(pid=62070) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 782.280424] env[62070]: DEBUG oslo_concurrency.lockutils [None req-93ddb69e-2ee1-4bdc-9fb9-c0932c81d871 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 782.297699] env[62070]: DEBUG oslo_vmware.api [None req-1f25b52c-bc5b-4328-ac52-d99c50d50e57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Task: {'id': task-1121651, 'name': ReconfigVM_Task, 'duration_secs': 0.467687} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 782.298020] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-1f25b52c-bc5b-4328-ac52-d99c50d50e57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 13e3576e-4f4c-4541-a637-daa124cbf8dd] Reconfigured VM instance instance-00000030 to detach disk 2001 {{(pid=62070) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 782.303145] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-965c40f7-c01d-4405-b709-306f0ff0609c {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.333025] env[62070]: DEBUG oslo_vmware.api [None req-1f25b52c-bc5b-4328-ac52-d99c50d50e57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Waiting for the task: (returnval){ [ 782.333025] env[62070]: value = "task-1121653" [ 782.333025] env[62070]: _type = "Task" [ 782.333025] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 782.340321] env[62070]: DEBUG oslo_vmware.api [None req-1f25b52c-bc5b-4328-ac52-d99c50d50e57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Task: {'id': task-1121653, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 782.342164] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbf781ae-7b03-4113-ae71-a0ce8e077a8e {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.350008] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2be60bf-7a4d-49a6-bb45-9de24300bc34 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.383674] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-129ce7b3-14ef-43b7-a291-3887a762c893 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.393290] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03711f3a-4a32-4799-aaec-c3f1e8cba1cc {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.407607] env[62070]: DEBUG nova.compute.provider_tree [None req-9f976259-6d6a-4631-9026-5216a519ae6c tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Updating inventory in ProviderTree for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 782.421799] env[62070]: DEBUG oslo_vmware.api [None req-174e44be-5f42-4d72-b785-d7e1a5fd4e8e 
tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] Task: {'id': task-1121646, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.59114} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 782.424993] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-174e44be-5f42-4d72-b785-d7e1a5fd4e8e tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore2] 1ce155c8-9a10-4eff-b428-31889aa8f638/1ce155c8-9a10-4eff-b428-31889aa8f638.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 782.425251] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-174e44be-5f42-4d72-b785-d7e1a5fd4e8e tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] [instance: 1ce155c8-9a10-4eff-b428-31889aa8f638] Extending root virtual disk to 1048576 {{(pid=62070) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 782.425668] env[62070]: DEBUG oslo_vmware.api [None req-18406021-c28f-46ab-8651-f32c1395e2d1 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Task: {'id': task-1121647, 'name': PowerOnVM_Task, 'duration_secs': 0.578871} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 782.425756] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9413d328-efaf-4b60-9e59-fe671d695c59 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.427838] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-18406021-c28f-46ab-8651-f32c1395e2d1 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] [instance: d148d561-3211-4f1f-965a-f2b14cd60b11] Powered on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 782.437027] env[62070]: DEBUG oslo_vmware.api [None req-174e44be-5f42-4d72-b785-d7e1a5fd4e8e tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] Waiting for the task: (returnval){ [ 782.437027] env[62070]: value = "task-1121654" [ 782.437027] env[62070]: _type = "Task" [ 782.437027] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 782.446079] env[62070]: DEBUG oslo_vmware.api [None req-174e44be-5f42-4d72-b785-d7e1a5fd4e8e tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] Task: {'id': task-1121654, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 782.525647] env[62070]: DEBUG oslo_vmware.api [None req-79fef9f0-fe80-4c90-acae-ea6e355c29d5 tempest-ServerMetadataNegativeTestJSON-1698615623 tempest-ServerMetadataNegativeTestJSON-1698615623-project-member] Task: {'id': task-1121652, 'name': PowerOffVM_Task, 'duration_secs': 0.240979} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 782.527035] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-79fef9f0-fe80-4c90-acae-ea6e355c29d5 tempest-ServerMetadataNegativeTestJSON-1698615623 tempest-ServerMetadataNegativeTestJSON-1698615623-project-member] [instance: 20e7a993-b1fb-4359-ab35-8b0f06ca0121] Powered off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 782.527035] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-79fef9f0-fe80-4c90-acae-ea6e355c29d5 tempest-ServerMetadataNegativeTestJSON-1698615623 tempest-ServerMetadataNegativeTestJSON-1698615623-project-member] [instance: 20e7a993-b1fb-4359-ab35-8b0f06ca0121] Unregistering the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 782.527035] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9ccb24ba-10d7-4d71-b12d-54869d0d16f2 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.597858] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-79fef9f0-fe80-4c90-acae-ea6e355c29d5 tempest-ServerMetadataNegativeTestJSON-1698615623 tempest-ServerMetadataNegativeTestJSON-1698615623-project-member] [instance: 20e7a993-b1fb-4359-ab35-8b0f06ca0121] Unregistered the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 782.598068] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-79fef9f0-fe80-4c90-acae-ea6e355c29d5 tempest-ServerMetadataNegativeTestJSON-1698615623 tempest-ServerMetadataNegativeTestJSON-1698615623-project-member] [instance: 20e7a993-b1fb-4359-ab35-8b0f06ca0121] Deleting contents of the VM from datastore datastore2 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 782.598280] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-79fef9f0-fe80-4c90-acae-ea6e355c29d5 tempest-ServerMetadataNegativeTestJSON-1698615623 tempest-ServerMetadataNegativeTestJSON-1698615623-project-member] Deleting the datastore file [datastore2] 20e7a993-b1fb-4359-ab35-8b0f06ca0121 {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 782.598557] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4e365fb6-669f-4136-b351-a3a3c164a681 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.607759] env[62070]: DEBUG oslo_vmware.api [None req-79fef9f0-fe80-4c90-acae-ea6e355c29d5 tempest-ServerMetadataNegativeTestJSON-1698615623 tempest-ServerMetadataNegativeTestJSON-1698615623-project-member] Waiting for the task: (returnval){ [ 782.607759] env[62070]: value = "task-1121656" [ 782.607759] env[62070]: _type = "Task" [ 782.607759] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 782.618733] env[62070]: DEBUG oslo_vmware.api [None req-79fef9f0-fe80-4c90-acae-ea6e355c29d5 tempest-ServerMetadataNegativeTestJSON-1698615623 tempest-ServerMetadataNegativeTestJSON-1698615623-project-member] Task: {'id': task-1121656, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 782.711846] env[62070]: DEBUG nova.compute.manager [req-01993552-d95a-41cf-8e51-e98bfe146911 req-d1246e35-a77b-42c5-abf0-d485062d6af5 service nova] [instance: bcafa04d-904b-4eab-aba1-35180c2d4b22] Received event network-vif-deleted-1989edae-bc69-457e-ab09-93742636d663 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 782.712257] env[62070]: INFO nova.compute.manager [req-01993552-d95a-41cf-8e51-e98bfe146911 req-d1246e35-a77b-42c5-abf0-d485062d6af5 service nova] [instance: bcafa04d-904b-4eab-aba1-35180c2d4b22] Neutron deleted interface 1989edae-bc69-457e-ab09-93742636d663; detaching it from the instance and deleting it from the info cache [ 782.712835] env[62070]: DEBUG nova.network.neutron [req-01993552-d95a-41cf-8e51-e98bfe146911 req-d1246e35-a77b-42c5-abf0-d485062d6af5 service nova] [instance: bcafa04d-904b-4eab-aba1-35180c2d4b22] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 782.797774] env[62070]: DEBUG nova.compute.manager [None req-90df06cb-f655-463e-98fa-a0efa725c8cc tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] [instance: b7fdf23e-1e39-4745-ae84-38b7fa89aa5d] Start spawning the instance on the hypervisor. {{(pid=62070) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 782.827550] env[62070]: DEBUG nova.virt.hardware [None req-90df06cb-f655-463e-98fa-a0efa725c8cc tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T09:21:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T09:21:17Z,direct_url=,disk_format='vmdk',id=43ea607c-7ece-4601-9b11-75c6a16aa7dd,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9d42cb2bbadf40d6b35f237f71234611',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T09:21:18Z,virtual_size=,visibility=), allow threads: False {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 782.827919] env[62070]: DEBUG nova.virt.hardware [None req-90df06cb-f655-463e-98fa-a0efa725c8cc tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] Flavor limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 782.828184] env[62070]: DEBUG nova.virt.hardware [None req-90df06cb-f655-463e-98fa-a0efa725c8cc tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] Image limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 782.828556] env[62070]: DEBUG nova.virt.hardware [None req-90df06cb-f655-463e-98fa-a0efa725c8cc tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] Flavor pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 782.828886] env[62070]: DEBUG 
nova.virt.hardware [None req-90df06cb-f655-463e-98fa-a0efa725c8cc tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] Image pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 782.829121] env[62070]: DEBUG nova.virt.hardware [None req-90df06cb-f655-463e-98fa-a0efa725c8cc tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 782.830509] env[62070]: DEBUG nova.virt.hardware [None req-90df06cb-f655-463e-98fa-a0efa725c8cc tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 782.830509] env[62070]: DEBUG nova.virt.hardware [None req-90df06cb-f655-463e-98fa-a0efa725c8cc tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 782.830509] env[62070]: DEBUG nova.virt.hardware [None req-90df06cb-f655-463e-98fa-a0efa725c8cc tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] Got 1 possible topologies {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 782.830509] env[62070]: DEBUG nova.virt.hardware [None req-90df06cb-f655-463e-98fa-a0efa725c8cc tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 782.830509] env[62070]: DEBUG nova.virt.hardware [None req-90df06cb-f655-463e-98fa-a0efa725c8cc tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 782.831385] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-210904b1-bfc0-47e0-a3db-95765580cb41 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.846730] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4024abbb-282f-4155-aedc-90deefd3b1e5 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.851793] env[62070]: DEBUG oslo_vmware.api [None req-1f25b52c-bc5b-4328-ac52-d99c50d50e57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Task: {'id': task-1121653, 'name': ReconfigVM_Task, 'duration_secs': 0.194798} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 782.853090] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-1f25b52c-bc5b-4328-ac52-d99c50d50e57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 13e3576e-4f4c-4541-a637-daa124cbf8dd] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-245379', 'volume_id': '36041495-d67a-4503-9146-0c73ccad5d5f', 'name': 'volume-36041495-d67a-4503-9146-0c73ccad5d5f', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '13e3576e-4f4c-4541-a637-daa124cbf8dd', 'attached_at': '', 'detached_at': '', 'volume_id': '36041495-d67a-4503-9146-0c73ccad5d5f', 'serial': '36041495-d67a-4503-9146-0c73ccad5d5f'} {{(pid=62070) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 782.853090] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-1f25b52c-bc5b-4328-ac52-d99c50d50e57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 13e3576e-4f4c-4541-a637-daa124cbf8dd] Destroying instance {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 782.853627] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e05e4f65-b7c7-4c1e-8733-97de0309b515 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.872510] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-1f25b52c-bc5b-4328-ac52-d99c50d50e57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 13e3576e-4f4c-4541-a637-daa124cbf8dd] Unregistering the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 782.872510] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-07f1360a-7c28-4551-9913-cdf9ae48de2b {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.943834] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-1f25b52c-bc5b-4328-ac52-d99c50d50e57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 13e3576e-4f4c-4541-a637-daa124cbf8dd] Unregistered the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 782.944122] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-1f25b52c-bc5b-4328-ac52-d99c50d50e57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 13e3576e-4f4c-4541-a637-daa124cbf8dd] Deleting contents of the VM from datastore datastore2 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 782.944344] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-1f25b52c-bc5b-4328-ac52-d99c50d50e57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Deleting the datastore file [datastore2] 13e3576e-4f4c-4541-a637-daa124cbf8dd {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 782.945105] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-57655d3e-f28a-4be6-920d-f8bf28a5553f {{(pid=62070) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.948362] env[62070]: DEBUG nova.scheduler.client.report [None req-9f976259-6d6a-4631-9026-5216a519ae6c tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Updated inventory for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 with generation 78 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 782.948720] env[62070]: DEBUG nova.compute.provider_tree [None req-9f976259-6d6a-4631-9026-5216a519ae6c tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Updating resource provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 generation from 78 to 79 during operation: update_inventory {{(pid=62070) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 782.949032] env[62070]: DEBUG nova.compute.provider_tree [None req-9f976259-6d6a-4631-9026-5216a519ae6c tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Updating inventory in ProviderTree for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 782.957044] env[62070]: DEBUG oslo_vmware.api [None req-174e44be-5f42-4d72-b785-d7e1a5fd4e8e tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] Task: {'id': task-1121654, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.18771} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 782.957832] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-174e44be-5f42-4d72-b785-d7e1a5fd4e8e tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] [instance: 1ce155c8-9a10-4eff-b428-31889aa8f638] Extended root virtual disk {{(pid=62070) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 782.958717] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-544947f9-0dee-4c20-9f87-7b07b43d7798 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.962877] env[62070]: DEBUG oslo_vmware.api [None req-1f25b52c-bc5b-4328-ac52-d99c50d50e57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Waiting for the task: (returnval){ [ 782.962877] env[62070]: value = "task-1121658" [ 782.962877] env[62070]: _type = "Task" [ 782.962877] env[62070]: } to complete. 
{{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 782.988082] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-174e44be-5f42-4d72-b785-d7e1a5fd4e8e tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] [instance: 1ce155c8-9a10-4eff-b428-31889aa8f638] Reconfiguring VM instance instance-00000035 to attach disk [datastore2] 1ce155c8-9a10-4eff-b428-31889aa8f638/1ce155c8-9a10-4eff-b428-31889aa8f638.vmdk or device None with type sparse {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 782.990015] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3e76ce6f-6baa-42eb-a6b4-5c448e269f27 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.008472] env[62070]: DEBUG oslo_vmware.api [None req-1f25b52c-bc5b-4328-ac52-d99c50d50e57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Task: {'id': task-1121658, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 783.013741] env[62070]: DEBUG oslo_vmware.api [None req-174e44be-5f42-4d72-b785-d7e1a5fd4e8e tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] Waiting for the task: (returnval){ [ 783.013741] env[62070]: value = "task-1121659" [ 783.013741] env[62070]: _type = "Task" [ 783.013741] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 783.023335] env[62070]: DEBUG oslo_vmware.api [None req-174e44be-5f42-4d72-b785-d7e1a5fd4e8e tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] Task: {'id': task-1121659, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 783.118404] env[62070]: DEBUG oslo_vmware.api [None req-79fef9f0-fe80-4c90-acae-ea6e355c29d5 tempest-ServerMetadataNegativeTestJSON-1698615623 tempest-ServerMetadataNegativeTestJSON-1698615623-project-member] Task: {'id': task-1121656, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.340971} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 783.118689] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-79fef9f0-fe80-4c90-acae-ea6e355c29d5 tempest-ServerMetadataNegativeTestJSON-1698615623 tempest-ServerMetadataNegativeTestJSON-1698615623-project-member] Deleted the datastore file {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 783.118900] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-79fef9f0-fe80-4c90-acae-ea6e355c29d5 tempest-ServerMetadataNegativeTestJSON-1698615623 tempest-ServerMetadataNegativeTestJSON-1698615623-project-member] [instance: 20e7a993-b1fb-4359-ab35-8b0f06ca0121] Deleted contents of the VM from datastore datastore2 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 783.119140] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-79fef9f0-fe80-4c90-acae-ea6e355c29d5 tempest-ServerMetadataNegativeTestJSON-1698615623 tempest-ServerMetadataNegativeTestJSON-1698615623-project-member] [instance: 20e7a993-b1fb-4359-ab35-8b0f06ca0121] Instance destroyed {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 783.119350] env[62070]: INFO nova.compute.manager [None req-79fef9f0-fe80-4c90-acae-ea6e355c29d5 tempest-ServerMetadataNegativeTestJSON-1698615623 tempest-ServerMetadataNegativeTestJSON-1698615623-project-member] [instance: 20e7a993-b1fb-4359-ab35-8b0f06ca0121] Took 1.13 seconds to destroy the instance on the hypervisor. [ 783.119626] env[62070]: DEBUG oslo.service.loopingcall [None req-79fef9f0-fe80-4c90-acae-ea6e355c29d5 tempest-ServerMetadataNegativeTestJSON-1698615623 tempest-ServerMetadataNegativeTestJSON-1698615623-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 783.119937] env[62070]: DEBUG nova.compute.manager [-] [instance: 20e7a993-b1fb-4359-ab35-8b0f06ca0121] Deallocating network for instance {{(pid=62070) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 783.120020] env[62070]: DEBUG nova.network.neutron [-] [instance: 20e7a993-b1fb-4359-ab35-8b0f06ca0121] deallocate_for_instance() {{(pid=62070) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 783.137911] env[62070]: DEBUG nova.network.neutron [-] [instance: bcafa04d-904b-4eab-aba1-35180c2d4b22] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 783.146480] env[62070]: DEBUG nova.network.neutron [-] [instance: 5a146d8f-6921-4b3e-8696-d2804fb855ba] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 783.216868] env[62070]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-58f36cb6-006d-47ff-ba98-aa66ef11c0ae {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.226565] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ac7e553-7df6-4fec-8c1d-873be138d1a6 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.260481] env[62070]: DEBUG nova.compute.manager [req-01993552-d95a-41cf-8e51-e98bfe146911 req-d1246e35-a77b-42c5-abf0-d485062d6af5 service nova] [instance: bcafa04d-904b-4eab-aba1-35180c2d4b22] Detach interface failed, port_id=1989edae-bc69-457e-ab09-93742636d663, reason: Instance bcafa04d-904b-4eab-aba1-35180c2d4b22 could not be found. {{(pid=62070) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 783.446718] env[62070]: INFO nova.compute.manager [None req-18406021-c28f-46ab-8651-f32c1395e2d1 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] [instance: d148d561-3211-4f1f-965a-f2b14cd60b11] Updating instance to original state: 'active' [ 783.459033] env[62070]: DEBUG oslo_concurrency.lockutils [None req-9f976259-6d6a-4631-9026-5216a519ae6c tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.704s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 783.459734] env[62070]: DEBUG nova.compute.manager [None req-9f976259-6d6a-4631-9026-5216a519ae6c tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] [instance: 10672096-00ba-4481-8ab3-085a185076db] Start building networks asynchronously for instance. 
{{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 783.463754] env[62070]: DEBUG oslo_concurrency.lockutils [None req-912d068e-ad4d-4a8c-af8e-8d1e2033b501 tempest-ServerDiagnosticsNegativeTest-1306946091 tempest-ServerDiagnosticsNegativeTest-1306946091-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 26.221s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 783.464148] env[62070]: DEBUG nova.objects.instance [None req-912d068e-ad4d-4a8c-af8e-8d1e2033b501 tempest-ServerDiagnosticsNegativeTest-1306946091 tempest-ServerDiagnosticsNegativeTest-1306946091-project-member] Lazy-loading 'resources' on Instance uuid 42a5c5d8-5c3a-4568-b212-d87f2951a334 {{(pid=62070) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 783.478068] env[62070]: DEBUG oslo_vmware.api [None req-1f25b52c-bc5b-4328-ac52-d99c50d50e57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Task: {'id': task-1121658, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.323748} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 783.478374] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-1f25b52c-bc5b-4328-ac52-d99c50d50e57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Deleted the datastore file {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 783.478605] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-1f25b52c-bc5b-4328-ac52-d99c50d50e57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 13e3576e-4f4c-4541-a637-daa124cbf8dd] Deleted contents of the VM from datastore datastore2 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 783.478915] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-1f25b52c-bc5b-4328-ac52-d99c50d50e57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 13e3576e-4f4c-4541-a637-daa124cbf8dd] Instance destroyed {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 783.479144] env[62070]: INFO nova.compute.manager [None req-1f25b52c-bc5b-4328-ac52-d99c50d50e57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 13e3576e-4f4c-4541-a637-daa124cbf8dd] Took 2.38 seconds to destroy the instance on the hypervisor. [ 783.479478] env[62070]: DEBUG oslo.service.loopingcall [None req-1f25b52c-bc5b-4328-ac52-d99c50d50e57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 783.479746] env[62070]: DEBUG nova.compute.manager [-] [instance: 13e3576e-4f4c-4541-a637-daa124cbf8dd] Deallocating network for instance {{(pid=62070) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 783.479857] env[62070]: DEBUG nova.network.neutron [-] [instance: 13e3576e-4f4c-4541-a637-daa124cbf8dd] deallocate_for_instance() {{(pid=62070) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 783.501214] env[62070]: DEBUG nova.network.neutron [None req-90df06cb-f655-463e-98fa-a0efa725c8cc tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] [instance: b7fdf23e-1e39-4745-ae84-38b7fa89aa5d] Successfully updated port: f5d6c6b2-70c6-484b-8c25-b15a991f1434 {{(pid=62070) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 783.527275] env[62070]: DEBUG oslo_vmware.api [None req-174e44be-5f42-4d72-b785-d7e1a5fd4e8e tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] Task: {'id': task-1121659, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 783.543885] env[62070]: DEBUG nova.compute.manager [req-6bf41548-372c-4d1f-a6c1-3a373cb20223 req-3464e25c-fee2-41e2-87b6-8893aa51b5ba service nova] [instance: b7fdf23e-1e39-4745-ae84-38b7fa89aa5d] Received event network-vif-plugged-f5d6c6b2-70c6-484b-8c25-b15a991f1434 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 783.544131] env[62070]: DEBUG oslo_concurrency.lockutils [req-6bf41548-372c-4d1f-a6c1-3a373cb20223 req-3464e25c-fee2-41e2-87b6-8893aa51b5ba service nova] Acquiring lock "b7fdf23e-1e39-4745-ae84-38b7fa89aa5d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 783.544347] env[62070]: DEBUG oslo_concurrency.lockutils [req-6bf41548-372c-4d1f-a6c1-3a373cb20223 req-3464e25c-fee2-41e2-87b6-8893aa51b5ba service nova] Lock "b7fdf23e-1e39-4745-ae84-38b7fa89aa5d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 783.544521] env[62070]: DEBUG oslo_concurrency.lockutils [req-6bf41548-372c-4d1f-a6c1-3a373cb20223 req-3464e25c-fee2-41e2-87b6-8893aa51b5ba service nova] Lock "b7fdf23e-1e39-4745-ae84-38b7fa89aa5d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 783.544710] env[62070]: DEBUG nova.compute.manager [req-6bf41548-372c-4d1f-a6c1-3a373cb20223 req-3464e25c-fee2-41e2-87b6-8893aa51b5ba service nova] [instance: b7fdf23e-1e39-4745-ae84-38b7fa89aa5d] No waiting events found dispatching network-vif-plugged-f5d6c6b2-70c6-484b-8c25-b15a991f1434 {{(pid=62070) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 783.544864] env[62070]: WARNING nova.compute.manager [req-6bf41548-372c-4d1f-a6c1-3a373cb20223 req-3464e25c-fee2-41e2-87b6-8893aa51b5ba service nova] [instance: b7fdf23e-1e39-4745-ae84-38b7fa89aa5d] Received unexpected event network-vif-plugged-f5d6c6b2-70c6-484b-8c25-b15a991f1434 for instance with vm_state building and 
task_state spawning. [ 783.640800] env[62070]: INFO nova.compute.manager [-] [instance: bcafa04d-904b-4eab-aba1-35180c2d4b22] Took 1.65 seconds to deallocate network for instance. [ 783.651024] env[62070]: INFO nova.compute.manager [-] [instance: 5a146d8f-6921-4b3e-8696-d2804fb855ba] Took 1.59 seconds to deallocate network for instance. [ 783.958187] env[62070]: DEBUG nova.network.neutron [-] [instance: 20e7a993-b1fb-4359-ab35-8b0f06ca0121] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 783.967113] env[62070]: DEBUG nova.compute.utils [None req-9f976259-6d6a-4631-9026-5216a519ae6c tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Using /dev/sd instead of None {{(pid=62070) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 783.971517] env[62070]: DEBUG nova.compute.manager [None req-9f976259-6d6a-4631-9026-5216a519ae6c tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] [instance: 10672096-00ba-4481-8ab3-085a185076db] Allocating IP information in the background. {{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 783.971727] env[62070]: DEBUG nova.network.neutron [None req-9f976259-6d6a-4631-9026-5216a519ae6c tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] [instance: 10672096-00ba-4481-8ab3-085a185076db] allocate_for_instance() {{(pid=62070) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 784.003678] env[62070]: DEBUG oslo_concurrency.lockutils [None req-90df06cb-f655-463e-98fa-a0efa725c8cc tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] Acquiring lock "refresh_cache-b7fdf23e-1e39-4745-ae84-38b7fa89aa5d" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 784.003895] env[62070]: DEBUG oslo_concurrency.lockutils [None req-90df06cb-f655-463e-98fa-a0efa725c8cc tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] Acquired lock "refresh_cache-b7fdf23e-1e39-4745-ae84-38b7fa89aa5d" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 784.004760] env[62070]: DEBUG nova.network.neutron [None req-90df06cb-f655-463e-98fa-a0efa725c8cc tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] [instance: b7fdf23e-1e39-4745-ae84-38b7fa89aa5d] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 784.028553] env[62070]: DEBUG oslo_vmware.api [None req-174e44be-5f42-4d72-b785-d7e1a5fd4e8e tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] Task: {'id': task-1121659, 'name': ReconfigVM_Task, 'duration_secs': 0.527076} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 784.032254] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-174e44be-5f42-4d72-b785-d7e1a5fd4e8e tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] [instance: 1ce155c8-9a10-4eff-b428-31889aa8f638] Reconfigured VM instance instance-00000035 to attach disk [datastore2] 1ce155c8-9a10-4eff-b428-31889aa8f638/1ce155c8-9a10-4eff-b428-31889aa8f638.vmdk or device None with type sparse {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 784.033244] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d47d0b88-9b29-4181-af20-eac7dfd9b852 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.040836] env[62070]: DEBUG oslo_vmware.api [None req-174e44be-5f42-4d72-b785-d7e1a5fd4e8e tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] Waiting for the task: (returnval){ [ 784.040836] env[62070]: value = "task-1121660" [ 784.040836] env[62070]: _type = "Task" [ 784.040836] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 784.049545] env[62070]: DEBUG nova.policy [None req-9f976259-6d6a-4631-9026-5216a519ae6c tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0c0209c9a3c7400990f8f1d27a29e3d0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7525c88cd803420094fb2af593ba5d65', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62070) authorize /opt/stack/nova/nova/policy.py:201}} [ 784.058870] env[62070]: DEBUG oslo_vmware.api [None req-174e44be-5f42-4d72-b785-d7e1a5fd4e8e tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] Task: {'id': task-1121660, 'name': Rename_Task} progress is 10%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 784.133528] env[62070]: DEBUG oslo_concurrency.lockutils [None req-598fe719-c279-4541-a7db-5afa7e334944 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Acquiring lock "interface-0ac963b1-120a-464b-8228-3393135dd182-None" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 784.133870] env[62070]: DEBUG oslo_concurrency.lockutils [None req-598fe719-c279-4541-a7db-5afa7e334944 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Lock "interface-0ac963b1-120a-464b-8228-3393135dd182-None" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 784.134228] env[62070]: DEBUG nova.objects.instance [None req-598fe719-c279-4541-a7db-5afa7e334944 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Lazy-loading 'flavor' on Instance uuid 0ac963b1-120a-464b-8228-3393135dd182 {{(pid=62070) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 784.147813] env[62070]: DEBUG oslo_concurrency.lockutils [None req-bec45d6d-b2f1-4547-8703-b179a072d73f tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 784.155646] env[62070]: DEBUG oslo_concurrency.lockutils [None req-a6b38dca-80cd-4c17-b569-dea27e709a06 tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 784.270613] env[62070]: DEBUG nova.network.neutron [-] [instance: 13e3576e-4f4c-4541-a637-daa124cbf8dd] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 784.458018] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d8a76a9-b309-40b5-b8c8-1e82f7b8fcef {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.464077] env[62070]: DEBUG nova.network.neutron [None req-9f976259-6d6a-4631-9026-5216a519ae6c tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] [instance: 10672096-00ba-4481-8ab3-085a185076db] Successfully created port: d7ba9e32-c151-4f86-90ea-ba3a9dc7ea10 {{(pid=62070) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 784.469365] env[62070]: INFO nova.compute.manager [-] [instance: 20e7a993-b1fb-4359-ab35-8b0f06ca0121] Took 1.35 seconds to deallocate network for instance. 
[ 784.472152] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b9d9831-4fdb-458e-b380-616fde2af05e {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.480066] env[62070]: DEBUG nova.compute.manager [None req-9f976259-6d6a-4631-9026-5216a519ae6c tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] [instance: 10672096-00ba-4481-8ab3-085a185076db] Start building block device mappings for instance. {{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 784.515236] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27587eb1-c4f5-475e-a7ce-6dcaba579fff {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.525524] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04d0a603-2974-41df-a7d1-62074b24e02a {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.541379] env[62070]: DEBUG nova.compute.provider_tree [None req-912d068e-ad4d-4a8c-af8e-8d1e2033b501 tempest-ServerDiagnosticsNegativeTest-1306946091 tempest-ServerDiagnosticsNegativeTest-1306946091-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 784.551090] env[62070]: DEBUG oslo_vmware.api [None req-174e44be-5f42-4d72-b785-d7e1a5fd4e8e tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] Task: {'id': task-1121660, 'name': Rename_Task, 'duration_secs': 0.181227} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 784.551459] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-174e44be-5f42-4d72-b785-d7e1a5fd4e8e tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] [instance: 1ce155c8-9a10-4eff-b428-31889aa8f638] Powering on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 784.551796] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4d9975bc-73b4-4c38-97f1-26ef07cba6d8 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.559071] env[62070]: DEBUG oslo_vmware.api [None req-174e44be-5f42-4d72-b785-d7e1a5fd4e8e tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] Waiting for the task: (returnval){ [ 784.559071] env[62070]: value = "task-1121661" [ 784.559071] env[62070]: _type = "Task" [ 784.559071] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 784.563157] env[62070]: DEBUG nova.network.neutron [None req-90df06cb-f655-463e-98fa-a0efa725c8cc tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] [instance: b7fdf23e-1e39-4745-ae84-38b7fa89aa5d] Instance cache missing network info. 
{{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 784.570402] env[62070]: DEBUG oslo_vmware.api [None req-174e44be-5f42-4d72-b785-d7e1a5fd4e8e tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] Task: {'id': task-1121661, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 784.638299] env[62070]: DEBUG nova.objects.instance [None req-598fe719-c279-4541-a7db-5afa7e334944 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Lazy-loading 'pci_requests' on Instance uuid 0ac963b1-120a-464b-8228-3393135dd182 {{(pid=62070) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 784.714855] env[62070]: DEBUG nova.network.neutron [None req-90df06cb-f655-463e-98fa-a0efa725c8cc tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] [instance: b7fdf23e-1e39-4745-ae84-38b7fa89aa5d] Updating instance_info_cache with network_info: [{"id": "f5d6c6b2-70c6-484b-8c25-b15a991f1434", "address": "fa:16:3e:39:39:3b", "network": {"id": "138c97e2-fdc6-4a10-a96a-4a15e4f93fd6", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-576494802-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "99567c9d305e486f9559797458c9a0e5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c842425c-544e-4ce2-9657-512723bd318e", "external-id": "nsx-vlan-transportzone-80", "segmentation_id": 80, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf5d6c6b2-70", "ovs_interfaceid": "f5d6c6b2-70c6-484b-8c25-b15a991f1434", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 784.776016] env[62070]: INFO nova.compute.manager [-] [instance: 13e3576e-4f4c-4541-a637-daa124cbf8dd] Took 1.29 seconds to deallocate network for instance. 
[ 784.986225] env[62070]: DEBUG oslo_concurrency.lockutils [None req-79fef9f0-fe80-4c90-acae-ea6e355c29d5 tempest-ServerMetadataNegativeTestJSON-1698615623 tempest-ServerMetadataNegativeTestJSON-1698615623-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 785.047656] env[62070]: DEBUG nova.scheduler.client.report [None req-912d068e-ad4d-4a8c-af8e-8d1e2033b501 tempest-ServerDiagnosticsNegativeTest-1306946091 tempest-ServerDiagnosticsNegativeTest-1306946091-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 785.071603] env[62070]: DEBUG oslo_vmware.api [None req-174e44be-5f42-4d72-b785-d7e1a5fd4e8e tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] Task: {'id': task-1121661, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 785.143344] env[62070]: DEBUG nova.objects.base [None req-598fe719-c279-4541-a7db-5afa7e334944 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Object Instance<0ac963b1-120a-464b-8228-3393135dd182> lazy-loaded attributes: flavor,pci_requests {{(pid=62070) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 785.143579] env[62070]: DEBUG nova.network.neutron [None req-598fe719-c279-4541-a7db-5afa7e334944 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] [instance: 0ac963b1-120a-464b-8228-3393135dd182] allocate_for_instance() {{(pid=62070) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 785.216441] env[62070]: DEBUG oslo_concurrency.lockutils [None req-90df06cb-f655-463e-98fa-a0efa725c8cc tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] Releasing lock "refresh_cache-b7fdf23e-1e39-4745-ae84-38b7fa89aa5d" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 785.216802] env[62070]: DEBUG nova.compute.manager [None req-90df06cb-f655-463e-98fa-a0efa725c8cc tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] [instance: b7fdf23e-1e39-4745-ae84-38b7fa89aa5d] Instance network_info: |[{"id": "f5d6c6b2-70c6-484b-8c25-b15a991f1434", "address": "fa:16:3e:39:39:3b", "network": {"id": "138c97e2-fdc6-4a10-a96a-4a15e4f93fd6", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-576494802-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "99567c9d305e486f9559797458c9a0e5", 
"mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c842425c-544e-4ce2-9657-512723bd318e", "external-id": "nsx-vlan-transportzone-80", "segmentation_id": 80, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf5d6c6b2-70", "ovs_interfaceid": "f5d6c6b2-70c6-484b-8c25-b15a991f1434", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 785.217329] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-90df06cb-f655-463e-98fa-a0efa725c8cc tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] [instance: b7fdf23e-1e39-4745-ae84-38b7fa89aa5d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:39:39:3b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c842425c-544e-4ce2-9657-512723bd318e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f5d6c6b2-70c6-484b-8c25-b15a991f1434', 'vif_model': 'vmxnet3'}] {{(pid=62070) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 785.225338] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-90df06cb-f655-463e-98fa-a0efa725c8cc tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] Creating folder: Project (99567c9d305e486f9559797458c9a0e5). Parent ref: group-v245319. {{(pid=62070) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 785.225647] env[62070]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4ddafc13-af10-4e62-b641-63daa0d094b6 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.235486] env[62070]: DEBUG oslo_concurrency.lockutils [None req-598fe719-c279-4541-a7db-5afa7e334944 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Lock "interface-0ac963b1-120a-464b-8228-3393135dd182-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 1.102s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 785.238153] env[62070]: INFO nova.virt.vmwareapi.vm_util [None req-90df06cb-f655-463e-98fa-a0efa725c8cc tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] Created folder: Project (99567c9d305e486f9559797458c9a0e5) in parent group-v245319. [ 785.238365] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-90df06cb-f655-463e-98fa-a0efa725c8cc tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] Creating folder: Instances. Parent ref: group-v245388. 
{{(pid=62070) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 785.239052] env[62070]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ad96cb17-f6b6-4db5-bc4d-4b1c3a7226bd {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.248653] env[62070]: INFO nova.virt.vmwareapi.vm_util [None req-90df06cb-f655-463e-98fa-a0efa725c8cc tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] Created folder: Instances in parent group-v245388. [ 785.248848] env[62070]: DEBUG oslo.service.loopingcall [None req-90df06cb-f655-463e-98fa-a0efa725c8cc tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 785.249076] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b7fdf23e-1e39-4745-ae84-38b7fa89aa5d] Creating VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 785.249342] env[62070]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b0caaa18-98bb-4b0d-ac90-2fb390ccc676 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.269377] env[62070]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 785.269377] env[62070]: value = "task-1121664" [ 785.269377] env[62070]: _type = "Task" [ 785.269377] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 785.275203] env[62070]: DEBUG nova.compute.manager [req-56afa0cb-ce37-4976-9f47-b45af3da99a7 req-cb6b71bd-7ab8-495a-a801-c1574ca8a295 service nova] [instance: 5a146d8f-6921-4b3e-8696-d2804fb855ba] Received event network-vif-deleted-af767f4c-7d63-455c-b3df-198b16341c00 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 785.275407] env[62070]: DEBUG nova.compute.manager [req-56afa0cb-ce37-4976-9f47-b45af3da99a7 req-cb6b71bd-7ab8-495a-a801-c1574ca8a295 service nova] [instance: 20e7a993-b1fb-4359-ab35-8b0f06ca0121] Received event network-vif-deleted-ac60129b-8af9-4075-a1d9-9f0ecfe5aedf {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 785.275603] env[62070]: DEBUG nova.compute.manager [req-56afa0cb-ce37-4976-9f47-b45af3da99a7 req-cb6b71bd-7ab8-495a-a801-c1574ca8a295 service nova] [instance: 13e3576e-4f4c-4541-a637-daa124cbf8dd] Received event network-vif-deleted-6d2286a1-36df-47b8-93e7-424a5f04ce29 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 785.281156] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1121664, 'name': CreateVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 785.339282] env[62070]: INFO nova.compute.manager [None req-1f25b52c-bc5b-4328-ac52-d99c50d50e57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 13e3576e-4f4c-4541-a637-daa124cbf8dd] Took 0.57 seconds to detach 1 volumes for instance. 
[ 785.520217] env[62070]: DEBUG nova.compute.manager [None req-9f976259-6d6a-4631-9026-5216a519ae6c tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] [instance: 10672096-00ba-4481-8ab3-085a185076db] Start spawning the instance on the hypervisor. {{(pid=62070) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 785.541916] env[62070]: DEBUG nova.virt.hardware [None req-9f976259-6d6a-4631-9026-5216a519ae6c tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T09:21:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T09:21:17Z,direct_url=,disk_format='vmdk',id=43ea607c-7ece-4601-9b11-75c6a16aa7dd,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9d42cb2bbadf40d6b35f237f71234611',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T09:21:18Z,virtual_size=,visibility=), allow threads: False {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 785.542252] env[62070]: DEBUG nova.virt.hardware [None req-9f976259-6d6a-4631-9026-5216a519ae6c tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Flavor limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 785.542778] env[62070]: DEBUG nova.virt.hardware [None req-9f976259-6d6a-4631-9026-5216a519ae6c tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Image limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 785.542778] env[62070]: DEBUG nova.virt.hardware [None req-9f976259-6d6a-4631-9026-5216a519ae6c tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Flavor pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 785.542905] env[62070]: DEBUG nova.virt.hardware [None req-9f976259-6d6a-4631-9026-5216a519ae6c tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Image pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 785.542978] env[62070]: DEBUG nova.virt.hardware [None req-9f976259-6d6a-4631-9026-5216a519ae6c tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 785.543169] env[62070]: DEBUG nova.virt.hardware [None req-9f976259-6d6a-4631-9026-5216a519ae6c tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 785.543337] env[62070]: DEBUG 
nova.virt.hardware [None req-9f976259-6d6a-4631-9026-5216a519ae6c tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 785.543525] env[62070]: DEBUG nova.virt.hardware [None req-9f976259-6d6a-4631-9026-5216a519ae6c tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Got 1 possible topologies {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 785.543841] env[62070]: DEBUG nova.virt.hardware [None req-9f976259-6d6a-4631-9026-5216a519ae6c tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 785.543957] env[62070]: DEBUG nova.virt.hardware [None req-9f976259-6d6a-4631-9026-5216a519ae6c tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 785.544882] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9cdfa098-9582-4e71-91e7-bbcd90c8b525 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.554402] env[62070]: DEBUG oslo_concurrency.lockutils [None req-912d068e-ad4d-4a8c-af8e-8d1e2033b501 tempest-ServerDiagnosticsNegativeTest-1306946091 tempest-ServerDiagnosticsNegativeTest-1306946091-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.091s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 785.556698] env[62070]: DEBUG oslo_concurrency.lockutils [None req-693c53de-9486-417a-87eb-2668a7f83b07 tempest-ServerAddressesTestJSON-560794175 tempest-ServerAddressesTestJSON-560794175-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 26.797s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 785.556965] env[62070]: DEBUG nova.objects.instance [None req-693c53de-9486-417a-87eb-2668a7f83b07 tempest-ServerAddressesTestJSON-560794175 tempest-ServerAddressesTestJSON-560794175-project-member] Lazy-loading 'resources' on Instance uuid a3fcb849-b015-43aa-8f95-0d4a87e2cecc {{(pid=62070) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 785.559238] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-057130d4-7042-4be4-8476-c95e0d6f17f9 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.581831] env[62070]: DEBUG oslo_vmware.api [None req-174e44be-5f42-4d72-b785-d7e1a5fd4e8e tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] Task: {'id': task-1121661, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 785.582894] env[62070]: INFO nova.scheduler.client.report [None req-912d068e-ad4d-4a8c-af8e-8d1e2033b501 tempest-ServerDiagnosticsNegativeTest-1306946091 tempest-ServerDiagnosticsNegativeTest-1306946091-project-member] Deleted allocations for instance 42a5c5d8-5c3a-4568-b212-d87f2951a334 [ 785.779253] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1121664, 'name': CreateVM_Task, 'duration_secs': 0.417589} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 785.779433] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b7fdf23e-1e39-4745-ae84-38b7fa89aa5d] Created VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 785.780173] env[62070]: DEBUG oslo_concurrency.lockutils [None req-90df06cb-f655-463e-98fa-a0efa725c8cc tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 785.780355] env[62070]: DEBUG oslo_concurrency.lockutils [None req-90df06cb-f655-463e-98fa-a0efa725c8cc tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] Acquired lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 785.780698] env[62070]: DEBUG oslo_concurrency.lockutils [None req-90df06cb-f655-463e-98fa-a0efa725c8cc tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 785.780969] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0471ae75-a40a-4c76-9ee8-77da89a83045 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.785976] env[62070]: DEBUG oslo_vmware.api [None req-90df06cb-f655-463e-98fa-a0efa725c8cc tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] Waiting for the task: (returnval){ [ 785.785976] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]52aad1b1-024b-93b3-1513-96b13034d592" [ 785.785976] env[62070]: _type = "Task" [ 785.785976] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 785.793985] env[62070]: DEBUG oslo_vmware.api [None req-90df06cb-f655-463e-98fa-a0efa725c8cc tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52aad1b1-024b-93b3-1513-96b13034d592, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 785.846272] env[62070]: DEBUG oslo_concurrency.lockutils [None req-1f25b52c-bc5b-4328-ac52-d99c50d50e57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 785.944097] env[62070]: DEBUG oslo_concurrency.lockutils [None req-41f5ad7c-2434-4c65-9485-20b83ee7fd19 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Acquiring lock "d148d561-3211-4f1f-965a-f2b14cd60b11" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 785.944449] env[62070]: DEBUG oslo_concurrency.lockutils [None req-41f5ad7c-2434-4c65-9485-20b83ee7fd19 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Lock "d148d561-3211-4f1f-965a-f2b14cd60b11" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 785.944714] env[62070]: DEBUG oslo_concurrency.lockutils [None req-41f5ad7c-2434-4c65-9485-20b83ee7fd19 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Acquiring lock "d148d561-3211-4f1f-965a-f2b14cd60b11-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 785.944942] env[62070]: DEBUG oslo_concurrency.lockutils [None req-41f5ad7c-2434-4c65-9485-20b83ee7fd19 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Lock "d148d561-3211-4f1f-965a-f2b14cd60b11-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 785.945160] env[62070]: DEBUG oslo_concurrency.lockutils [None req-41f5ad7c-2434-4c65-9485-20b83ee7fd19 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Lock "d148d561-3211-4f1f-965a-f2b14cd60b11-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 785.947732] env[62070]: INFO nova.compute.manager [None req-41f5ad7c-2434-4c65-9485-20b83ee7fd19 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] [instance: d148d561-3211-4f1f-965a-f2b14cd60b11] Terminating instance [ 785.949884] env[62070]: DEBUG nova.compute.manager [None req-41f5ad7c-2434-4c65-9485-20b83ee7fd19 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] [instance: d148d561-3211-4f1f-965a-f2b14cd60b11] Start destroying the instance on the hypervisor. 
{{(pid=62070) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 785.950130] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-41f5ad7c-2434-4c65-9485-20b83ee7fd19 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] [instance: d148d561-3211-4f1f-965a-f2b14cd60b11] Destroying instance {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 785.951039] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30db4189-7dfc-47b6-8c6d-d3768a7d5bc6 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.959386] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-41f5ad7c-2434-4c65-9485-20b83ee7fd19 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] [instance: d148d561-3211-4f1f-965a-f2b14cd60b11] Powering off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 785.959546] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-fb993706-bb64-4c5b-9818-e3ba2710ce5b {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.966105] env[62070]: DEBUG oslo_vmware.api [None req-41f5ad7c-2434-4c65-9485-20b83ee7fd19 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Waiting for the task: (returnval){ [ 785.966105] env[62070]: value = "task-1121665" [ 785.966105] env[62070]: _type = "Task" [ 785.966105] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 785.974872] env[62070]: DEBUG oslo_vmware.api [None req-41f5ad7c-2434-4c65-9485-20b83ee7fd19 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Task: {'id': task-1121665, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 786.080520] env[62070]: DEBUG oslo_vmware.api [None req-174e44be-5f42-4d72-b785-d7e1a5fd4e8e tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] Task: {'id': task-1121661, 'name': PowerOnVM_Task, 'duration_secs': 1.167332} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 786.080520] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-174e44be-5f42-4d72-b785-d7e1a5fd4e8e tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] [instance: 1ce155c8-9a10-4eff-b428-31889aa8f638] Powered on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 786.080520] env[62070]: INFO nova.compute.manager [None req-174e44be-5f42-4d72-b785-d7e1a5fd4e8e tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] [instance: 1ce155c8-9a10-4eff-b428-31889aa8f638] Took 12.68 seconds to spawn the instance on the hypervisor. 
[ 786.080769] env[62070]: DEBUG nova.compute.manager [None req-174e44be-5f42-4d72-b785-d7e1a5fd4e8e tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] [instance: 1ce155c8-9a10-4eff-b428-31889aa8f638] Checking state {{(pid=62070) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 786.081487] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e2c7725-b151-41c9-966b-513df7d8d1b6 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.099259] env[62070]: DEBUG oslo_concurrency.lockutils [None req-912d068e-ad4d-4a8c-af8e-8d1e2033b501 tempest-ServerDiagnosticsNegativeTest-1306946091 tempest-ServerDiagnosticsNegativeTest-1306946091-project-member] Lock "42a5c5d8-5c3a-4568-b212-d87f2951a334" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 31.836s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 786.298805] env[62070]: DEBUG oslo_vmware.api [None req-90df06cb-f655-463e-98fa-a0efa725c8cc tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52aad1b1-024b-93b3-1513-96b13034d592, 'name': SearchDatastore_Task, 'duration_secs': 0.010709} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 786.299148] env[62070]: DEBUG oslo_concurrency.lockutils [None req-90df06cb-f655-463e-98fa-a0efa725c8cc tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] Releasing lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 786.299424] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-90df06cb-f655-463e-98fa-a0efa725c8cc tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] [instance: b7fdf23e-1e39-4745-ae84-38b7fa89aa5d] Processing image 43ea607c-7ece-4601-9b11-75c6a16aa7dd {{(pid=62070) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 786.299719] env[62070]: DEBUG oslo_concurrency.lockutils [None req-90df06cb-f655-463e-98fa-a0efa725c8cc tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 786.299836] env[62070]: DEBUG oslo_concurrency.lockutils [None req-90df06cb-f655-463e-98fa-a0efa725c8cc tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] Acquired lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 786.300066] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-90df06cb-f655-463e-98fa-a0efa725c8cc tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62070) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 786.300396] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-96b0cfa9-9c94-4be0-b8fc-9e469eff2fd7 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.310036] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-90df06cb-f655-463e-98fa-a0efa725c8cc tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 786.310036] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-90df06cb-f655-463e-98fa-a0efa725c8cc tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62070) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 786.310462] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fc95c61a-68c1-49b6-9faf-f21998a5919e {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.315328] env[62070]: DEBUG oslo_vmware.api [None req-90df06cb-f655-463e-98fa-a0efa725c8cc tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] Waiting for the task: (returnval){ [ 786.315328] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]5219a717-ca2c-fd5f-2198-677cec2c7bc6" [ 786.315328] env[62070]: _type = "Task" [ 786.315328] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 786.327612] env[62070]: DEBUG oslo_vmware.api [None req-90df06cb-f655-463e-98fa-a0efa725c8cc tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]5219a717-ca2c-fd5f-2198-677cec2c7bc6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 786.449303] env[62070]: DEBUG nova.network.neutron [None req-9f976259-6d6a-4631-9026-5216a519ae6c tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] [instance: 10672096-00ba-4481-8ab3-085a185076db] Successfully updated port: d7ba9e32-c151-4f86-90ea-ba3a9dc7ea10 {{(pid=62070) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 786.465556] env[62070]: DEBUG nova.compute.manager [req-67629a29-d36f-46f5-bcc5-954f941f80a7 req-b753ee97-9d34-4fd1-a366-0816be199e1e service nova] [instance: b7fdf23e-1e39-4745-ae84-38b7fa89aa5d] Received event network-changed-f5d6c6b2-70c6-484b-8c25-b15a991f1434 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 786.465750] env[62070]: DEBUG nova.compute.manager [req-67629a29-d36f-46f5-bcc5-954f941f80a7 req-b753ee97-9d34-4fd1-a366-0816be199e1e service nova] [instance: b7fdf23e-1e39-4745-ae84-38b7fa89aa5d] Refreshing instance network info cache due to event network-changed-f5d6c6b2-70c6-484b-8c25-b15a991f1434. 
{{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 786.466015] env[62070]: DEBUG oslo_concurrency.lockutils [req-67629a29-d36f-46f5-bcc5-954f941f80a7 req-b753ee97-9d34-4fd1-a366-0816be199e1e service nova] Acquiring lock "refresh_cache-b7fdf23e-1e39-4745-ae84-38b7fa89aa5d" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 786.466202] env[62070]: DEBUG oslo_concurrency.lockutils [req-67629a29-d36f-46f5-bcc5-954f941f80a7 req-b753ee97-9d34-4fd1-a366-0816be199e1e service nova] Acquired lock "refresh_cache-b7fdf23e-1e39-4745-ae84-38b7fa89aa5d" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 786.466375] env[62070]: DEBUG nova.network.neutron [req-67629a29-d36f-46f5-bcc5-954f941f80a7 req-b753ee97-9d34-4fd1-a366-0816be199e1e service nova] [instance: b7fdf23e-1e39-4745-ae84-38b7fa89aa5d] Refreshing network info cache for port f5d6c6b2-70c6-484b-8c25-b15a991f1434 {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 786.481638] env[62070]: DEBUG oslo_vmware.api [None req-41f5ad7c-2434-4c65-9485-20b83ee7fd19 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Task: {'id': task-1121665, 'name': PowerOffVM_Task, 'duration_secs': 0.226837} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 786.481902] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-41f5ad7c-2434-4c65-9485-20b83ee7fd19 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] [instance: d148d561-3211-4f1f-965a-f2b14cd60b11] Powered off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 786.482201] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-41f5ad7c-2434-4c65-9485-20b83ee7fd19 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] [instance: d148d561-3211-4f1f-965a-f2b14cd60b11] Unregistering the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 786.482496] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-014eb6fc-672f-4d99-936a-15e76fda7ad7 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.533048] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1d5405c-de9a-4beb-87bc-ac2d6fb018bc {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.540984] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da6aa2ff-36b6-4f1d-9fc1-9346ef182b93 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.574480] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a62344e-7910-4330-9973-e5ae29512fe8 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.577225] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-41f5ad7c-2434-4c65-9485-20b83ee7fd19 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] [instance: d148d561-3211-4f1f-965a-f2b14cd60b11] Unregistered the VM 
{{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 786.577483] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-41f5ad7c-2434-4c65-9485-20b83ee7fd19 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] [instance: d148d561-3211-4f1f-965a-f2b14cd60b11] Deleting contents of the VM from datastore datastore1 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 786.577706] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-41f5ad7c-2434-4c65-9485-20b83ee7fd19 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Deleting the datastore file [datastore1] d148d561-3211-4f1f-965a-f2b14cd60b11 {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 786.577996] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-baa9c2f6-7a9a-4caf-8f32-9df55644c955 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.588843] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1852ae81-57e3-468e-b477-fc0b4ce3db74 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.593227] env[62070]: DEBUG oslo_vmware.api [None req-41f5ad7c-2434-4c65-9485-20b83ee7fd19 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Waiting for the task: (returnval){ [ 786.593227] env[62070]: value = "task-1121667" [ 786.593227] env[62070]: _type = "Task" [ 786.593227] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 786.610916] env[62070]: DEBUG nova.compute.provider_tree [None req-693c53de-9486-417a-87eb-2668a7f83b07 tempest-ServerAddressesTestJSON-560794175 tempest-ServerAddressesTestJSON-560794175-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 786.617096] env[62070]: INFO nova.compute.manager [None req-174e44be-5f42-4d72-b785-d7e1a5fd4e8e tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] [instance: 1ce155c8-9a10-4eff-b428-31889aa8f638] Took 35.37 seconds to build instance. [ 786.621490] env[62070]: DEBUG oslo_vmware.api [None req-41f5ad7c-2434-4c65-9485-20b83ee7fd19 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Task: {'id': task-1121667, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 786.829129] env[62070]: DEBUG oslo_vmware.api [None req-90df06cb-f655-463e-98fa-a0efa725c8cc tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]5219a717-ca2c-fd5f-2198-677cec2c7bc6, 'name': SearchDatastore_Task, 'duration_secs': 0.019593} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 786.830120] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c4d79fc5-7d29-4515-8b1d-f88579f58e27 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.835347] env[62070]: DEBUG oslo_vmware.api [None req-90df06cb-f655-463e-98fa-a0efa725c8cc tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] Waiting for the task: (returnval){ [ 786.835347] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]529a9e55-327b-da10-95b7-d86895935e2b" [ 786.835347] env[62070]: _type = "Task" [ 786.835347] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 786.842591] env[62070]: DEBUG oslo_vmware.api [None req-90df06cb-f655-463e-98fa-a0efa725c8cc tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]529a9e55-327b-da10-95b7-d86895935e2b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 786.951516] env[62070]: DEBUG oslo_concurrency.lockutils [None req-9f976259-6d6a-4631-9026-5216a519ae6c tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Acquiring lock "refresh_cache-10672096-00ba-4481-8ab3-085a185076db" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 786.951691] env[62070]: DEBUG oslo_concurrency.lockutils [None req-9f976259-6d6a-4631-9026-5216a519ae6c tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Acquired lock "refresh_cache-10672096-00ba-4481-8ab3-085a185076db" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 786.951835] env[62070]: DEBUG nova.network.neutron [None req-9f976259-6d6a-4631-9026-5216a519ae6c tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] [instance: 10672096-00ba-4481-8ab3-085a185076db] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 787.401158] env[62070]: DEBUG nova.scheduler.client.report [None req-693c53de-9486-417a-87eb-2668a7f83b07 tempest-ServerAddressesTestJSON-560794175 tempest-ServerAddressesTestJSON-560794175-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 787.407776] env[62070]: DEBUG oslo_concurrency.lockutils [None req-174e44be-5f42-4d72-b785-d7e1a5fd4e8e tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] Lock "1ce155c8-9a10-4eff-b428-31889aa8f638" "released" by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 114.121s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 787.428658] env[62070]: DEBUG oslo_vmware.api [None req-41f5ad7c-2434-4c65-9485-20b83ee7fd19 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Task: {'id': task-1121667, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.348219} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 787.436611] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-41f5ad7c-2434-4c65-9485-20b83ee7fd19 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Deleted the datastore file {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 787.436738] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-41f5ad7c-2434-4c65-9485-20b83ee7fd19 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] [instance: d148d561-3211-4f1f-965a-f2b14cd60b11] Deleted contents of the VM from datastore datastore1 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 787.436967] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-41f5ad7c-2434-4c65-9485-20b83ee7fd19 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] [instance: d148d561-3211-4f1f-965a-f2b14cd60b11] Instance destroyed {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 787.438525] env[62070]: INFO nova.compute.manager [None req-41f5ad7c-2434-4c65-9485-20b83ee7fd19 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] [instance: d148d561-3211-4f1f-965a-f2b14cd60b11] Took 1.49 seconds to destroy the instance on the hypervisor. [ 787.438874] env[62070]: DEBUG oslo.service.loopingcall [None req-41f5ad7c-2434-4c65-9485-20b83ee7fd19 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 787.439169] env[62070]: DEBUG oslo_vmware.api [None req-90df06cb-f655-463e-98fa-a0efa725c8cc tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]529a9e55-327b-da10-95b7-d86895935e2b, 'name': SearchDatastore_Task, 'duration_secs': 0.025621} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 787.440865] env[62070]: DEBUG nova.compute.manager [-] [instance: d148d561-3211-4f1f-965a-f2b14cd60b11] Deallocating network for instance {{(pid=62070) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 787.441018] env[62070]: DEBUG nova.network.neutron [-] [instance: d148d561-3211-4f1f-965a-f2b14cd60b11] deallocate_for_instance() {{(pid=62070) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 787.442930] env[62070]: DEBUG oslo_concurrency.lockutils [None req-90df06cb-f655-463e-98fa-a0efa725c8cc tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] Releasing lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 787.443253] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-90df06cb-f655-463e-98fa-a0efa725c8cc tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore1] b7fdf23e-1e39-4745-ae84-38b7fa89aa5d/b7fdf23e-1e39-4745-ae84-38b7fa89aa5d.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 787.447296] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-30f75ab8-cf04-4c12-af6d-09d2ee122a1d {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.456923] env[62070]: DEBUG oslo_vmware.api [None req-90df06cb-f655-463e-98fa-a0efa725c8cc tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] Waiting for the task: (returnval){ [ 787.456923] env[62070]: value = "task-1121668" [ 787.456923] env[62070]: _type = "Task" [ 787.456923] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 787.472526] env[62070]: DEBUG oslo_vmware.api [None req-90df06cb-f655-463e-98fa-a0efa725c8cc tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] Task: {'id': task-1121668, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 787.534302] env[62070]: DEBUG nova.network.neutron [None req-9f976259-6d6a-4631-9026-5216a519ae6c tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] [instance: 10672096-00ba-4481-8ab3-085a185076db] Instance cache missing network info. {{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 787.652388] env[62070]: DEBUG nova.network.neutron [req-67629a29-d36f-46f5-bcc5-954f941f80a7 req-b753ee97-9d34-4fd1-a366-0816be199e1e service nova] [instance: b7fdf23e-1e39-4745-ae84-38b7fa89aa5d] Updated VIF entry in instance network info cache for port f5d6c6b2-70c6-484b-8c25-b15a991f1434. 
{{(pid=62070) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 787.652748] env[62070]: DEBUG nova.network.neutron [req-67629a29-d36f-46f5-bcc5-954f941f80a7 req-b753ee97-9d34-4fd1-a366-0816be199e1e service nova] [instance: b7fdf23e-1e39-4745-ae84-38b7fa89aa5d] Updating instance_info_cache with network_info: [{"id": "f5d6c6b2-70c6-484b-8c25-b15a991f1434", "address": "fa:16:3e:39:39:3b", "network": {"id": "138c97e2-fdc6-4a10-a96a-4a15e4f93fd6", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-576494802-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "99567c9d305e486f9559797458c9a0e5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c842425c-544e-4ce2-9657-512723bd318e", "external-id": "nsx-vlan-transportzone-80", "segmentation_id": 80, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf5d6c6b2-70", "ovs_interfaceid": "f5d6c6b2-70c6-484b-8c25-b15a991f1434", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 787.716471] env[62070]: DEBUG oslo_concurrency.lockutils [None req-7ef3c4d0-3bb0-4ce7-af88-13da4a0bcc02 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Acquiring lock "c16d175c-0b23-4f72-bdb0-844c6f80fd32" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 787.716725] env[62070]: DEBUG oslo_concurrency.lockutils [None req-7ef3c4d0-3bb0-4ce7-af88-13da4a0bcc02 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Lock "c16d175c-0b23-4f72-bdb0-844c6f80fd32" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 787.718791] env[62070]: DEBUG nova.compute.manager [req-b90272c2-35c1-494f-8260-017f0a8ed080 req-c5431c48-8493-427c-a4c6-0e0fad9d40e1 service nova] [instance: 10672096-00ba-4481-8ab3-085a185076db] Received event network-vif-plugged-d7ba9e32-c151-4f86-90ea-ba3a9dc7ea10 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 787.719210] env[62070]: DEBUG oslo_concurrency.lockutils [req-b90272c2-35c1-494f-8260-017f0a8ed080 req-c5431c48-8493-427c-a4c6-0e0fad9d40e1 service nova] Acquiring lock "10672096-00ba-4481-8ab3-085a185076db-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 787.719309] env[62070]: DEBUG oslo_concurrency.lockutils [req-b90272c2-35c1-494f-8260-017f0a8ed080 req-c5431c48-8493-427c-a4c6-0e0fad9d40e1 service nova] Lock "10672096-00ba-4481-8ab3-085a185076db-events" acquired by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 787.719438] env[62070]: DEBUG oslo_concurrency.lockutils [req-b90272c2-35c1-494f-8260-017f0a8ed080 req-c5431c48-8493-427c-a4c6-0e0fad9d40e1 service nova] Lock "10672096-00ba-4481-8ab3-085a185076db-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 787.719672] env[62070]: DEBUG nova.compute.manager [req-b90272c2-35c1-494f-8260-017f0a8ed080 req-c5431c48-8493-427c-a4c6-0e0fad9d40e1 service nova] [instance: 10672096-00ba-4481-8ab3-085a185076db] No waiting events found dispatching network-vif-plugged-d7ba9e32-c151-4f86-90ea-ba3a9dc7ea10 {{(pid=62070) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 787.719812] env[62070]: WARNING nova.compute.manager [req-b90272c2-35c1-494f-8260-017f0a8ed080 req-c5431c48-8493-427c-a4c6-0e0fad9d40e1 service nova] [instance: 10672096-00ba-4481-8ab3-085a185076db] Received unexpected event network-vif-plugged-d7ba9e32-c151-4f86-90ea-ba3a9dc7ea10 for instance with vm_state building and task_state spawning. [ 787.719964] env[62070]: DEBUG nova.compute.manager [req-b90272c2-35c1-494f-8260-017f0a8ed080 req-c5431c48-8493-427c-a4c6-0e0fad9d40e1 service nova] [instance: 10672096-00ba-4481-8ab3-085a185076db] Received event network-changed-d7ba9e32-c151-4f86-90ea-ba3a9dc7ea10 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 787.720133] env[62070]: DEBUG nova.compute.manager [req-b90272c2-35c1-494f-8260-017f0a8ed080 req-c5431c48-8493-427c-a4c6-0e0fad9d40e1 service nova] [instance: 10672096-00ba-4481-8ab3-085a185076db] Refreshing instance network info cache due to event network-changed-d7ba9e32-c151-4f86-90ea-ba3a9dc7ea10. 
{{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 787.720332] env[62070]: DEBUG oslo_concurrency.lockutils [req-b90272c2-35c1-494f-8260-017f0a8ed080 req-c5431c48-8493-427c-a4c6-0e0fad9d40e1 service nova] Acquiring lock "refresh_cache-10672096-00ba-4481-8ab3-085a185076db" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 787.739237] env[62070]: DEBUG nova.network.neutron [None req-9f976259-6d6a-4631-9026-5216a519ae6c tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] [instance: 10672096-00ba-4481-8ab3-085a185076db] Updating instance_info_cache with network_info: [{"id": "d7ba9e32-c151-4f86-90ea-ba3a9dc7ea10", "address": "fa:16:3e:0b:04:cf", "network": {"id": "25f38244-53c8-44e1-a7a9-a8a37ce83ffa", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1382757376-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7525c88cd803420094fb2af593ba5d65", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69e1a5c1-4ae7-409b-8de7-d401684ef60d", "external-id": "nsx-vlan-transportzone-740", "segmentation_id": 740, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd7ba9e32-c1", "ovs_interfaceid": "d7ba9e32-c151-4f86-90ea-ba3a9dc7ea10", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 787.924427] env[62070]: DEBUG nova.compute.manager [None req-afd9a476-b061-4376-a74f-1ae1bdb543db tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] [instance: 4a5f644a-1670-4c6b-a762-f87f1ee4cce5] Starting instance... 
{{(pid=62070) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 787.928734] env[62070]: DEBUG oslo_concurrency.lockutils [None req-693c53de-9486-417a-87eb-2668a7f83b07 tempest-ServerAddressesTestJSON-560794175 tempest-ServerAddressesTestJSON-560794175-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.372s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 787.935646] env[62070]: DEBUG oslo_concurrency.lockutils [None req-2de264e4-258a-4ba0-b64b-88da6d460fb5 tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.595s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 787.938517] env[62070]: INFO nova.compute.claims [None req-2de264e4-258a-4ba0-b64b-88da6d460fb5 tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] [instance: 2368b649-f931-454c-92cc-971df4155d90] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 787.953434] env[62070]: DEBUG oslo_concurrency.lockutils [None req-15709be5-751d-460f-82a0-c02f3a133ee4 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Acquiring lock "interface-0ac963b1-120a-464b-8228-3393135dd182-None" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 787.953434] env[62070]: DEBUG oslo_concurrency.lockutils [None req-15709be5-751d-460f-82a0-c02f3a133ee4 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Lock "interface-0ac963b1-120a-464b-8228-3393135dd182-None" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.001s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 787.953434] env[62070]: DEBUG nova.objects.instance [None req-15709be5-751d-460f-82a0-c02f3a133ee4 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Lazy-loading 'flavor' on Instance uuid 0ac963b1-120a-464b-8228-3393135dd182 {{(pid=62070) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 787.972730] env[62070]: INFO nova.scheduler.client.report [None req-693c53de-9486-417a-87eb-2668a7f83b07 tempest-ServerAddressesTestJSON-560794175 tempest-ServerAddressesTestJSON-560794175-project-member] Deleted allocations for instance a3fcb849-b015-43aa-8f95-0d4a87e2cecc [ 787.983034] env[62070]: DEBUG oslo_vmware.api [None req-90df06cb-f655-463e-98fa-a0efa725c8cc tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] Task: {'id': task-1121668, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 788.155922] env[62070]: DEBUG oslo_concurrency.lockutils [req-67629a29-d36f-46f5-bcc5-954f941f80a7 req-b753ee97-9d34-4fd1-a366-0816be199e1e service nova] Releasing lock "refresh_cache-b7fdf23e-1e39-4745-ae84-38b7fa89aa5d" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 788.241473] env[62070]: DEBUG oslo_concurrency.lockutils [None req-9f976259-6d6a-4631-9026-5216a519ae6c tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Releasing lock "refresh_cache-10672096-00ba-4481-8ab3-085a185076db" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 788.242166] env[62070]: DEBUG nova.compute.manager [None req-9f976259-6d6a-4631-9026-5216a519ae6c tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] [instance: 10672096-00ba-4481-8ab3-085a185076db] Instance network_info: |[{"id": "d7ba9e32-c151-4f86-90ea-ba3a9dc7ea10", "address": "fa:16:3e:0b:04:cf", "network": {"id": "25f38244-53c8-44e1-a7a9-a8a37ce83ffa", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1382757376-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7525c88cd803420094fb2af593ba5d65", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69e1a5c1-4ae7-409b-8de7-d401684ef60d", "external-id": "nsx-vlan-transportzone-740", "segmentation_id": 740, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd7ba9e32-c1", "ovs_interfaceid": "d7ba9e32-c151-4f86-90ea-ba3a9dc7ea10", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 788.242516] env[62070]: DEBUG oslo_concurrency.lockutils [req-b90272c2-35c1-494f-8260-017f0a8ed080 req-c5431c48-8493-427c-a4c6-0e0fad9d40e1 service nova] Acquired lock "refresh_cache-10672096-00ba-4481-8ab3-085a185076db" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 788.242715] env[62070]: DEBUG nova.network.neutron [req-b90272c2-35c1-494f-8260-017f0a8ed080 req-c5431c48-8493-427c-a4c6-0e0fad9d40e1 service nova] [instance: 10672096-00ba-4481-8ab3-085a185076db] Refreshing network info cache for port d7ba9e32-c151-4f86-90ea-ba3a9dc7ea10 {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 788.243979] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-9f976259-6d6a-4631-9026-5216a519ae6c tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] [instance: 10672096-00ba-4481-8ab3-085a185076db] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0b:04:cf', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '69e1a5c1-4ae7-409b-8de7-d401684ef60d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 
'iface_id': 'd7ba9e32-c151-4f86-90ea-ba3a9dc7ea10', 'vif_model': 'vmxnet3'}] {{(pid=62070) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 788.252847] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-9f976259-6d6a-4631-9026-5216a519ae6c tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Creating folder: Project (7525c88cd803420094fb2af593ba5d65). Parent ref: group-v245319. {{(pid=62070) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 788.253865] env[62070]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ffeb6761-8d56-499e-a84a-1dd805c495f7 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.265249] env[62070]: INFO nova.virt.vmwareapi.vm_util [None req-9f976259-6d6a-4631-9026-5216a519ae6c tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Created folder: Project (7525c88cd803420094fb2af593ba5d65) in parent group-v245319. [ 788.265456] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-9f976259-6d6a-4631-9026-5216a519ae6c tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Creating folder: Instances. Parent ref: group-v245391. {{(pid=62070) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 788.265706] env[62070]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0b56d213-7d36-4cc5-bf51-e20ecda2618a {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.275997] env[62070]: INFO nova.virt.vmwareapi.vm_util [None req-9f976259-6d6a-4631-9026-5216a519ae6c tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Created folder: Instances in parent group-v245391. [ 788.275997] env[62070]: DEBUG oslo.service.loopingcall [None req-9f976259-6d6a-4631-9026-5216a519ae6c tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 788.275997] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 10672096-00ba-4481-8ab3-085a185076db] Creating VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 788.276246] env[62070]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-dc2f38a4-ae41-4bbd-b027-8521d962b205 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.300311] env[62070]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 788.300311] env[62070]: value = "task-1121671" [ 788.300311] env[62070]: _type = "Task" [ 788.300311] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 788.309358] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1121671, 'name': CreateVM_Task} progress is 0%. 
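The "Waiting for function ... to return" records (here wrapping nova.virt.vmwareapi.vm_util.create_vm) come from oslo.service's looping-call helpers, which repeatedly invoke a function until it signals completion. A minimal sketch of that polling style using FixedIntervalLoopingCall follows; it is not necessarily the exact helper logged at loopingcall.py:435, and check_done is a hypothetical callable.

# Illustrative sketch only, using oslo.service's FixedIntervalLoopingCall.
from oslo_service import loopingcall

def wait_until_done(check_done):
    def _poll():
        if check_done():
            # Stop the loop and make start().wait() return.
            raise loopingcall.LoopingCallDone()
    timer = loopingcall.FixedIntervalLoopingCall(_poll)
    # Re-run _poll every 0.5 seconds until LoopingCallDone is raised.
    timer.start(interval=0.5).wait()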
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 788.457108] env[62070]: DEBUG oslo_concurrency.lockutils [None req-afd9a476-b061-4376-a74f-1ae1bdb543db tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 788.472587] env[62070]: DEBUG nova.network.neutron [-] [instance: d148d561-3211-4f1f-965a-f2b14cd60b11] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 788.474142] env[62070]: DEBUG oslo_vmware.api [None req-90df06cb-f655-463e-98fa-a0efa725c8cc tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] Task: {'id': task-1121668, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.620114} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 788.474218] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-90df06cb-f655-463e-98fa-a0efa725c8cc tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore1] b7fdf23e-1e39-4745-ae84-38b7fa89aa5d/b7fdf23e-1e39-4745-ae84-38b7fa89aa5d.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 788.474417] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-90df06cb-f655-463e-98fa-a0efa725c8cc tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] [instance: b7fdf23e-1e39-4745-ae84-38b7fa89aa5d] Extending root virtual disk to 1048576 {{(pid=62070) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 788.474664] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a1e8c71c-8d61-4ad2-be2a-412d9663bdc6 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.480988] env[62070]: DEBUG oslo_concurrency.lockutils [None req-693c53de-9486-417a-87eb-2668a7f83b07 tempest-ServerAddressesTestJSON-560794175 tempest-ServerAddressesTestJSON-560794175-project-member] Lock "a3fcb849-b015-43aa-8f95-0d4a87e2cecc" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 33.045s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 788.482742] env[62070]: DEBUG oslo_vmware.api [None req-90df06cb-f655-463e-98fa-a0efa725c8cc tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] Waiting for the task: (returnval){ [ 788.482742] env[62070]: value = "task-1121672" [ 788.482742] env[62070]: _type = "Task" [ 788.482742] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 788.492796] env[62070]: DEBUG oslo_vmware.api [None req-90df06cb-f655-463e-98fa-a0efa725c8cc tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] Task: {'id': task-1121672, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 788.590641] env[62070]: DEBUG nova.objects.instance [None req-15709be5-751d-460f-82a0-c02f3a133ee4 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Lazy-loading 'pci_requests' on Instance uuid 0ac963b1-120a-464b-8228-3393135dd182 {{(pid=62070) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 788.797143] env[62070]: DEBUG oslo_concurrency.lockutils [None req-0d5d1c6b-8304-4910-b6d3-9f66b7a67ef9 tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] Acquiring lock "1ce155c8-9a10-4eff-b428-31889aa8f638" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 788.797463] env[62070]: DEBUG oslo_concurrency.lockutils [None req-0d5d1c6b-8304-4910-b6d3-9f66b7a67ef9 tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] Lock "1ce155c8-9a10-4eff-b428-31889aa8f638" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 788.797694] env[62070]: DEBUG oslo_concurrency.lockutils [None req-0d5d1c6b-8304-4910-b6d3-9f66b7a67ef9 tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] Acquiring lock "1ce155c8-9a10-4eff-b428-31889aa8f638-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 788.797929] env[62070]: DEBUG oslo_concurrency.lockutils [None req-0d5d1c6b-8304-4910-b6d3-9f66b7a67ef9 tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] Lock "1ce155c8-9a10-4eff-b428-31889aa8f638-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 788.798124] env[62070]: DEBUG oslo_concurrency.lockutils [None req-0d5d1c6b-8304-4910-b6d3-9f66b7a67ef9 tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] Lock "1ce155c8-9a10-4eff-b428-31889aa8f638-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 788.800607] env[62070]: INFO nova.compute.manager [None req-0d5d1c6b-8304-4910-b6d3-9f66b7a67ef9 tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] [instance: 1ce155c8-9a10-4eff-b428-31889aa8f638] Terminating instance [ 788.805886] env[62070]: DEBUG nova.compute.manager [None req-0d5d1c6b-8304-4910-b6d3-9f66b7a67ef9 tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] [instance: 1ce155c8-9a10-4eff-b428-31889aa8f638] Start destroying the instance on the hypervisor. 
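The Acquiring / acquired / released triplets above are emitted by oslo.concurrency's lockutils wrappers (lockutils.py:402, 407, 421) whenever code enters and leaves a named lock, such as the per-instance lock taken before terminate_instance and the matching "-events" lock. A minimal sketch of the same synchronization pattern is below; the lock names and function bodies are purely illustrative.

# Illustrative sketch only; these calls produce the Acquiring/acquired/
# released DEBUG records seen in the log.
from oslo_concurrency import lockutils

# Decorator form: serialize work on one instance by its UUID.
@lockutils.synchronized('1ce155c8-9a10-4eff-b428-31889aa8f638')
def do_terminate_instance():
    # Only one thread of this service may run this block at a time.
    pass

def clear_events():
    # Context-manager form, matching the "-events" lock in the records above.
    with lockutils.lock('1ce155c8-9a10-4eff-b428-31889aa8f638-events'):
        pass  # pop or clear per-instance events while holding the lock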
{{(pid=62070) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 788.806132] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-0d5d1c6b-8304-4910-b6d3-9f66b7a67ef9 tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] [instance: 1ce155c8-9a10-4eff-b428-31889aa8f638] Destroying instance {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 788.806937] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-454bd63a-ea2a-452c-b9f4-65e04b9ee4ef {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.819667] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1121671, 'name': CreateVM_Task} progress is 99%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 788.822860] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-0d5d1c6b-8304-4910-b6d3-9f66b7a67ef9 tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] [instance: 1ce155c8-9a10-4eff-b428-31889aa8f638] Powering off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 788.822860] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7f0cf049-bb8f-4470-9842-2a6a289dccf3 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.829373] env[62070]: DEBUG oslo_vmware.api [None req-0d5d1c6b-8304-4910-b6d3-9f66b7a67ef9 tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] Waiting for the task: (returnval){ [ 788.829373] env[62070]: value = "task-1121673" [ 788.829373] env[62070]: _type = "Task" [ 788.829373] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 788.838113] env[62070]: DEBUG oslo_vmware.api [None req-0d5d1c6b-8304-4910-b6d3-9f66b7a67ef9 tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] Task: {'id': task-1121673, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 788.846383] env[62070]: DEBUG nova.compute.manager [req-32a8518a-6b62-427d-8bbf-4f749d0138ac req-8fca9d39-7412-47c9-9d01-b92b9559a1fb service nova] [instance: d148d561-3211-4f1f-965a-f2b14cd60b11] Received event network-vif-deleted-c5e6098a-ebbb-4eee-ba72-4ddaad679830 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 788.978117] env[62070]: INFO nova.compute.manager [-] [instance: d148d561-3211-4f1f-965a-f2b14cd60b11] Took 1.54 seconds to deallocate network for instance. [ 789.001313] env[62070]: DEBUG oslo_vmware.api [None req-90df06cb-f655-463e-98fa-a0efa725c8cc tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] Task: {'id': task-1121672, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.11829} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 789.004288] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-90df06cb-f655-463e-98fa-a0efa725c8cc tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] [instance: b7fdf23e-1e39-4745-ae84-38b7fa89aa5d] Extended root virtual disk {{(pid=62070) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 789.005412] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a8771f8-5675-4313-830f-da2744dc6f3a {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.032904] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-90df06cb-f655-463e-98fa-a0efa725c8cc tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] [instance: b7fdf23e-1e39-4745-ae84-38b7fa89aa5d] Reconfiguring VM instance instance-00000036 to attach disk [datastore1] b7fdf23e-1e39-4745-ae84-38b7fa89aa5d/b7fdf23e-1e39-4745-ae84-38b7fa89aa5d.vmdk or device None with type sparse {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 789.039931] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bd785559-299f-4dea-af59-9caae811266c {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.061927] env[62070]: DEBUG oslo_vmware.api [None req-90df06cb-f655-463e-98fa-a0efa725c8cc tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] Waiting for the task: (returnval){ [ 789.061927] env[62070]: value = "task-1121674" [ 789.061927] env[62070]: _type = "Task" [ 789.061927] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 789.071968] env[62070]: DEBUG oslo_vmware.api [None req-90df06cb-f655-463e-98fa-a0efa725c8cc tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] Task: {'id': task-1121674, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 789.096336] env[62070]: DEBUG nova.objects.base [None req-15709be5-751d-460f-82a0-c02f3a133ee4 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Object Instance<0ac963b1-120a-464b-8228-3393135dd182> lazy-loaded attributes: flavor,pci_requests {{(pid=62070) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 789.096641] env[62070]: DEBUG nova.network.neutron [None req-15709be5-751d-460f-82a0-c02f3a133ee4 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] [instance: 0ac963b1-120a-464b-8228-3393135dd182] allocate_for_instance() {{(pid=62070) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 789.107736] env[62070]: DEBUG nova.network.neutron [req-b90272c2-35c1-494f-8260-017f0a8ed080 req-c5431c48-8493-427c-a4c6-0e0fad9d40e1 service nova] [instance: 10672096-00ba-4481-8ab3-085a185076db] Updated VIF entry in instance network info cache for port d7ba9e32-c151-4f86-90ea-ba3a9dc7ea10. 
{{(pid=62070) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 789.108115] env[62070]: DEBUG nova.network.neutron [req-b90272c2-35c1-494f-8260-017f0a8ed080 req-c5431c48-8493-427c-a4c6-0e0fad9d40e1 service nova] [instance: 10672096-00ba-4481-8ab3-085a185076db] Updating instance_info_cache with network_info: [{"id": "d7ba9e32-c151-4f86-90ea-ba3a9dc7ea10", "address": "fa:16:3e:0b:04:cf", "network": {"id": "25f38244-53c8-44e1-a7a9-a8a37ce83ffa", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1382757376-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7525c88cd803420094fb2af593ba5d65", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69e1a5c1-4ae7-409b-8de7-d401684ef60d", "external-id": "nsx-vlan-transportzone-740", "segmentation_id": 740, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd7ba9e32-c1", "ovs_interfaceid": "d7ba9e32-c151-4f86-90ea-ba3a9dc7ea10", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 789.184934] env[62070]: DEBUG nova.policy [None req-15709be5-751d-460f-82a0-c02f3a133ee4 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7534320dee8f486e90f5174aa94d00bd', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '925dff51764c4b56ae7ea05fbde2ecdd', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62070) authorize /opt/stack/nova/nova/policy.py:201}} [ 789.316254] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1121671, 'name': CreateVM_Task, 'duration_secs': 0.567747} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 789.319033] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 10672096-00ba-4481-8ab3-085a185076db] Created VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 789.319033] env[62070]: DEBUG oslo_concurrency.lockutils [None req-9f976259-6d6a-4631-9026-5216a519ae6c tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 789.319033] env[62070]: DEBUG oslo_concurrency.lockutils [None req-9f976259-6d6a-4631-9026-5216a519ae6c tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Acquired lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 789.319033] env[62070]: DEBUG oslo_concurrency.lockutils [None req-9f976259-6d6a-4631-9026-5216a519ae6c tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 789.319033] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ae01c43e-0664-4fd0-a69b-8421f00cdfb5 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.323409] env[62070]: DEBUG oslo_vmware.api [None req-9f976259-6d6a-4631-9026-5216a519ae6c tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Waiting for the task: (returnval){ [ 789.323409] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]529b3e51-e266-0769-db8d-172688d410ec" [ 789.323409] env[62070]: _type = "Task" [ 789.323409] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 789.332741] env[62070]: DEBUG oslo_vmware.api [None req-9f976259-6d6a-4631-9026-5216a519ae6c tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]529b3e51-e266-0769-db8d-172688d410ec, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 789.344508] env[62070]: DEBUG oslo_vmware.api [None req-0d5d1c6b-8304-4910-b6d3-9f66b7a67ef9 tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] Task: {'id': task-1121673, 'name': PowerOffVM_Task, 'duration_secs': 0.287326} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 789.344817] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-0d5d1c6b-8304-4910-b6d3-9f66b7a67ef9 tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] [instance: 1ce155c8-9a10-4eff-b428-31889aa8f638] Powered off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 789.345143] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-0d5d1c6b-8304-4910-b6d3-9f66b7a67ef9 tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] [instance: 1ce155c8-9a10-4eff-b428-31889aa8f638] Unregistering the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 789.345286] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4e17e420-4e04-48cc-a578-8ba4681953b1 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.396144] env[62070]: DEBUG oslo_vmware.rw_handles [None req-4566d04d-4f38-497b-9c8b-991a179272b8 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/524cea7d-34c1-a27a-7e97-5e24fca93c1f/disk-0.vmdk. {{(pid=62070) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 789.396144] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d2edf08-1a85-46a3-b939-7ed85aff3382 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.402248] env[62070]: DEBUG oslo_vmware.rw_handles [None req-4566d04d-4f38-497b-9c8b-991a179272b8 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/524cea7d-34c1-a27a-7e97-5e24fca93c1f/disk-0.vmdk is in state: ready. {{(pid=62070) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 789.402611] env[62070]: ERROR oslo_vmware.rw_handles [None req-4566d04d-4f38-497b-9c8b-991a179272b8 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/524cea7d-34c1-a27a-7e97-5e24fca93c1f/disk-0.vmdk due to incomplete transfer. [ 789.403080] env[62070]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-5f272735-6e4e-4854-a4cb-806e1f84697d {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.413614] env[62070]: DEBUG oslo_vmware.rw_handles [None req-4566d04d-4f38-497b-9c8b-991a179272b8 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/524cea7d-34c1-a27a-7e97-5e24fca93c1f/disk-0.vmdk. 
{{(pid=62070) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 789.413982] env[62070]: DEBUG nova.virt.vmwareapi.images [None req-4566d04d-4f38-497b-9c8b-991a179272b8 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] [instance: 359ae9f2-a907-459e-99b9-3e043d5d015f] Uploaded image 4dd20ead-44bf-4233-8544-9a7e21d4f7f2 to the Glance image server {{(pid=62070) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:473}} [ 789.416288] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-4566d04d-4f38-497b-9c8b-991a179272b8 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] [instance: 359ae9f2-a907-459e-99b9-3e043d5d015f] Destroying the VM {{(pid=62070) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1358}} [ 789.416689] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-6489f205-6415-4385-af77-3cf945027b8f {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.428035] env[62070]: DEBUG oslo_vmware.api [None req-4566d04d-4f38-497b-9c8b-991a179272b8 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Waiting for the task: (returnval){ [ 789.428035] env[62070]: value = "task-1121676" [ 789.428035] env[62070]: _type = "Task" [ 789.428035] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 789.438424] env[62070]: DEBUG oslo_vmware.api [None req-4566d04d-4f38-497b-9c8b-991a179272b8 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Task: {'id': task-1121676, 'name': Destroy_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 789.494107] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-0d5d1c6b-8304-4910-b6d3-9f66b7a67ef9 tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] [instance: 1ce155c8-9a10-4eff-b428-31889aa8f638] Unregistered the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 789.494107] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-0d5d1c6b-8304-4910-b6d3-9f66b7a67ef9 tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] [instance: 1ce155c8-9a10-4eff-b428-31889aa8f638] Deleting contents of the VM from datastore datastore2 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 789.494107] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-0d5d1c6b-8304-4910-b6d3-9f66b7a67ef9 tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] Deleting the datastore file [datastore2] 1ce155c8-9a10-4eff-b428-31889aa8f638 {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 789.494107] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4bd417a1-c651-42c1-9de8-715fd3d58290 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.496967] env[62070]: DEBUG oslo_concurrency.lockutils [None req-41f5ad7c-2434-4c65-9485-20b83ee7fd19 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 789.508275] env[62070]: DEBUG oslo_vmware.api [None req-0d5d1c6b-8304-4910-b6d3-9f66b7a67ef9 tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] Waiting for the task: (returnval){ [ 789.508275] env[62070]: value = "task-1121677" [ 789.508275] env[62070]: _type = "Task" [ 789.508275] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 789.517750] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5434188-d76d-43fe-8d7c-71ed45d66d4a {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.530490] env[62070]: DEBUG oslo_vmware.api [None req-0d5d1c6b-8304-4910-b6d3-9f66b7a67ef9 tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] Task: {'id': task-1121677, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 789.534904] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e614dfc-3942-4f54-bd57-8f29d9e1bcf8 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.539641] env[62070]: DEBUG nova.network.neutron [None req-15709be5-751d-460f-82a0-c02f3a133ee4 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] [instance: 0ac963b1-120a-464b-8228-3393135dd182] Successfully created port: 7df84135-5c3d-48c5-b2cf-176e77094879 {{(pid=62070) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 789.574253] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebed207f-afae-42e2-ad8e-1d33e74e7e0b {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.581840] env[62070]: DEBUG oslo_vmware.api [None req-90df06cb-f655-463e-98fa-a0efa725c8cc tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] Task: {'id': task-1121674, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 789.585019] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5bd45bd-2444-493a-a035-6700fa4a171a {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.599103] env[62070]: DEBUG nova.compute.provider_tree [None req-2de264e4-258a-4ba0-b64b-88da6d460fb5 tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 789.611619] env[62070]: DEBUG oslo_concurrency.lockutils [req-b90272c2-35c1-494f-8260-017f0a8ed080 req-c5431c48-8493-427c-a4c6-0e0fad9d40e1 service nova] Releasing lock "refresh_cache-10672096-00ba-4481-8ab3-085a185076db" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 789.834474] env[62070]: DEBUG oslo_vmware.api [None req-9f976259-6d6a-4631-9026-5216a519ae6c tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]529b3e51-e266-0769-db8d-172688d410ec, 'name': SearchDatastore_Task, 'duration_secs': 0.040166} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 789.834789] env[62070]: DEBUG oslo_concurrency.lockutils [None req-9f976259-6d6a-4631-9026-5216a519ae6c tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Releasing lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 789.835037] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-9f976259-6d6a-4631-9026-5216a519ae6c tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] [instance: 10672096-00ba-4481-8ab3-085a185076db] Processing image 43ea607c-7ece-4601-9b11-75c6a16aa7dd {{(pid=62070) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 789.835306] env[62070]: DEBUG oslo_concurrency.lockutils [None req-9f976259-6d6a-4631-9026-5216a519ae6c tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 789.835425] env[62070]: DEBUG oslo_concurrency.lockutils [None req-9f976259-6d6a-4631-9026-5216a519ae6c tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Acquired lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 789.835609] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-9f976259-6d6a-4631-9026-5216a519ae6c tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 789.835877] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8854c620-4d49-45e5-b3b2-23ca4d12159e {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.843904] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-9f976259-6d6a-4631-9026-5216a519ae6c tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 789.844362] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-9f976259-6d6a-4631-9026-5216a519ae6c tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Folder [datastore1] devstack-image-cache_base created. 
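The "Creating directory with path [datastore1] devstack-image-cache_base" / FileManager.MakeDirectory / "Created directory" records trace a synchronous datastore mkdir made through the same session API. A rough sketch of such a call is below, again assuming an existing oslo_vmware.api.VMwareAPISession named session and a Datacenter moref dc_ref; argument values are illustrative.

# Illustrative sketch only -- a datastore mkdir like the one logged above.
from oslo_vmware import exceptions as vexc

def mkdir(session, ds_path, dc_ref):
    try:
        # MakeDirectory is synchronous, so there is no task to wait for.
        session.invoke_api(session.vim, 'MakeDirectory',
                           session.vim.service_content.fileManager,
                           name=ds_path, datacenter=dc_ref,
                           createParentDirectories=True)
    except vexc.FileAlreadyExistsException:
        # The image-cache directory may already exist; that is fine.
        pass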
{{(pid=62070) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 789.844826] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b8a3c7af-0d51-438a-846c-7d58e3ef2ba7 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.849829] env[62070]: DEBUG oslo_vmware.api [None req-9f976259-6d6a-4631-9026-5216a519ae6c tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Waiting for the task: (returnval){ [ 789.849829] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]528e7455-bb8a-121e-90f0-bea1702d04b5" [ 789.849829] env[62070]: _type = "Task" [ 789.849829] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 789.857369] env[62070]: DEBUG oslo_vmware.api [None req-9f976259-6d6a-4631-9026-5216a519ae6c tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]528e7455-bb8a-121e-90f0-bea1702d04b5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 789.936408] env[62070]: DEBUG oslo_vmware.api [None req-4566d04d-4f38-497b-9c8b-991a179272b8 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Task: {'id': task-1121676, 'name': Destroy_Task, 'duration_secs': 0.382079} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 789.936716] env[62070]: INFO nova.virt.vmwareapi.vm_util [None req-4566d04d-4f38-497b-9c8b-991a179272b8 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] [instance: 359ae9f2-a907-459e-99b9-3e043d5d015f] Destroyed the VM [ 789.937148] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-4566d04d-4f38-497b-9c8b-991a179272b8 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] [instance: 359ae9f2-a907-459e-99b9-3e043d5d015f] Deleting Snapshot of the VM instance {{(pid=62070) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 789.937415] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-2e40966a-5656-4f10-b948-e464b4ab2229 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.944019] env[62070]: DEBUG oslo_vmware.api [None req-4566d04d-4f38-497b-9c8b-991a179272b8 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Waiting for the task: (returnval){ [ 789.944019] env[62070]: value = "task-1121678" [ 789.944019] env[62070]: _type = "Task" [ 789.944019] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 789.955786] env[62070]: DEBUG oslo_vmware.api [None req-4566d04d-4f38-497b-9c8b-991a179272b8 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Task: {'id': task-1121678, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 790.023190] env[62070]: DEBUG oslo_vmware.api [None req-0d5d1c6b-8304-4910-b6d3-9f66b7a67ef9 tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] Task: {'id': task-1121677, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.267303} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 790.023190] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-0d5d1c6b-8304-4910-b6d3-9f66b7a67ef9 tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] Deleted the datastore file {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 790.023190] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-0d5d1c6b-8304-4910-b6d3-9f66b7a67ef9 tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] [instance: 1ce155c8-9a10-4eff-b428-31889aa8f638] Deleted contents of the VM from datastore datastore2 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 790.023190] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-0d5d1c6b-8304-4910-b6d3-9f66b7a67ef9 tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] [instance: 1ce155c8-9a10-4eff-b428-31889aa8f638] Instance destroyed {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 790.023190] env[62070]: INFO nova.compute.manager [None req-0d5d1c6b-8304-4910-b6d3-9f66b7a67ef9 tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] [instance: 1ce155c8-9a10-4eff-b428-31889aa8f638] Took 1.22 seconds to destroy the instance on the hypervisor. [ 790.023190] env[62070]: DEBUG oslo.service.loopingcall [None req-0d5d1c6b-8304-4910-b6d3-9f66b7a67ef9 tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 790.023190] env[62070]: DEBUG nova.compute.manager [-] [instance: 1ce155c8-9a10-4eff-b428-31889aa8f638] Deallocating network for instance {{(pid=62070) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 790.023190] env[62070]: DEBUG nova.network.neutron [-] [instance: 1ce155c8-9a10-4eff-b428-31889aa8f638] deallocate_for_instance() {{(pid=62070) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 790.084045] env[62070]: DEBUG oslo_vmware.api [None req-90df06cb-f655-463e-98fa-a0efa725c8cc tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] Task: {'id': task-1121674, 'name': ReconfigVM_Task, 'duration_secs': 0.710221} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 790.084374] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-90df06cb-f655-463e-98fa-a0efa725c8cc tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] [instance: b7fdf23e-1e39-4745-ae84-38b7fa89aa5d] Reconfigured VM instance instance-00000036 to attach disk [datastore1] b7fdf23e-1e39-4745-ae84-38b7fa89aa5d/b7fdf23e-1e39-4745-ae84-38b7fa89aa5d.vmdk or device None with type sparse {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 790.085087] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8fd3feb5-59ab-4ea9-953f-0d0692ec07ee {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.091932] env[62070]: DEBUG oslo_vmware.api [None req-90df06cb-f655-463e-98fa-a0efa725c8cc tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] Waiting for the task: (returnval){ [ 790.091932] env[62070]: value = "task-1121679" [ 790.091932] env[62070]: _type = "Task" [ 790.091932] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 790.103163] env[62070]: DEBUG nova.scheduler.client.report [None req-2de264e4-258a-4ba0-b64b-88da6d460fb5 tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 790.111017] env[62070]: DEBUG oslo_vmware.api [None req-90df06cb-f655-463e-98fa-a0efa725c8cc tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] Task: {'id': task-1121679, 'name': Rename_Task} progress is 6%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 790.359905] env[62070]: DEBUG oslo_vmware.api [None req-9f976259-6d6a-4631-9026-5216a519ae6c tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]528e7455-bb8a-121e-90f0-bea1702d04b5, 'name': SearchDatastore_Task, 'duration_secs': 0.009707} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 790.360724] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ab4bd2e5-4094-4f35-9be9-45ce34628e21 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.368347] env[62070]: DEBUG oslo_vmware.api [None req-9f976259-6d6a-4631-9026-5216a519ae6c tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Waiting for the task: (returnval){ [ 790.368347] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]52d7233f-421e-c8cd-16da-db74b915e588" [ 790.368347] env[62070]: _type = "Task" [ 790.368347] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 790.376405] env[62070]: DEBUG oslo_vmware.api [None req-9f976259-6d6a-4631-9026-5216a519ae6c tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52d7233f-421e-c8cd-16da-db74b915e588, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 790.453496] env[62070]: DEBUG oslo_vmware.api [None req-4566d04d-4f38-497b-9c8b-991a179272b8 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Task: {'id': task-1121678, 'name': RemoveSnapshot_Task} progress is 97%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 790.602854] env[62070]: DEBUG oslo_vmware.api [None req-90df06cb-f655-463e-98fa-a0efa725c8cc tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] Task: {'id': task-1121679, 'name': Rename_Task, 'duration_secs': 0.152094} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 790.603171] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-90df06cb-f655-463e-98fa-a0efa725c8cc tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] [instance: b7fdf23e-1e39-4745-ae84-38b7fa89aa5d] Powering on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 790.603509] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e843cdc3-ee61-4fe1-8f4a-e2ef5e4158ab {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.609937] env[62070]: DEBUG oslo_vmware.api [None req-90df06cb-f655-463e-98fa-a0efa725c8cc tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] Waiting for the task: (returnval){ [ 790.609937] env[62070]: value = "task-1121680" [ 790.609937] env[62070]: _type = "Task" [ 790.609937] env[62070]: } to complete. 
{{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 790.613491] env[62070]: DEBUG oslo_concurrency.lockutils [None req-2de264e4-258a-4ba0-b64b-88da6d460fb5 tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.678s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 790.613993] env[62070]: DEBUG nova.compute.manager [None req-2de264e4-258a-4ba0-b64b-88da6d460fb5 tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] [instance: 2368b649-f931-454c-92cc-971df4155d90] Start building networks asynchronously for instance. {{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 790.616650] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 24.885s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 790.617950] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 790.617950] env[62070]: DEBUG nova.compute.resource_tracker [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62070) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 790.617950] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6578fefe-a6f2-433c-911f-0627322a57aa tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 22.045s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 790.619107] env[62070]: INFO nova.compute.claims [None req-6578fefe-a6f2-433c-911f-0627322a57aa tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] [instance: f75ed36e-16c8-4a6b-bd39-eb4057ef0691] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 790.624223] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d23d0132-4c78-46f5-aebc-ef6860751530 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.628217] env[62070]: DEBUG oslo_vmware.api [None req-90df06cb-f655-463e-98fa-a0efa725c8cc tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] Task: {'id': task-1121680, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 790.634402] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27a6cfd0-d4eb-4d15-a48b-363471ce105b {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.649371] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7996898f-d873-44ce-ac9e-a5e2c7d58347 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.656447] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66bda17f-c3c2-4a10-9369-7d0dabdeac49 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.690017] env[62070]: DEBUG nova.compute.resource_tracker [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180304MB free_disk=169GB free_vcpus=48 pci_devices=None {{(pid=62070) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 790.690017] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 790.877184] env[62070]: DEBUG oslo_vmware.api [None req-9f976259-6d6a-4631-9026-5216a519ae6c tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52d7233f-421e-c8cd-16da-db74b915e588, 'name': SearchDatastore_Task, 'duration_secs': 0.033258} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 790.877497] env[62070]: DEBUG oslo_concurrency.lockutils [None req-9f976259-6d6a-4631-9026-5216a519ae6c tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Releasing lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 790.877768] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-9f976259-6d6a-4631-9026-5216a519ae6c tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore1] 10672096-00ba-4481-8ab3-085a185076db/10672096-00ba-4481-8ab3-085a185076db.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 790.878065] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7f636970-a3a9-4bcd-8f98-2d71ab33977e {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.884095] env[62070]: DEBUG oslo_vmware.api [None req-9f976259-6d6a-4631-9026-5216a519ae6c tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Waiting for the task: (returnval){ [ 790.884095] env[62070]: value = "task-1121681" [ 790.884095] env[62070]: _type = "Task" [ 790.884095] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 790.894100] env[62070]: DEBUG oslo_vmware.api [None req-9f976259-6d6a-4631-9026-5216a519ae6c tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Task: {'id': task-1121681, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 790.955011] env[62070]: DEBUG oslo_vmware.api [None req-4566d04d-4f38-497b-9c8b-991a179272b8 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Task: {'id': task-1121678, 'name': RemoveSnapshot_Task, 'duration_secs': 1.008728} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 790.955435] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-4566d04d-4f38-497b-9c8b-991a179272b8 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] [instance: 359ae9f2-a907-459e-99b9-3e043d5d015f] Deleted Snapshot of the VM instance {{(pid=62070) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 790.955834] env[62070]: INFO nova.compute.manager [None req-4566d04d-4f38-497b-9c8b-991a179272b8 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] [instance: 359ae9f2-a907-459e-99b9-3e043d5d015f] Took 16.44 seconds to snapshot the instance on the hypervisor. 
[ 791.049676] env[62070]: DEBUG nova.network.neutron [-] [instance: 1ce155c8-9a10-4eff-b428-31889aa8f638] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 791.121242] env[62070]: DEBUG oslo_vmware.api [None req-90df06cb-f655-463e-98fa-a0efa725c8cc tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] Task: {'id': task-1121680, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 791.123726] env[62070]: DEBUG nova.compute.utils [None req-2de264e4-258a-4ba0-b64b-88da6d460fb5 tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Using /dev/sd instead of None {{(pid=62070) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 791.128156] env[62070]: DEBUG nova.compute.manager [None req-2de264e4-258a-4ba0-b64b-88da6d460fb5 tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] [instance: 2368b649-f931-454c-92cc-971df4155d90] Allocating IP information in the background. {{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 791.128342] env[62070]: DEBUG nova.network.neutron [None req-2de264e4-258a-4ba0-b64b-88da6d460fb5 tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] [instance: 2368b649-f931-454c-92cc-971df4155d90] allocate_for_instance() {{(pid=62070) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 791.147176] env[62070]: DEBUG nova.network.neutron [None req-15709be5-751d-460f-82a0-c02f3a133ee4 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] [instance: 0ac963b1-120a-464b-8228-3393135dd182] Successfully updated port: 7df84135-5c3d-48c5-b2cf-176e77094879 {{(pid=62070) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 791.182313] env[62070]: DEBUG nova.policy [None req-2de264e4-258a-4ba0-b64b-88da6d460fb5 tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0c0209c9a3c7400990f8f1d27a29e3d0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7525c88cd803420094fb2af593ba5d65', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62070) authorize /opt/stack/nova/nova/policy.py:201}} [ 791.344464] env[62070]: DEBUG nova.compute.manager [req-694b0eea-2a91-4fe6-bbb1-855a50246f7d req-3aaa5862-5476-4631-809f-265724e35fe4 service nova] [instance: 1ce155c8-9a10-4eff-b428-31889aa8f638] Received event network-vif-deleted-9cd48720-b8f6-4b76-ba6d-e6f0d9916a62 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 791.344627] env[62070]: DEBUG nova.compute.manager [req-694b0eea-2a91-4fe6-bbb1-855a50246f7d req-3aaa5862-5476-4631-809f-265724e35fe4 service nova] [instance: 1ce155c8-9a10-4eff-b428-31889aa8f638] Received event network-vif-deleted-266d9e51-fe0e-458d-a1b5-ecdae4dd0c6b {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 791.399641] env[62070]: 
DEBUG oslo_vmware.api [None req-9f976259-6d6a-4631-9026-5216a519ae6c tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Task: {'id': task-1121681, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 791.522400] env[62070]: DEBUG nova.compute.manager [req-38d1fa53-8295-4b62-8e70-4ed498af950e req-6f1a43c4-b566-411b-b534-eded778ab63f service nova] [instance: 0ac963b1-120a-464b-8228-3393135dd182] Received event network-vif-plugged-7df84135-5c3d-48c5-b2cf-176e77094879 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 791.522621] env[62070]: DEBUG oslo_concurrency.lockutils [req-38d1fa53-8295-4b62-8e70-4ed498af950e req-6f1a43c4-b566-411b-b534-eded778ab63f service nova] Acquiring lock "0ac963b1-120a-464b-8228-3393135dd182-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 791.522828] env[62070]: DEBUG oslo_concurrency.lockutils [req-38d1fa53-8295-4b62-8e70-4ed498af950e req-6f1a43c4-b566-411b-b534-eded778ab63f service nova] Lock "0ac963b1-120a-464b-8228-3393135dd182-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 791.526019] env[62070]: DEBUG oslo_concurrency.lockutils [req-38d1fa53-8295-4b62-8e70-4ed498af950e req-6f1a43c4-b566-411b-b534-eded778ab63f service nova] Lock "0ac963b1-120a-464b-8228-3393135dd182-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 791.526019] env[62070]: DEBUG nova.compute.manager [req-38d1fa53-8295-4b62-8e70-4ed498af950e req-6f1a43c4-b566-411b-b534-eded778ab63f service nova] [instance: 0ac963b1-120a-464b-8228-3393135dd182] No waiting events found dispatching network-vif-plugged-7df84135-5c3d-48c5-b2cf-176e77094879 {{(pid=62070) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 791.526019] env[62070]: WARNING nova.compute.manager [req-38d1fa53-8295-4b62-8e70-4ed498af950e req-6f1a43c4-b566-411b-b534-eded778ab63f service nova] [instance: 0ac963b1-120a-464b-8228-3393135dd182] Received unexpected event network-vif-plugged-7df84135-5c3d-48c5-b2cf-176e77094879 for instance with vm_state active and task_state None. [ 791.556879] env[62070]: INFO nova.compute.manager [-] [instance: 1ce155c8-9a10-4eff-b428-31889aa8f638] Took 1.53 seconds to deallocate network for instance. [ 791.577284] env[62070]: DEBUG nova.network.neutron [None req-2de264e4-258a-4ba0-b64b-88da6d460fb5 tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] [instance: 2368b649-f931-454c-92cc-971df4155d90] Successfully created port: 903ef3d7-deb3-4330-800a-6c593704799d {{(pid=62070) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 791.619495] env[62070]: DEBUG oslo_vmware.api [None req-90df06cb-f655-463e-98fa-a0efa725c8cc tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] Task: {'id': task-1121680, 'name': PowerOnVM_Task, 'duration_secs': 0.578309} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 791.619755] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-90df06cb-f655-463e-98fa-a0efa725c8cc tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] [instance: b7fdf23e-1e39-4745-ae84-38b7fa89aa5d] Powered on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 791.619942] env[62070]: INFO nova.compute.manager [None req-90df06cb-f655-463e-98fa-a0efa725c8cc tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] [instance: b7fdf23e-1e39-4745-ae84-38b7fa89aa5d] Took 8.82 seconds to spawn the instance on the hypervisor. [ 791.620149] env[62070]: DEBUG nova.compute.manager [None req-90df06cb-f655-463e-98fa-a0efa725c8cc tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] [instance: b7fdf23e-1e39-4745-ae84-38b7fa89aa5d] Checking state {{(pid=62070) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 791.620946] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa3295aa-0d0e-415c-957b-2e79cfd31ee4 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.631998] env[62070]: DEBUG nova.compute.manager [None req-2de264e4-258a-4ba0-b64b-88da6d460fb5 tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] [instance: 2368b649-f931-454c-92cc-971df4155d90] Start building block device mappings for instance. {{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 791.650093] env[62070]: DEBUG oslo_concurrency.lockutils [None req-15709be5-751d-460f-82a0-c02f3a133ee4 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Acquiring lock "refresh_cache-0ac963b1-120a-464b-8228-3393135dd182" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 791.650190] env[62070]: DEBUG oslo_concurrency.lockutils [None req-15709be5-751d-460f-82a0-c02f3a133ee4 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Acquired lock "refresh_cache-0ac963b1-120a-464b-8228-3393135dd182" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 791.650355] env[62070]: DEBUG nova.network.neutron [None req-15709be5-751d-460f-82a0-c02f3a133ee4 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] [instance: 0ac963b1-120a-464b-8228-3393135dd182] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 791.895637] env[62070]: DEBUG oslo_vmware.api [None req-9f976259-6d6a-4631-9026-5216a519ae6c tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Task: {'id': task-1121681, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.652237} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 791.897947] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-9f976259-6d6a-4631-9026-5216a519ae6c tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore1] 10672096-00ba-4481-8ab3-085a185076db/10672096-00ba-4481-8ab3-085a185076db.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 791.898201] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-9f976259-6d6a-4631-9026-5216a519ae6c tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] [instance: 10672096-00ba-4481-8ab3-085a185076db] Extending root virtual disk to 1048576 {{(pid=62070) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 791.898649] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f69ae48f-5d8c-46b5-92fe-b36b7baa1c8d {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.904970] env[62070]: DEBUG oslo_vmware.api [None req-9f976259-6d6a-4631-9026-5216a519ae6c tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Waiting for the task: (returnval){ [ 791.904970] env[62070]: value = "task-1121682" [ 791.904970] env[62070]: _type = "Task" [ 791.904970] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 791.919574] env[62070]: DEBUG oslo_vmware.api [None req-9f976259-6d6a-4631-9026-5216a519ae6c tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Task: {'id': task-1121682, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 792.026530] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc4fbf84-0f3d-447c-a316-1e290a22d854 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.034044] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd9f97cf-ef2a-4cfe-a882-cf144f681552 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.064224] env[62070]: DEBUG oslo_concurrency.lockutils [None req-0d5d1c6b-8304-4910-b6d3-9f66b7a67ef9 tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 792.065020] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a73bd06-6e81-4417-9cce-c82801773e67 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.072467] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd9dea7a-4e44-4520-8693-edc5ecced505 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.085342] env[62070]: DEBUG nova.compute.provider_tree [None req-6578fefe-a6f2-433c-911f-0627322a57aa tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Updating inventory in ProviderTree for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 792.143636] env[62070]: INFO nova.compute.manager [None req-90df06cb-f655-463e-98fa-a0efa725c8cc tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] [instance: b7fdf23e-1e39-4745-ae84-38b7fa89aa5d] Took 36.93 seconds to build instance. [ 792.187703] env[62070]: WARNING nova.network.neutron [None req-15709be5-751d-460f-82a0-c02f3a133ee4 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] [instance: 0ac963b1-120a-464b-8228-3393135dd182] 48dc51c7-cfa4-452e-9d72-2968d9a40dfa already exists in list: networks containing: ['48dc51c7-cfa4-452e-9d72-2968d9a40dfa']. ignoring it [ 792.415413] env[62070]: DEBUG oslo_vmware.api [None req-9f976259-6d6a-4631-9026-5216a519ae6c tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Task: {'id': task-1121682, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.077624} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 792.417973] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-9f976259-6d6a-4631-9026-5216a519ae6c tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] [instance: 10672096-00ba-4481-8ab3-085a185076db] Extended root virtual disk {{(pid=62070) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 792.418770] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-988bbf11-f76e-4bed-a794-7ad2ebacb29c {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.442878] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-9f976259-6d6a-4631-9026-5216a519ae6c tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] [instance: 10672096-00ba-4481-8ab3-085a185076db] Reconfiguring VM instance instance-00000037 to attach disk [datastore1] 10672096-00ba-4481-8ab3-085a185076db/10672096-00ba-4481-8ab3-085a185076db.vmdk or device None with type sparse {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 792.443429] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c5c3e754-cd9a-49a5-af66-5d98307d61af {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.464975] env[62070]: DEBUG oslo_vmware.api [None req-9f976259-6d6a-4631-9026-5216a519ae6c tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Waiting for the task: (returnval){ [ 792.464975] env[62070]: value = "task-1121683" [ 792.464975] env[62070]: _type = "Task" [ 792.464975] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 792.472442] env[62070]: DEBUG oslo_vmware.api [None req-9f976259-6d6a-4631-9026-5216a519ae6c tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Task: {'id': task-1121683, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 792.497982] env[62070]: DEBUG nova.network.neutron [None req-15709be5-751d-460f-82a0-c02f3a133ee4 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] [instance: 0ac963b1-120a-464b-8228-3393135dd182] Updating instance_info_cache with network_info: [{"id": "6326b098-3c76-4152-b623-8921285ec01b", "address": "fa:16:3e:cc:12:22", "network": {"id": "48dc51c7-cfa4-452e-9d72-2968d9a40dfa", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-274800531-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.179", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "925dff51764c4b56ae7ea05fbde2ecdd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2c7c1b46-cb81-45da-b5aa-7905d4da5854", "external-id": "nsx-vlan-transportzone-15", "segmentation_id": 15, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6326b098-3c", "ovs_interfaceid": "6326b098-3c76-4152-b623-8921285ec01b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "7df84135-5c3d-48c5-b2cf-176e77094879", "address": "fa:16:3e:b9:a9:95", "network": {"id": "48dc51c7-cfa4-452e-9d72-2968d9a40dfa", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-274800531-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "925dff51764c4b56ae7ea05fbde2ecdd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2c7c1b46-cb81-45da-b5aa-7905d4da5854", "external-id": "nsx-vlan-transportzone-15", "segmentation_id": 15, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7df84135-5c", "ovs_interfaceid": "7df84135-5c3d-48c5-b2cf-176e77094879", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 792.605975] env[62070]: ERROR nova.scheduler.client.report [None req-6578fefe-a6f2-433c-911f-0627322a57aa tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] [req-f3395faa-a79d-4c0e-a085-1b3403fc4788] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 
'allocation_ratio': 1.0}}] for resource provider with UUID 21c7c111-1b69-4468-b2c4-5dd96014fbd6. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-f3395faa-a79d-4c0e-a085-1b3403fc4788"}]} [ 792.621839] env[62070]: DEBUG nova.scheduler.client.report [None req-6578fefe-a6f2-433c-911f-0627322a57aa tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Refreshing inventories for resource provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 792.636584] env[62070]: DEBUG nova.scheduler.client.report [None req-6578fefe-a6f2-433c-911f-0627322a57aa tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Updating ProviderTree inventory for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 792.636819] env[62070]: DEBUG nova.compute.provider_tree [None req-6578fefe-a6f2-433c-911f-0627322a57aa tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Updating inventory in ProviderTree for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 792.644386] env[62070]: DEBUG nova.compute.manager [None req-2de264e4-258a-4ba0-b64b-88da6d460fb5 tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] [instance: 2368b649-f931-454c-92cc-971df4155d90] Start spawning the instance on the hypervisor. 
{{(pid=62070) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 792.646959] env[62070]: DEBUG oslo_concurrency.lockutils [None req-90df06cb-f655-463e-98fa-a0efa725c8cc tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] Lock "b7fdf23e-1e39-4745-ae84-38b7fa89aa5d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 119.210s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 792.649134] env[62070]: DEBUG nova.scheduler.client.report [None req-6578fefe-a6f2-433c-911f-0627322a57aa tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Refreshing aggregate associations for resource provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6, aggregates: None {{(pid=62070) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 792.667033] env[62070]: DEBUG nova.virt.hardware [None req-2de264e4-258a-4ba0-b64b-88da6d460fb5 tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T09:21:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T09:21:17Z,direct_url=,disk_format='vmdk',id=43ea607c-7ece-4601-9b11-75c6a16aa7dd,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9d42cb2bbadf40d6b35f237f71234611',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T09:21:18Z,virtual_size=,visibility=), allow threads: False {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 792.667033] env[62070]: DEBUG nova.virt.hardware [None req-2de264e4-258a-4ba0-b64b-88da6d460fb5 tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Flavor limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 792.667033] env[62070]: DEBUG nova.virt.hardware [None req-2de264e4-258a-4ba0-b64b-88da6d460fb5 tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Image limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 792.667475] env[62070]: DEBUG nova.virt.hardware [None req-2de264e4-258a-4ba0-b64b-88da6d460fb5 tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Flavor pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 792.667475] env[62070]: DEBUG nova.virt.hardware [None req-2de264e4-258a-4ba0-b64b-88da6d460fb5 tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Image pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 792.667653] env[62070]: DEBUG nova.virt.hardware [None req-2de264e4-258a-4ba0-b64b-88da6d460fb5 tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Chose sockets=0, cores=0, threads=0; 
limits were sockets=65536, cores=65536, threads=65536 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 792.667743] env[62070]: DEBUG nova.virt.hardware [None req-2de264e4-258a-4ba0-b64b-88da6d460fb5 tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 792.667933] env[62070]: DEBUG nova.virt.hardware [None req-2de264e4-258a-4ba0-b64b-88da6d460fb5 tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 792.668142] env[62070]: DEBUG nova.virt.hardware [None req-2de264e4-258a-4ba0-b64b-88da6d460fb5 tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Got 1 possible topologies {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 792.668334] env[62070]: DEBUG nova.virt.hardware [None req-2de264e4-258a-4ba0-b64b-88da6d460fb5 tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 792.668590] env[62070]: DEBUG nova.virt.hardware [None req-2de264e4-258a-4ba0-b64b-88da6d460fb5 tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 792.669659] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d0e1418-9b4a-40d1-bc6e-06be3f328ffc {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.672928] env[62070]: DEBUG nova.scheduler.client.report [None req-6578fefe-a6f2-433c-911f-0627322a57aa tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Refreshing trait associations for resource provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6, traits: COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO {{(pid=62070) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 792.680029] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80ae7f3d-50df-44d7-95b6-df8d1714f7f3 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.976395] env[62070]: DEBUG oslo_vmware.api [None req-9f976259-6d6a-4631-9026-5216a519ae6c tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Task: {'id': task-1121683, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 793.000808] env[62070]: DEBUG oslo_concurrency.lockutils [None req-15709be5-751d-460f-82a0-c02f3a133ee4 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Releasing lock "refresh_cache-0ac963b1-120a-464b-8228-3393135dd182" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 793.001052] env[62070]: DEBUG oslo_concurrency.lockutils [None req-15709be5-751d-460f-82a0-c02f3a133ee4 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Acquiring lock "0ac963b1-120a-464b-8228-3393135dd182" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 793.001222] env[62070]: DEBUG oslo_concurrency.lockutils [None req-15709be5-751d-460f-82a0-c02f3a133ee4 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Acquired lock "0ac963b1-120a-464b-8228-3393135dd182" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 793.002170] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb08aadb-81ca-4285-bdce-d82c15c8a49a {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.021260] env[62070]: DEBUG nova.virt.hardware [None req-15709be5-751d-460f-82a0-c02f3a133ee4 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T09:21:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 793.021396] env[62070]: DEBUG nova.virt.hardware [None req-15709be5-751d-460f-82a0-c02f3a133ee4 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Flavor limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 793.021538] env[62070]: DEBUG nova.virt.hardware [None req-15709be5-751d-460f-82a0-c02f3a133ee4 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Image limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 793.021936] env[62070]: DEBUG nova.virt.hardware [None req-15709be5-751d-460f-82a0-c02f3a133ee4 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Flavor pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 793.021936] env[62070]: DEBUG nova.virt.hardware [None req-15709be5-751d-460f-82a0-c02f3a133ee4 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Image pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:392}} [ 793.023034] env[62070]: DEBUG nova.virt.hardware [None req-15709be5-751d-460f-82a0-c02f3a133ee4 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 793.023034] env[62070]: DEBUG nova.virt.hardware [None req-15709be5-751d-460f-82a0-c02f3a133ee4 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 793.023034] env[62070]: DEBUG nova.virt.hardware [None req-15709be5-751d-460f-82a0-c02f3a133ee4 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 793.023034] env[62070]: DEBUG nova.virt.hardware [None req-15709be5-751d-460f-82a0-c02f3a133ee4 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Got 1 possible topologies {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 793.023034] env[62070]: DEBUG nova.virt.hardware [None req-15709be5-751d-460f-82a0-c02f3a133ee4 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 793.023034] env[62070]: DEBUG nova.virt.hardware [None req-15709be5-751d-460f-82a0-c02f3a133ee4 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 793.029311] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-15709be5-751d-460f-82a0-c02f3a133ee4 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] [instance: 0ac963b1-120a-464b-8228-3393135dd182] Reconfiguring VM to attach interface {{(pid=62070) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1929}} [ 793.029902] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ac9b2953-8394-438c-83de-40dcac51a1ff {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.053856] env[62070]: DEBUG oslo_vmware.api [None req-15709be5-751d-460f-82a0-c02f3a133ee4 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Waiting for the task: (returnval){ [ 793.053856] env[62070]: value = "task-1121684" [ 793.053856] env[62070]: _type = "Task" [ 793.053856] env[62070]: } to complete. 
{{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 793.062122] env[62070]: DEBUG oslo_vmware.api [None req-15709be5-751d-460f-82a0-c02f3a133ee4 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Task: {'id': task-1121684, 'name': ReconfigVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 793.101043] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9630c89-61a6-4372-93b5-6a17ab878929 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.109173] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a29fba96-0b10-4cdf-ae21-894504729b8d {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.141848] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55c92a85-1303-437f-8a68-f55d0556d1e6 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.148694] env[62070]: DEBUG nova.compute.manager [None req-cbed1b11-9034-4566-ae5c-78171a6c24e6 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] [instance: e74fd58c-cfa8-45c4-8f02-96234b4a9192] Starting instance... {{(pid=62070) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 793.152232] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-adb7f1c3-ed42-4c86-8408-edb824c6a433 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.166713] env[62070]: DEBUG nova.compute.provider_tree [None req-6578fefe-a6f2-433c-911f-0627322a57aa tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Updating inventory in ProviderTree for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 793.219387] env[62070]: DEBUG nova.network.neutron [None req-2de264e4-258a-4ba0-b64b-88da6d460fb5 tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] [instance: 2368b649-f931-454c-92cc-971df4155d90] Successfully updated port: 903ef3d7-deb3-4330-800a-6c593704799d {{(pid=62070) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 793.246813] env[62070]: DEBUG nova.compute.manager [None req-5ea9a700-be66-42ec-a724-4e7eb98c7948 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] [instance: 242cf24f-f9e0-49ca-9b3e-a2b88b3cdcdc] Checking state {{(pid=62070) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 793.248023] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1564ab4f-2918-4d0e-b3d7-71853b3f009f 
{{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.382438] env[62070]: DEBUG nova.compute.manager [req-13363040-a1d1-44ba-9bf4-2df1292dae46 req-faff7638-84d1-4d2b-8402-0352bd1ced8f service nova] [instance: 2368b649-f931-454c-92cc-971df4155d90] Received event network-vif-plugged-903ef3d7-deb3-4330-800a-6c593704799d {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 793.382680] env[62070]: DEBUG oslo_concurrency.lockutils [req-13363040-a1d1-44ba-9bf4-2df1292dae46 req-faff7638-84d1-4d2b-8402-0352bd1ced8f service nova] Acquiring lock "2368b649-f931-454c-92cc-971df4155d90-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 793.382894] env[62070]: DEBUG oslo_concurrency.lockutils [req-13363040-a1d1-44ba-9bf4-2df1292dae46 req-faff7638-84d1-4d2b-8402-0352bd1ced8f service nova] Lock "2368b649-f931-454c-92cc-971df4155d90-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 793.383083] env[62070]: DEBUG oslo_concurrency.lockutils [req-13363040-a1d1-44ba-9bf4-2df1292dae46 req-faff7638-84d1-4d2b-8402-0352bd1ced8f service nova] Lock "2368b649-f931-454c-92cc-971df4155d90-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 793.383259] env[62070]: DEBUG nova.compute.manager [req-13363040-a1d1-44ba-9bf4-2df1292dae46 req-faff7638-84d1-4d2b-8402-0352bd1ced8f service nova] [instance: 2368b649-f931-454c-92cc-971df4155d90] No waiting events found dispatching network-vif-plugged-903ef3d7-deb3-4330-800a-6c593704799d {{(pid=62070) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 793.383427] env[62070]: WARNING nova.compute.manager [req-13363040-a1d1-44ba-9bf4-2df1292dae46 req-faff7638-84d1-4d2b-8402-0352bd1ced8f service nova] [instance: 2368b649-f931-454c-92cc-971df4155d90] Received unexpected event network-vif-plugged-903ef3d7-deb3-4330-800a-6c593704799d for instance with vm_state building and task_state spawning. [ 793.383588] env[62070]: DEBUG nova.compute.manager [req-13363040-a1d1-44ba-9bf4-2df1292dae46 req-faff7638-84d1-4d2b-8402-0352bd1ced8f service nova] [instance: 2368b649-f931-454c-92cc-971df4155d90] Received event network-changed-903ef3d7-deb3-4330-800a-6c593704799d {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 793.383781] env[62070]: DEBUG nova.compute.manager [req-13363040-a1d1-44ba-9bf4-2df1292dae46 req-faff7638-84d1-4d2b-8402-0352bd1ced8f service nova] [instance: 2368b649-f931-454c-92cc-971df4155d90] Refreshing instance network info cache due to event network-changed-903ef3d7-deb3-4330-800a-6c593704799d. 
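
The event entries above (network-vif-plugged received, pop_instance_event, "No waiting events found", the WARNING about an unexpected event) follow a simple waiter/dispatch pattern: a spawning thread registers interest in a named event, and the handler for external Neutron events pops and signals it, or warns if nobody registered. The sketch below is a simplified stand-in for that idea, not Nova's InstanceEvents class.

    import threading

    class InstanceEvents:
        def __init__(self):
            self._events = {}          # (instance_uuid, event_name) -> Event
            self._lock = threading.Lock()

        def prepare(self, instance_uuid, event_name):
            ev = threading.Event()
            with self._lock:
                self._events[(instance_uuid, event_name)] = ev
            return ev

        def pop_and_signal(self, instance_uuid, event_name):
            with self._lock:
                ev = self._events.pop((instance_uuid, event_name), None)
            if ev is None:
                # Nobody is waiting: logged as an unexpected event above.
                print('Received unexpected event %s' % event_name)
                return False
            ev.set()
            return True

    events = InstanceEvents()
    waiter = events.prepare('2368b649', 'network-vif-plugged-903ef3d7')
    events.pop_and_signal('2368b649', 'network-vif-plugged-903ef3d7')
    waiter.wait(timeout=1)
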
{{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 793.383977] env[62070]: DEBUG oslo_concurrency.lockutils [req-13363040-a1d1-44ba-9bf4-2df1292dae46 req-faff7638-84d1-4d2b-8402-0352bd1ced8f service nova] Acquiring lock "refresh_cache-2368b649-f931-454c-92cc-971df4155d90" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 793.384158] env[62070]: DEBUG oslo_concurrency.lockutils [req-13363040-a1d1-44ba-9bf4-2df1292dae46 req-faff7638-84d1-4d2b-8402-0352bd1ced8f service nova] Acquired lock "refresh_cache-2368b649-f931-454c-92cc-971df4155d90" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 793.384337] env[62070]: DEBUG nova.network.neutron [req-13363040-a1d1-44ba-9bf4-2df1292dae46 req-faff7638-84d1-4d2b-8402-0352bd1ced8f service nova] [instance: 2368b649-f931-454c-92cc-971df4155d90] Refreshing network info cache for port 903ef3d7-deb3-4330-800a-6c593704799d {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 793.476205] env[62070]: DEBUG oslo_vmware.api [None req-9f976259-6d6a-4631-9026-5216a519ae6c tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Task: {'id': task-1121683, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 793.563850] env[62070]: DEBUG oslo_vmware.api [None req-15709be5-751d-460f-82a0-c02f3a133ee4 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Task: {'id': task-1121684, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 793.577929] env[62070]: DEBUG nova.compute.manager [req-cdb2a621-ca34-470d-9889-7c3f72d52deb req-9341f3b4-b298-42ef-b62f-6d90610ec0dc service nova] [instance: 0ac963b1-120a-464b-8228-3393135dd182] Received event network-changed-7df84135-5c3d-48c5-b2cf-176e77094879 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 793.578156] env[62070]: DEBUG nova.compute.manager [req-cdb2a621-ca34-470d-9889-7c3f72d52deb req-9341f3b4-b298-42ef-b62f-6d90610ec0dc service nova] [instance: 0ac963b1-120a-464b-8228-3393135dd182] Refreshing instance network info cache due to event network-changed-7df84135-5c3d-48c5-b2cf-176e77094879. 
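
The "refresh_cache-<uuid>" lock acquisitions above guard the per-instance network info cache so the external-event handler and the compute manager do not rebuild it concurrently. A minimal sketch of that pattern, assuming oslo.concurrency's named-lock context manager and a hypothetical `fetch_nw_info` callable standing in for the Neutron query:

    from oslo_concurrency import lockutils

    network_cache = {}

    def refresh_instance_nw_cache(instance_uuid, fetch_nw_info):
        # Only one thread rebuilds a given instance's cache entry at a time.
        with lockutils.lock('refresh_cache-%s' % instance_uuid):
            network_cache[instance_uuid] = fetch_nw_info(instance_uuid)
            return network_cache[instance_uuid]
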
{{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 793.578353] env[62070]: DEBUG oslo_concurrency.lockutils [req-cdb2a621-ca34-470d-9889-7c3f72d52deb req-9341f3b4-b298-42ef-b62f-6d90610ec0dc service nova] Acquiring lock "refresh_cache-0ac963b1-120a-464b-8228-3393135dd182" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 793.578505] env[62070]: DEBUG oslo_concurrency.lockutils [req-cdb2a621-ca34-470d-9889-7c3f72d52deb req-9341f3b4-b298-42ef-b62f-6d90610ec0dc service nova] Acquired lock "refresh_cache-0ac963b1-120a-464b-8228-3393135dd182" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 793.578670] env[62070]: DEBUG nova.network.neutron [req-cdb2a621-ca34-470d-9889-7c3f72d52deb req-9341f3b4-b298-42ef-b62f-6d90610ec0dc service nova] [instance: 0ac963b1-120a-464b-8228-3393135dd182] Refreshing network info cache for port 7df84135-5c3d-48c5-b2cf-176e77094879 {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 793.672993] env[62070]: DEBUG oslo_concurrency.lockutils [None req-cbed1b11-9034-4566-ae5c-78171a6c24e6 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 793.699557] env[62070]: DEBUG nova.scheduler.client.report [None req-6578fefe-a6f2-433c-911f-0627322a57aa tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Updated inventory for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 with generation 80 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 793.699944] env[62070]: DEBUG nova.compute.provider_tree [None req-6578fefe-a6f2-433c-911f-0627322a57aa tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Updating resource provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 generation from 80 to 81 during operation: update_inventory {{(pid=62070) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 793.700209] env[62070]: DEBUG nova.compute.provider_tree [None req-6578fefe-a6f2-433c-911f-0627322a57aa tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Updating inventory in ProviderTree for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 793.721606] env[62070]: DEBUG oslo_concurrency.lockutils [None 
req-2de264e4-258a-4ba0-b64b-88da6d460fb5 tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Acquiring lock "refresh_cache-2368b649-f931-454c-92cc-971df4155d90" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 793.759263] env[62070]: INFO nova.compute.manager [None req-5ea9a700-be66-42ec-a724-4e7eb98c7948 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] [instance: 242cf24f-f9e0-49ca-9b3e-a2b88b3cdcdc] instance snapshotting [ 793.762159] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77e34e6b-a3e6-4de2-8fff-2743d14b0024 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.781705] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc3ee91b-140d-4a52-b7eb-7940368383ea {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.932060] env[62070]: DEBUG nova.network.neutron [req-13363040-a1d1-44ba-9bf4-2df1292dae46 req-faff7638-84d1-4d2b-8402-0352bd1ced8f service nova] [instance: 2368b649-f931-454c-92cc-971df4155d90] Instance cache missing network info. {{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 793.977464] env[62070]: DEBUG oslo_vmware.api [None req-9f976259-6d6a-4631-9026-5216a519ae6c tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Task: {'id': task-1121683, 'name': ReconfigVM_Task, 'duration_secs': 1.324592} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 793.977658] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-9f976259-6d6a-4631-9026-5216a519ae6c tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] [instance: 10672096-00ba-4481-8ab3-085a185076db] Reconfigured VM instance instance-00000037 to attach disk [datastore1] 10672096-00ba-4481-8ab3-085a185076db/10672096-00ba-4481-8ab3-085a185076db.vmdk or device None with type sparse {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 793.978382] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-dabff3c5-4c3d-4889-9b0b-5095d48801bc {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.986196] env[62070]: DEBUG oslo_vmware.api [None req-9f976259-6d6a-4631-9026-5216a519ae6c tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Waiting for the task: (returnval){ [ 793.986196] env[62070]: value = "task-1121685" [ 793.986196] env[62070]: _type = "Task" [ 793.986196] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 793.994397] env[62070]: DEBUG oslo_vmware.api [None req-9f976259-6d6a-4631-9026-5216a519ae6c tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Task: {'id': task-1121685, 'name': Rename_Task} progress is 0%. 
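
The repeated "Task: {...} progress is N%" entries are the visible side of a polling loop: the API asks vCenter for task state on a fixed interval until the task succeeds or errors out. A generic sketch of that loop, with `read_task_info` as a hypothetical callable returning an object with .state, .progress and .error (oslo.vmware's wait_for_task wraps the same idea in a looping call with error translation):

    import time

    def wait_for_task(task, read_task_info, interval=0.5):
        while True:
            info = read_task_info(task)
            print('Task %s progress is %s%%' % (task, info.progress or 0))
            if info.state == 'success':
                return info
            if info.state == 'error':
                raise RuntimeError('Task %s failed: %s' % (task, info.error))
            time.sleep(interval)
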
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 794.051264] env[62070]: DEBUG nova.network.neutron [req-13363040-a1d1-44ba-9bf4-2df1292dae46 req-faff7638-84d1-4d2b-8402-0352bd1ced8f service nova] [instance: 2368b649-f931-454c-92cc-971df4155d90] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 794.064830] env[62070]: DEBUG oslo_vmware.api [None req-15709be5-751d-460f-82a0-c02f3a133ee4 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Task: {'id': task-1121684, 'name': ReconfigVM_Task, 'duration_secs': 0.926118} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 794.066343] env[62070]: DEBUG oslo_concurrency.lockutils [None req-15709be5-751d-460f-82a0-c02f3a133ee4 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Releasing lock "0ac963b1-120a-464b-8228-3393135dd182" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 794.066476] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-15709be5-751d-460f-82a0-c02f3a133ee4 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] [instance: 0ac963b1-120a-464b-8228-3393135dd182] Reconfigured VM to attach interface {{(pid=62070) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1943}} [ 794.206082] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6578fefe-a6f2-433c-911f-0627322a57aa tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.588s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 794.206499] env[62070]: DEBUG nova.compute.manager [None req-6578fefe-a6f2-433c-911f-0627322a57aa tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] [instance: f75ed36e-16c8-4a6b-bd39-eb4057ef0691] Start building networks asynchronously for instance. 
{{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 794.209900] env[62070]: DEBUG oslo_concurrency.lockutils [None req-79dfc138-9989-48ce-b920-501f82a9be5b tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 24.641s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 794.211658] env[62070]: INFO nova.compute.claims [None req-79dfc138-9989-48ce-b920-501f82a9be5b tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: c3c6e93c-80be-4e71-87fb-2ff8db8d30fe] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 794.296424] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-5ea9a700-be66-42ec-a724-4e7eb98c7948 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] [instance: 242cf24f-f9e0-49ca-9b3e-a2b88b3cdcdc] Creating Snapshot of the VM instance {{(pid=62070) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 794.296749] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-994b53c5-aab7-49dd-9a49-4a2c608150b1 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.303485] env[62070]: DEBUG oslo_vmware.api [None req-5ea9a700-be66-42ec-a724-4e7eb98c7948 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Waiting for the task: (returnval){ [ 794.303485] env[62070]: value = "task-1121686" [ 794.303485] env[62070]: _type = "Task" [ 794.303485] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 794.311773] env[62070]: DEBUG oslo_vmware.api [None req-5ea9a700-be66-42ec-a724-4e7eb98c7948 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Task: {'id': task-1121686, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 794.319261] env[62070]: DEBUG nova.network.neutron [req-cdb2a621-ca34-470d-9889-7c3f72d52deb req-9341f3b4-b298-42ef-b62f-6d90610ec0dc service nova] [instance: 0ac963b1-120a-464b-8228-3393135dd182] Updated VIF entry in instance network info cache for port 7df84135-5c3d-48c5-b2cf-176e77094879. 
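
The "compute_resources" lock entries above bracket the resource tracker's instance claim: while the lock is held, the requested flavor is checked against free capacity and usage is updated atomically. The sketch below is a purely illustrative reduction of that step (the real ResourceTracker also reports allocations to Placement); the usage and limit numbers are made up.

    import threading

    compute_resources = threading.Lock()
    usage = {'vcpus': 10, 'memory_mb': 20480}
    limits = {'vcpus': 192, 'memory_mb': 196078}

    def instance_claim(flavor):
        with compute_resources:
            for res, requested in flavor.items():
                if usage[res] + requested > limits[res]:
                    raise RuntimeError('Claim failed on %s' % res)
            for res, requested in flavor.items():
                usage[res] += requested
            return dict(usage)

    instance_claim({'vcpus': 1, 'memory_mb': 256})   # an m1.micro-sized claim
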
{{(pid=62070) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 794.319706] env[62070]: DEBUG nova.network.neutron [req-cdb2a621-ca34-470d-9889-7c3f72d52deb req-9341f3b4-b298-42ef-b62f-6d90610ec0dc service nova] [instance: 0ac963b1-120a-464b-8228-3393135dd182] Updating instance_info_cache with network_info: [{"id": "6326b098-3c76-4152-b623-8921285ec01b", "address": "fa:16:3e:cc:12:22", "network": {"id": "48dc51c7-cfa4-452e-9d72-2968d9a40dfa", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-274800531-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.179", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "925dff51764c4b56ae7ea05fbde2ecdd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2c7c1b46-cb81-45da-b5aa-7905d4da5854", "external-id": "nsx-vlan-transportzone-15", "segmentation_id": 15, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6326b098-3c", "ovs_interfaceid": "6326b098-3c76-4152-b623-8921285ec01b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "7df84135-5c3d-48c5-b2cf-176e77094879", "address": "fa:16:3e:b9:a9:95", "network": {"id": "48dc51c7-cfa4-452e-9d72-2968d9a40dfa", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-274800531-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "925dff51764c4b56ae7ea05fbde2ecdd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2c7c1b46-cb81-45da-b5aa-7905d4da5854", "external-id": "nsx-vlan-transportzone-15", "segmentation_id": 15, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7df84135-5c", "ovs_interfaceid": "7df84135-5c3d-48c5-b2cf-176e77094879", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 794.496559] env[62070]: DEBUG oslo_vmware.api [None req-9f976259-6d6a-4631-9026-5216a519ae6c tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Task: {'id': task-1121685, 'name': Rename_Task, 'duration_secs': 0.149888} completed successfully. 
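
The cache entry above is a network_info list: one dict per VIF, each carrying subnets, fixed IPs and optional floating IPs. The small helper below shows how to walk that structure to list addresses per port; it is an illustrative accessor, not Nova's NetworkInfo model.

    def list_addresses(network_info):
        out = []
        for vif in network_info:
            for subnet in vif['network']['subnets']:
                for ip in subnet['ips']:
                    out.append((vif['id'], ip['address'], 'fixed'))
                    for fip in ip.get('floating_ips', []):
                        out.append((vif['id'], fip['address'], 'floating'))
        return out

    # For the cache entry above this yields 192.168.128.9 plus its floating
    # IP 10.180.180.179 for port 6326b098..., and 192.168.128.7 for port
    # 7df84135....
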
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 794.496882] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-9f976259-6d6a-4631-9026-5216a519ae6c tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] [instance: 10672096-00ba-4481-8ab3-085a185076db] Powering on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 794.497340] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-95d9c91d-86c0-4436-b326-9143faf5515a {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.503869] env[62070]: DEBUG oslo_vmware.api [None req-9f976259-6d6a-4631-9026-5216a519ae6c tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Waiting for the task: (returnval){ [ 794.503869] env[62070]: value = "task-1121687" [ 794.503869] env[62070]: _type = "Task" [ 794.503869] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 794.512630] env[62070]: DEBUG oslo_vmware.api [None req-9f976259-6d6a-4631-9026-5216a519ae6c tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Task: {'id': task-1121687, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 794.554681] env[62070]: DEBUG oslo_concurrency.lockutils [req-13363040-a1d1-44ba-9bf4-2df1292dae46 req-faff7638-84d1-4d2b-8402-0352bd1ced8f service nova] Releasing lock "refresh_cache-2368b649-f931-454c-92cc-971df4155d90" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 794.555096] env[62070]: DEBUG oslo_concurrency.lockutils [None req-2de264e4-258a-4ba0-b64b-88da6d460fb5 tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Acquired lock "refresh_cache-2368b649-f931-454c-92cc-971df4155d90" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 794.555265] env[62070]: DEBUG nova.network.neutron [None req-2de264e4-258a-4ba0-b64b-88da6d460fb5 tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] [instance: 2368b649-f931-454c-92cc-971df4155d90] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 794.570537] env[62070]: DEBUG oslo_concurrency.lockutils [None req-15709be5-751d-460f-82a0-c02f3a133ee4 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Lock "interface-0ac963b1-120a-464b-8228-3393135dd182-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 6.618s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 794.718885] env[62070]: DEBUG nova.compute.utils [None req-6578fefe-a6f2-433c-911f-0627322a57aa tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Using /dev/sd instead of None {{(pid=62070) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 794.720262] env[62070]: DEBUG nova.compute.manager [None req-6578fefe-a6f2-433c-911f-0627322a57aa 
tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] [instance: f75ed36e-16c8-4a6b-bd39-eb4057ef0691] Allocating IP information in the background. {{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 794.721376] env[62070]: DEBUG nova.network.neutron [None req-6578fefe-a6f2-433c-911f-0627322a57aa tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] [instance: f75ed36e-16c8-4a6b-bd39-eb4057ef0691] allocate_for_instance() {{(pid=62070) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 794.763415] env[62070]: DEBUG nova.policy [None req-6578fefe-a6f2-433c-911f-0627322a57aa tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0c0209c9a3c7400990f8f1d27a29e3d0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7525c88cd803420094fb2af593ba5d65', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62070) authorize /opt/stack/nova/nova/policy.py:201}} [ 794.815348] env[62070]: DEBUG oslo_vmware.api [None req-5ea9a700-be66-42ec-a724-4e7eb98c7948 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Task: {'id': task-1121686, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 794.822409] env[62070]: DEBUG oslo_concurrency.lockutils [req-cdb2a621-ca34-470d-9889-7c3f72d52deb req-9341f3b4-b298-42ef-b62f-6d90610ec0dc service nova] Releasing lock "refresh_cache-0ac963b1-120a-464b-8228-3393135dd182" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 795.015430] env[62070]: DEBUG oslo_vmware.api [None req-9f976259-6d6a-4631-9026-5216a519ae6c tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Task: {'id': task-1121687, 'name': PowerOnVM_Task, 'duration_secs': 0.493616} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 795.015773] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-9f976259-6d6a-4631-9026-5216a519ae6c tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] [instance: 10672096-00ba-4481-8ab3-085a185076db] Powered on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 795.015968] env[62070]: INFO nova.compute.manager [None req-9f976259-6d6a-4631-9026-5216a519ae6c tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] [instance: 10672096-00ba-4481-8ab3-085a185076db] Took 9.50 seconds to spawn the instance on the hypervisor. 
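
The policy entry above fails because network:attach_external_network is typically an admin-only rule and the request credentials only carry the reader/member roles. The sketch below is a simplified stand-in for that role check (a hard-coded rule table instead of oslo.policy), just to make the outcome concrete.

    RULES = {'network:attach_external_network': {'admin'}}

    def authorize(action, credentials):
        required = RULES.get(action, set())
        held = set(credentials.get('roles', []))
        if credentials.get('is_admin'):
            held.add('admin')
        return bool(required & held)

    creds = {'roles': ['reader', 'member'], 'is_admin': False}
    print(authorize('network:attach_external_network', creds))   # False
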
[ 795.016211] env[62070]: DEBUG nova.compute.manager [None req-9f976259-6d6a-4631-9026-5216a519ae6c tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] [instance: 10672096-00ba-4481-8ab3-085a185076db] Checking state {{(pid=62070) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 795.017099] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a103b12-82cc-43f7-af19-5540e56c5193 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.079667] env[62070]: DEBUG nova.network.neutron [None req-6578fefe-a6f2-433c-911f-0627322a57aa tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] [instance: f75ed36e-16c8-4a6b-bd39-eb4057ef0691] Successfully created port: 48f9f765-2091-4515-9b46-46217835d2df {{(pid=62070) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 795.094958] env[62070]: DEBUG nova.network.neutron [None req-2de264e4-258a-4ba0-b64b-88da6d460fb5 tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] [instance: 2368b649-f931-454c-92cc-971df4155d90] Instance cache missing network info. {{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 795.220662] env[62070]: DEBUG nova.compute.manager [None req-6578fefe-a6f2-433c-911f-0627322a57aa tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] [instance: f75ed36e-16c8-4a6b-bd39-eb4057ef0691] Start building block device mappings for instance. {{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 795.252201] env[62070]: DEBUG nova.network.neutron [None req-2de264e4-258a-4ba0-b64b-88da6d460fb5 tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] [instance: 2368b649-f931-454c-92cc-971df4155d90] Updating instance_info_cache with network_info: [{"id": "903ef3d7-deb3-4330-800a-6c593704799d", "address": "fa:16:3e:fe:52:76", "network": {"id": "25f38244-53c8-44e1-a7a9-a8a37ce83ffa", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1382757376-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7525c88cd803420094fb2af593ba5d65", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69e1a5c1-4ae7-409b-8de7-d401684ef60d", "external-id": "nsx-vlan-transportzone-740", "segmentation_id": 740, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap903ef3d7-de", "ovs_interfaceid": "903ef3d7-deb3-4330-800a-6c593704799d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 795.317031] env[62070]: DEBUG oslo_vmware.api [None req-5ea9a700-be66-42ec-a724-4e7eb98c7948 tempest-ListImageFiltersTestJSON-300603416 
tempest-ListImageFiltersTestJSON-300603416-project-member] Task: {'id': task-1121686, 'name': CreateSnapshot_Task, 'duration_secs': 0.578899} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 795.317329] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-5ea9a700-be66-42ec-a724-4e7eb98c7948 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] [instance: 242cf24f-f9e0-49ca-9b3e-a2b88b3cdcdc] Created Snapshot of the VM instance {{(pid=62070) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 795.318091] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b42492d3-678c-453d-90cc-f5ee74ab540a {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.536604] env[62070]: INFO nova.compute.manager [None req-9f976259-6d6a-4631-9026-5216a519ae6c tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] [instance: 10672096-00ba-4481-8ab3-085a185076db] Took 39.47 seconds to build instance. [ 795.659111] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e29eb2fe-bbff-4adf-ad88-310658da4caa {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.667553] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e1e6365-acc8-48bd-a259-470dd4a9eb1e {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.699034] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-599700f1-a546-493b-8f49-a52919eca99d {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.706387] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f27d982-f6a9-4cf7-810d-8bed08704dd7 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.719512] env[62070]: DEBUG nova.compute.provider_tree [None req-79dfc138-9989-48ce-b920-501f82a9be5b tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 795.756820] env[62070]: DEBUG oslo_concurrency.lockutils [None req-2de264e4-258a-4ba0-b64b-88da6d460fb5 tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Releasing lock "refresh_cache-2368b649-f931-454c-92cc-971df4155d90" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 795.757158] env[62070]: DEBUG nova.compute.manager [None req-2de264e4-258a-4ba0-b64b-88da6d460fb5 tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] [instance: 2368b649-f931-454c-92cc-971df4155d90] Instance network_info: |[{"id": "903ef3d7-deb3-4330-800a-6c593704799d", "address": "fa:16:3e:fe:52:76", "network": {"id": "25f38244-53c8-44e1-a7a9-a8a37ce83ffa", "bridge": "br-int", "label": 
"tempest-ListServerFiltersTestJSON-1382757376-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7525c88cd803420094fb2af593ba5d65", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69e1a5c1-4ae7-409b-8de7-d401684ef60d", "external-id": "nsx-vlan-transportzone-740", "segmentation_id": 740, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap903ef3d7-de", "ovs_interfaceid": "903ef3d7-deb3-4330-800a-6c593704799d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 795.757856] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-2de264e4-258a-4ba0-b64b-88da6d460fb5 tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] [instance: 2368b649-f931-454c-92cc-971df4155d90] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:fe:52:76', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '69e1a5c1-4ae7-409b-8de7-d401684ef60d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '903ef3d7-deb3-4330-800a-6c593704799d', 'vif_model': 'vmxnet3'}] {{(pid=62070) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 795.765157] env[62070]: DEBUG oslo.service.loopingcall [None req-2de264e4-258a-4ba0-b64b-88da6d460fb5 tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 795.766036] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2368b649-f931-454c-92cc-971df4155d90] Creating VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 795.766247] env[62070]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-71aa92ec-5204-4a40-9719-a9467de488d7 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.786046] env[62070]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 795.786046] env[62070]: value = "task-1121688" [ 795.786046] env[62070]: _type = "Task" [ 795.786046] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 795.793891] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1121688, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 795.842725] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-5ea9a700-be66-42ec-a724-4e7eb98c7948 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] [instance: 242cf24f-f9e0-49ca-9b3e-a2b88b3cdcdc] Creating linked-clone VM from snapshot {{(pid=62070) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 795.843093] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-4b898d16-5b3f-48af-9c73-f3f3e6865047 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.851254] env[62070]: DEBUG oslo_vmware.api [None req-5ea9a700-be66-42ec-a724-4e7eb98c7948 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Waiting for the task: (returnval){ [ 795.851254] env[62070]: value = "task-1121689" [ 795.851254] env[62070]: _type = "Task" [ 795.851254] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 795.860118] env[62070]: DEBUG oslo_vmware.api [None req-5ea9a700-be66-42ec-a724-4e7eb98c7948 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Task: {'id': task-1121689, 'name': CloneVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 795.976053] env[62070]: DEBUG oslo_concurrency.lockutils [None req-269cfec7-5851-424c-8fb7-c6840fcea8f3 tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] Acquiring lock "b7fdf23e-1e39-4745-ae84-38b7fa89aa5d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 795.976685] env[62070]: DEBUG oslo_concurrency.lockutils [None req-269cfec7-5851-424c-8fb7-c6840fcea8f3 tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] Lock "b7fdf23e-1e39-4745-ae84-38b7fa89aa5d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 795.977084] env[62070]: DEBUG oslo_concurrency.lockutils [None req-269cfec7-5851-424c-8fb7-c6840fcea8f3 tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] Acquiring lock "b7fdf23e-1e39-4745-ae84-38b7fa89aa5d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 795.977433] env[62070]: DEBUG oslo_concurrency.lockutils [None req-269cfec7-5851-424c-8fb7-c6840fcea8f3 tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] Lock "b7fdf23e-1e39-4745-ae84-38b7fa89aa5d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 795.977769] env[62070]: DEBUG oslo_concurrency.lockutils [None req-269cfec7-5851-424c-8fb7-c6840fcea8f3 
tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] Lock "b7fdf23e-1e39-4745-ae84-38b7fa89aa5d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 795.980746] env[62070]: INFO nova.compute.manager [None req-269cfec7-5851-424c-8fb7-c6840fcea8f3 tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] [instance: b7fdf23e-1e39-4745-ae84-38b7fa89aa5d] Terminating instance [ 795.983324] env[62070]: DEBUG nova.compute.manager [None req-269cfec7-5851-424c-8fb7-c6840fcea8f3 tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] [instance: b7fdf23e-1e39-4745-ae84-38b7fa89aa5d] Start destroying the instance on the hypervisor. {{(pid=62070) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 795.983524] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-269cfec7-5851-424c-8fb7-c6840fcea8f3 tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] [instance: b7fdf23e-1e39-4745-ae84-38b7fa89aa5d] Destroying instance {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 795.984491] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6999062-5507-4fde-a194-f0c38ff6d398 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.992696] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-269cfec7-5851-424c-8fb7-c6840fcea8f3 tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] [instance: b7fdf23e-1e39-4745-ae84-38b7fa89aa5d] Powering off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 795.992955] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-23ee249f-9991-4111-b13a-671efbfdcb04 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.999528] env[62070]: DEBUG oslo_vmware.api [None req-269cfec7-5851-424c-8fb7-c6840fcea8f3 tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] Waiting for the task: (returnval){ [ 795.999528] env[62070]: value = "task-1121690" [ 795.999528] env[62070]: _type = "Task" [ 795.999528] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 796.008380] env[62070]: DEBUG oslo_vmware.api [None req-269cfec7-5851-424c-8fb7-c6840fcea8f3 tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] Task: {'id': task-1121690, 'name': PowerOffVM_Task} progress is 0%. 
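
The terminate entries above, together with the unregister and datastore-file deletion that follow, trace a fixed teardown order for instance b7fdf23e...: power off, unregister, delete the instance's datastore directory, then deallocate networking. The sketch below only captures that ordering; the step functions are hypothetical stand-ins for the vmops/ds_util/neutron calls.

    def destroy_instance(instance, power_off, unregister, delete_files,
                         deallocate_network):
        power_off(instance)           # PowerOffVM_Task
        unregister(instance)          # UnregisterVM
        delete_files(instance)        # FileManager.DeleteDatastoreFile_Task
        deallocate_network(instance)  # Neutron port cleanup, retried if needed
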
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 796.039070] env[62070]: DEBUG oslo_concurrency.lockutils [None req-9f976259-6d6a-4631-9026-5216a519ae6c tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Lock "10672096-00ba-4481-8ab3-085a185076db" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 115.197s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 796.223113] env[62070]: DEBUG nova.scheduler.client.report [None req-79dfc138-9989-48ce-b920-501f82a9be5b tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 796.231378] env[62070]: DEBUG nova.compute.manager [None req-6578fefe-a6f2-433c-911f-0627322a57aa tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] [instance: f75ed36e-16c8-4a6b-bd39-eb4057ef0691] Start spawning the instance on the hypervisor. {{(pid=62070) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 796.253706] env[62070]: DEBUG nova.virt.hardware [None req-6578fefe-a6f2-433c-911f-0627322a57aa tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T09:21:37Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T09:21:17Z,direct_url=,disk_format='vmdk',id=43ea607c-7ece-4601-9b11-75c6a16aa7dd,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9d42cb2bbadf40d6b35f237f71234611',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T09:21:18Z,virtual_size=,visibility=), allow threads: False {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 796.253971] env[62070]: DEBUG nova.virt.hardware [None req-6578fefe-a6f2-433c-911f-0627322a57aa tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Flavor limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 796.254191] env[62070]: DEBUG nova.virt.hardware [None req-6578fefe-a6f2-433c-911f-0627322a57aa tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Image limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 796.254398] env[62070]: DEBUG nova.virt.hardware [None req-6578fefe-a6f2-433c-911f-0627322a57aa 
tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Flavor pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 796.254548] env[62070]: DEBUG nova.virt.hardware [None req-6578fefe-a6f2-433c-911f-0627322a57aa tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Image pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 796.254698] env[62070]: DEBUG nova.virt.hardware [None req-6578fefe-a6f2-433c-911f-0627322a57aa tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 796.254904] env[62070]: DEBUG nova.virt.hardware [None req-6578fefe-a6f2-433c-911f-0627322a57aa tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 796.255080] env[62070]: DEBUG nova.virt.hardware [None req-6578fefe-a6f2-433c-911f-0627322a57aa tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 796.255256] env[62070]: DEBUG nova.virt.hardware [None req-6578fefe-a6f2-433c-911f-0627322a57aa tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Got 1 possible topologies {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 796.255421] env[62070]: DEBUG nova.virt.hardware [None req-6578fefe-a6f2-433c-911f-0627322a57aa tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 796.255596] env[62070]: DEBUG nova.virt.hardware [None req-6578fefe-a6f2-433c-911f-0627322a57aa tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 796.256703] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e5a76a3-ac38-4db9-b639-477fd4de3585 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.264189] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1495a394-e80c-44b7-9df1-fb34b4a07a39 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.295549] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1121688, 'name': CreateVM_Task} progress is 99%. 
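
The topology walkthrough above ("Build topologies for 1 vcpu(s) 1:1:1", "Got 1 possible topologies") enumerates every sockets x cores x threads factorisation of the vCPU count that stays within the limits (65536 each here), which for a single vCPU leaves only 1:1:1. The sketch below is a simplified version of that search, not nova.virt.hardware itself.

    def possible_cpu_topologies(vcpus, max_sockets, max_cores, max_threads):
        topologies = []
        for sockets in range(1, min(vcpus, max_sockets) + 1):
            for cores in range(1, min(vcpus, max_cores) + 1):
                for threads in range(1, min(vcpus, max_threads) + 1):
                    if sockets * cores * threads == vcpus:
                        topologies.append((sockets, cores, threads))
        return topologies

    print(possible_cpu_topologies(1, 65536, 65536, 65536))   # [(1, 1, 1)]
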
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 796.361756] env[62070]: DEBUG oslo_vmware.api [None req-5ea9a700-be66-42ec-a724-4e7eb98c7948 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Task: {'id': task-1121689, 'name': CloneVM_Task} progress is 94%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 796.509630] env[62070]: DEBUG oslo_vmware.api [None req-269cfec7-5851-424c-8fb7-c6840fcea8f3 tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] Task: {'id': task-1121690, 'name': PowerOffVM_Task, 'duration_secs': 0.170335} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 796.509923] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-269cfec7-5851-424c-8fb7-c6840fcea8f3 tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] [instance: b7fdf23e-1e39-4745-ae84-38b7fa89aa5d] Powered off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 796.510310] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-269cfec7-5851-424c-8fb7-c6840fcea8f3 tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] [instance: b7fdf23e-1e39-4745-ae84-38b7fa89aa5d] Unregistering the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 796.510593] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8c8cee57-776c-4563-9d03-8e3768c97654 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.542950] env[62070]: DEBUG nova.compute.manager [None req-b5d7932b-5fe7-4bae-8652-828c067cb041 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: 65fe3720-95cb-4620-b1c7-eae9e3bc3943] Starting instance... 
{{(pid=62070) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 796.574532] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-269cfec7-5851-424c-8fb7-c6840fcea8f3 tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] [instance: b7fdf23e-1e39-4745-ae84-38b7fa89aa5d] Unregistered the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 796.574632] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-269cfec7-5851-424c-8fb7-c6840fcea8f3 tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] [instance: b7fdf23e-1e39-4745-ae84-38b7fa89aa5d] Deleting contents of the VM from datastore datastore1 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 796.574768] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-269cfec7-5851-424c-8fb7-c6840fcea8f3 tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] Deleting the datastore file [datastore1] b7fdf23e-1e39-4745-ae84-38b7fa89aa5d {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 796.575051] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-96342ab5-1edb-473d-a30c-1e067bef70e3 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.581883] env[62070]: DEBUG oslo_vmware.api [None req-269cfec7-5851-424c-8fb7-c6840fcea8f3 tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] Waiting for the task: (returnval){ [ 796.581883] env[62070]: value = "task-1121692" [ 796.581883] env[62070]: _type = "Task" [ 796.581883] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 796.591640] env[62070]: DEBUG oslo_vmware.api [None req-269cfec7-5851-424c-8fb7-c6840fcea8f3 tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] Task: {'id': task-1121692, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 796.683088] env[62070]: DEBUG oslo_concurrency.lockutils [None req-929590f7-b02f-4339-9d80-5d6bbb4a18d7 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Acquiring lock "interface-0ac963b1-120a-464b-8228-3393135dd182-848ce3e0-8d08-460b-b770-75628ae28fd3" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 796.683402] env[62070]: DEBUG oslo_concurrency.lockutils [None req-929590f7-b02f-4339-9d80-5d6bbb4a18d7 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Lock "interface-0ac963b1-120a-464b-8228-3393135dd182-848ce3e0-8d08-460b-b770-75628ae28fd3" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 796.683792] env[62070]: DEBUG nova.objects.instance [None req-929590f7-b02f-4339-9d80-5d6bbb4a18d7 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Lazy-loading 'flavor' on Instance uuid 0ac963b1-120a-464b-8228-3393135dd182 {{(pid=62070) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 796.727633] env[62070]: DEBUG oslo_concurrency.lockutils [None req-79dfc138-9989-48ce-b920-501f82a9be5b tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.517s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 796.728446] env[62070]: DEBUG nova.compute.manager [None req-79dfc138-9989-48ce-b920-501f82a9be5b tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: c3c6e93c-80be-4e71-87fb-2ff8db8d30fe] Start building networks asynchronously for instance. {{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 796.731303] env[62070]: DEBUG oslo_concurrency.lockutils [None req-7d89b3fa-4dcc-4992-8983-de4ad7c29798 tempest-ServerMetadataTestJSON-571714377 tempest-ServerMetadataTestJSON-571714377-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.275s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 796.734174] env[62070]: INFO nova.compute.claims [None req-7d89b3fa-4dcc-4992-8983-de4ad7c29798 tempest-ServerMetadataTestJSON-571714377 tempest-ServerMetadataTestJSON-571714377-project-member] [instance: efef4aac-5b74-4a41-9f74-3d4cb4f80cdb] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 796.796352] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1121688, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 796.832268] env[62070]: DEBUG nova.compute.manager [req-4683ab9f-f5d7-4dbd-8f68-8f434b74aaf4 req-5d65030e-12bb-4973-8793-b116be11deeb service nova] [instance: f75ed36e-16c8-4a6b-bd39-eb4057ef0691] Received event network-vif-plugged-48f9f765-2091-4515-9b46-46217835d2df {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 796.832565] env[62070]: DEBUG oslo_concurrency.lockutils [req-4683ab9f-f5d7-4dbd-8f68-8f434b74aaf4 req-5d65030e-12bb-4973-8793-b116be11deeb service nova] Acquiring lock "f75ed36e-16c8-4a6b-bd39-eb4057ef0691-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 796.832840] env[62070]: DEBUG oslo_concurrency.lockutils [req-4683ab9f-f5d7-4dbd-8f68-8f434b74aaf4 req-5d65030e-12bb-4973-8793-b116be11deeb service nova] Lock "f75ed36e-16c8-4a6b-bd39-eb4057ef0691-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 796.833071] env[62070]: DEBUG oslo_concurrency.lockutils [req-4683ab9f-f5d7-4dbd-8f68-8f434b74aaf4 req-5d65030e-12bb-4973-8793-b116be11deeb service nova] Lock "f75ed36e-16c8-4a6b-bd39-eb4057ef0691-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 796.833263] env[62070]: DEBUG nova.compute.manager [req-4683ab9f-f5d7-4dbd-8f68-8f434b74aaf4 req-5d65030e-12bb-4973-8793-b116be11deeb service nova] [instance: f75ed36e-16c8-4a6b-bd39-eb4057ef0691] No waiting events found dispatching network-vif-plugged-48f9f765-2091-4515-9b46-46217835d2df {{(pid=62070) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 796.833434] env[62070]: WARNING nova.compute.manager [req-4683ab9f-f5d7-4dbd-8f68-8f434b74aaf4 req-5d65030e-12bb-4973-8793-b116be11deeb service nova] [instance: f75ed36e-16c8-4a6b-bd39-eb4057ef0691] Received unexpected event network-vif-plugged-48f9f765-2091-4515-9b46-46217835d2df for instance with vm_state building and task_state spawning. [ 796.861453] env[62070]: DEBUG oslo_vmware.api [None req-5ea9a700-be66-42ec-a724-4e7eb98c7948 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Task: {'id': task-1121689, 'name': CloneVM_Task} progress is 94%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 797.069968] env[62070]: DEBUG oslo_concurrency.lockutils [None req-b5d7932b-5fe7-4bae-8652-828c067cb041 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 797.091750] env[62070]: DEBUG oslo_vmware.api [None req-269cfec7-5851-424c-8fb7-c6840fcea8f3 tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] Task: {'id': task-1121692, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.133471} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 797.092388] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-269cfec7-5851-424c-8fb7-c6840fcea8f3 tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] Deleted the datastore file {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 797.092582] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-269cfec7-5851-424c-8fb7-c6840fcea8f3 tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] [instance: b7fdf23e-1e39-4745-ae84-38b7fa89aa5d] Deleted contents of the VM from datastore datastore1 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 797.092762] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-269cfec7-5851-424c-8fb7-c6840fcea8f3 tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] [instance: b7fdf23e-1e39-4745-ae84-38b7fa89aa5d] Instance destroyed {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 797.092938] env[62070]: INFO nova.compute.manager [None req-269cfec7-5851-424c-8fb7-c6840fcea8f3 tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] [instance: b7fdf23e-1e39-4745-ae84-38b7fa89aa5d] Took 1.11 seconds to destroy the instance on the hypervisor. [ 797.093258] env[62070]: DEBUG oslo.service.loopingcall [None req-269cfec7-5851-424c-8fb7-c6840fcea8f3 tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 797.093390] env[62070]: DEBUG nova.compute.manager [-] [instance: b7fdf23e-1e39-4745-ae84-38b7fa89aa5d] Deallocating network for instance {{(pid=62070) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 797.093466] env[62070]: DEBUG nova.network.neutron [-] [instance: b7fdf23e-1e39-4745-ae84-38b7fa89aa5d] deallocate_for_instance() {{(pid=62070) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 797.235773] env[62070]: DEBUG nova.network.neutron [None req-6578fefe-a6f2-433c-911f-0627322a57aa tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] [instance: f75ed36e-16c8-4a6b-bd39-eb4057ef0691] Successfully updated port: 48f9f765-2091-4515-9b46-46217835d2df {{(pid=62070) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 797.245646] env[62070]: DEBUG nova.compute.utils [None req-79dfc138-9989-48ce-b920-501f82a9be5b tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Using /dev/sd instead of None {{(pid=62070) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 797.248559] env[62070]: DEBUG nova.compute.manager [None req-79dfc138-9989-48ce-b920-501f82a9be5b tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: c3c6e93c-80be-4e71-87fb-2ff8db8d30fe] Allocating IP information in the background. 
{{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 797.248729] env[62070]: DEBUG nova.network.neutron [None req-79dfc138-9989-48ce-b920-501f82a9be5b tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: c3c6e93c-80be-4e71-87fb-2ff8db8d30fe] allocate_for_instance() {{(pid=62070) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 797.300245] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1121688, 'name': CreateVM_Task, 'duration_secs': 1.468315} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 797.300245] env[62070]: DEBUG nova.policy [None req-79dfc138-9989-48ce-b920-501f82a9be5b tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '85707399ddf04d03bfb487560df1212e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd079c0ef3ed745fcaf69dc728dca4466', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62070) authorize /opt/stack/nova/nova/policy.py:201}} [ 797.301139] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2368b649-f931-454c-92cc-971df4155d90] Created VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 797.301768] env[62070]: DEBUG oslo_concurrency.lockutils [None req-2de264e4-258a-4ba0-b64b-88da6d460fb5 tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 797.301925] env[62070]: DEBUG oslo_concurrency.lockutils [None req-2de264e4-258a-4ba0-b64b-88da6d460fb5 tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Acquired lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 797.302271] env[62070]: DEBUG oslo_concurrency.lockutils [None req-2de264e4-258a-4ba0-b64b-88da6d460fb5 tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 797.302853] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-19a73e79-d0e5-4478-9bc9-d51024ac0375 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.307525] env[62070]: DEBUG oslo_vmware.api [None req-2de264e4-258a-4ba0-b64b-88da6d460fb5 tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Waiting for the task: (returnval){ [ 797.307525] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]5263e5ed-45a6-87ca-c890-b3e2201cd558" [ 797.307525] 
env[62070]: _type = "Task" [ 797.307525] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 797.323029] env[62070]: DEBUG oslo_vmware.api [None req-2de264e4-258a-4ba0-b64b-88da6d460fb5 tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]5263e5ed-45a6-87ca-c890-b3e2201cd558, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 797.362578] env[62070]: DEBUG oslo_vmware.api [None req-5ea9a700-be66-42ec-a724-4e7eb98c7948 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Task: {'id': task-1121689, 'name': CloneVM_Task, 'duration_secs': 1.502561} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 797.362999] env[62070]: INFO nova.virt.vmwareapi.vmops [None req-5ea9a700-be66-42ec-a724-4e7eb98c7948 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] [instance: 242cf24f-f9e0-49ca-9b3e-a2b88b3cdcdc] Created linked-clone VM from snapshot [ 797.363661] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f089572c-0208-41c6-9834-b1fa99953c37 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.374222] env[62070]: DEBUG nova.virt.vmwareapi.images [None req-5ea9a700-be66-42ec-a724-4e7eb98c7948 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] [instance: 242cf24f-f9e0-49ca-9b3e-a2b88b3cdcdc] Uploading image 30541779-297e-45bd-87c3-2fea50b14cb3 {{(pid=62070) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:442}} [ 797.376385] env[62070]: DEBUG nova.objects.instance [None req-929590f7-b02f-4339-9d80-5d6bbb4a18d7 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Lazy-loading 'pci_requests' on Instance uuid 0ac963b1-120a-464b-8228-3393135dd182 {{(pid=62070) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 797.399922] env[62070]: DEBUG oslo_vmware.rw_handles [None req-5ea9a700-be66-42ec-a724-4e7eb98c7948 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 797.399922] env[62070]: value = "vm-245396" [ 797.399922] env[62070]: _type = "VirtualMachine" [ 797.399922] env[62070]: }. 
{{(pid=62070) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 797.399922] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-13ca61b6-46af-4290-982a-5cc6303b26e7 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.407349] env[62070]: DEBUG oslo_vmware.rw_handles [None req-5ea9a700-be66-42ec-a724-4e7eb98c7948 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Lease: (returnval){ [ 797.407349] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]5265e63d-a7e2-2cb1-4235-0123a6d9c4a6" [ 797.407349] env[62070]: _type = "HttpNfcLease" [ 797.407349] env[62070]: } obtained for exporting VM: (result){ [ 797.407349] env[62070]: value = "vm-245396" [ 797.407349] env[62070]: _type = "VirtualMachine" [ 797.407349] env[62070]: }. {{(pid=62070) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 797.407349] env[62070]: DEBUG oslo_vmware.api [None req-5ea9a700-be66-42ec-a724-4e7eb98c7948 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Waiting for the lease: (returnval){ [ 797.407349] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]5265e63d-a7e2-2cb1-4235-0123a6d9c4a6" [ 797.407349] env[62070]: _type = "HttpNfcLease" [ 797.407349] env[62070]: } to be ready. {{(pid=62070) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 797.413877] env[62070]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 797.413877] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]5265e63d-a7e2-2cb1-4235-0123a6d9c4a6" [ 797.413877] env[62070]: _type = "HttpNfcLease" [ 797.413877] env[62070]: } is initializing. {{(pid=62070) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 797.559565] env[62070]: DEBUG nova.network.neutron [None req-79dfc138-9989-48ce-b920-501f82a9be5b tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: c3c6e93c-80be-4e71-87fb-2ff8db8d30fe] Successfully created port: 8ed3d649-bc61-493f-b8e9-2e7f7fad49ed {{(pid=62070) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 797.575230] env[62070]: DEBUG nova.compute.manager [req-2e321dcf-08a4-4f5c-9b65-fa3022524446 req-cdf1115f-4fc8-411f-a3b2-ad1f900f8df9 service nova] [instance: f75ed36e-16c8-4a6b-bd39-eb4057ef0691] Received event network-changed-48f9f765-2091-4515-9b46-46217835d2df {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 797.575340] env[62070]: DEBUG nova.compute.manager [req-2e321dcf-08a4-4f5c-9b65-fa3022524446 req-cdf1115f-4fc8-411f-a3b2-ad1f900f8df9 service nova] [instance: f75ed36e-16c8-4a6b-bd39-eb4057ef0691] Refreshing instance network info cache due to event network-changed-48f9f765-2091-4515-9b46-46217835d2df. 
{{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 797.575812] env[62070]: DEBUG oslo_concurrency.lockutils [req-2e321dcf-08a4-4f5c-9b65-fa3022524446 req-cdf1115f-4fc8-411f-a3b2-ad1f900f8df9 service nova] Acquiring lock "refresh_cache-f75ed36e-16c8-4a6b-bd39-eb4057ef0691" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 797.575931] env[62070]: DEBUG oslo_concurrency.lockutils [req-2e321dcf-08a4-4f5c-9b65-fa3022524446 req-cdf1115f-4fc8-411f-a3b2-ad1f900f8df9 service nova] Acquired lock "refresh_cache-f75ed36e-16c8-4a6b-bd39-eb4057ef0691" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 797.576175] env[62070]: DEBUG nova.network.neutron [req-2e321dcf-08a4-4f5c-9b65-fa3022524446 req-cdf1115f-4fc8-411f-a3b2-ad1f900f8df9 service nova] [instance: f75ed36e-16c8-4a6b-bd39-eb4057ef0691] Refreshing network info cache for port 48f9f765-2091-4515-9b46-46217835d2df {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 797.594362] env[62070]: DEBUG nova.compute.manager [req-d2f3cb5b-65ee-4d1a-92c2-e5da2e3a99b9 req-1c5d4d07-028a-4652-acd0-03522e24bb1a service nova] [instance: b7fdf23e-1e39-4745-ae84-38b7fa89aa5d] Received event network-vif-deleted-f5d6c6b2-70c6-484b-8c25-b15a991f1434 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 797.594362] env[62070]: INFO nova.compute.manager [req-d2f3cb5b-65ee-4d1a-92c2-e5da2e3a99b9 req-1c5d4d07-028a-4652-acd0-03522e24bb1a service nova] [instance: b7fdf23e-1e39-4745-ae84-38b7fa89aa5d] Neutron deleted interface f5d6c6b2-70c6-484b-8c25-b15a991f1434; detaching it from the instance and deleting it from the info cache [ 797.594362] env[62070]: DEBUG nova.network.neutron [req-d2f3cb5b-65ee-4d1a-92c2-e5da2e3a99b9 req-1c5d4d07-028a-4652-acd0-03522e24bb1a service nova] [instance: b7fdf23e-1e39-4745-ae84-38b7fa89aa5d] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 797.745067] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6578fefe-a6f2-433c-911f-0627322a57aa tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Acquiring lock "refresh_cache-f75ed36e-16c8-4a6b-bd39-eb4057ef0691" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 797.756865] env[62070]: DEBUG nova.compute.manager [None req-79dfc138-9989-48ce-b920-501f82a9be5b tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: c3c6e93c-80be-4e71-87fb-2ff8db8d30fe] Start building block device mappings for instance. {{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 797.822420] env[62070]: DEBUG oslo_vmware.api [None req-2de264e4-258a-4ba0-b64b-88da6d460fb5 tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]5263e5ed-45a6-87ca-c890-b3e2201cd558, 'name': SearchDatastore_Task, 'duration_secs': 0.011325} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 797.822752] env[62070]: DEBUG oslo_concurrency.lockutils [None req-2de264e4-258a-4ba0-b64b-88da6d460fb5 tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Releasing lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 797.822990] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-2de264e4-258a-4ba0-b64b-88da6d460fb5 tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] [instance: 2368b649-f931-454c-92cc-971df4155d90] Processing image 43ea607c-7ece-4601-9b11-75c6a16aa7dd {{(pid=62070) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 797.823274] env[62070]: DEBUG oslo_concurrency.lockutils [None req-2de264e4-258a-4ba0-b64b-88da6d460fb5 tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 797.823425] env[62070]: DEBUG oslo_concurrency.lockutils [None req-2de264e4-258a-4ba0-b64b-88da6d460fb5 tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Acquired lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 797.823608] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-2de264e4-258a-4ba0-b64b-88da6d460fb5 tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 797.826298] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-57508c72-e94e-4007-b387-b74e6eeacdd7 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.836495] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-2de264e4-258a-4ba0-b64b-88da6d460fb5 tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 797.836678] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-2de264e4-258a-4ba0-b64b-88da6d460fb5 tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62070) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 797.837422] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-90b85825-03e8-4dab-ad57-f549dd0ca7d9 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.846361] env[62070]: DEBUG oslo_vmware.api [None req-2de264e4-258a-4ba0-b64b-88da6d460fb5 tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Waiting for the task: (returnval){ [ 797.846361] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]52042307-8551-8da6-1979-2e7b6bc76417" [ 797.846361] env[62070]: _type = "Task" [ 797.846361] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 797.855487] env[62070]: DEBUG oslo_vmware.api [None req-2de264e4-258a-4ba0-b64b-88da6d460fb5 tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52042307-8551-8da6-1979-2e7b6bc76417, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 797.881139] env[62070]: DEBUG nova.objects.base [None req-929590f7-b02f-4339-9d80-5d6bbb4a18d7 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Object Instance<0ac963b1-120a-464b-8228-3393135dd182> lazy-loaded attributes: flavor,pci_requests {{(pid=62070) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 797.881445] env[62070]: DEBUG nova.network.neutron [None req-929590f7-b02f-4339-9d80-5d6bbb4a18d7 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] [instance: 0ac963b1-120a-464b-8228-3393135dd182] allocate_for_instance() {{(pid=62070) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 797.920125] env[62070]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 797.920125] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]5265e63d-a7e2-2cb1-4235-0123a6d9c4a6" [ 797.920125] env[62070]: _type = "HttpNfcLease" [ 797.920125] env[62070]: } is ready. {{(pid=62070) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 797.922800] env[62070]: DEBUG oslo_vmware.rw_handles [None req-5ea9a700-be66-42ec-a724-4e7eb98c7948 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 797.922800] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]5265e63d-a7e2-2cb1-4235-0123a6d9c4a6" [ 797.922800] env[62070]: _type = "HttpNfcLease" [ 797.922800] env[62070]: }. 
{{(pid=62070) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 797.925352] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77c8fc09-e64d-4adf-92d8-9da195014904 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.933848] env[62070]: DEBUG nova.network.neutron [-] [instance: b7fdf23e-1e39-4745-ae84-38b7fa89aa5d] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 797.935074] env[62070]: DEBUG oslo_vmware.rw_handles [None req-5ea9a700-be66-42ec-a724-4e7eb98c7948 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52e58e3a-ea0c-c8aa-f4c0-3b6b1ba57006/disk-0.vmdk from lease info. {{(pid=62070) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 797.935236] env[62070]: DEBUG oslo_vmware.rw_handles [None req-5ea9a700-be66-42ec-a724-4e7eb98c7948 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52e58e3a-ea0c-c8aa-f4c0-3b6b1ba57006/disk-0.vmdk for reading. {{(pid=62070) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 797.994014] env[62070]: DEBUG nova.policy [None req-929590f7-b02f-4339-9d80-5d6bbb4a18d7 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7534320dee8f486e90f5174aa94d00bd', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '925dff51764c4b56ae7ea05fbde2ecdd', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62070) authorize /opt/stack/nova/nova/policy.py:201}} [ 798.096575] env[62070]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-31e37bcc-79cf-4f0f-98a3-af3aaeadaa7b {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.111282] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28c1a501-e74b-490c-985a-e9aedcdb4410 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.128120] env[62070]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-20f3149a-4191-4978-b793-f8643fd9cb39 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.153752] env[62070]: DEBUG nova.compute.manager [req-d2f3cb5b-65ee-4d1a-92c2-e5da2e3a99b9 req-1c5d4d07-028a-4652-acd0-03522e24bb1a service nova] [instance: b7fdf23e-1e39-4745-ae84-38b7fa89aa5d] Detach interface failed, port_id=f5d6c6b2-70c6-484b-8c25-b15a991f1434, reason: Instance b7fdf23e-1e39-4745-ae84-38b7fa89aa5d could not be found. 
{{(pid=62070) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 798.157472] env[62070]: DEBUG nova.network.neutron [req-2e321dcf-08a4-4f5c-9b65-fa3022524446 req-cdf1115f-4fc8-411f-a3b2-ad1f900f8df9 service nova] [instance: f75ed36e-16c8-4a6b-bd39-eb4057ef0691] Instance cache missing network info. {{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 798.265661] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48f366f1-c422-4bd3-addc-27a4b56b728f {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.274272] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2881c41-6816-440c-a7bd-ed5d3366797e {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.306870] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6066ded-d225-4314-ba40-f5f6c7236f63 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.316756] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0acadddf-0b4e-4b3d-bca6-f7ae6b300c35 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.336635] env[62070]: DEBUG nova.compute.provider_tree [None req-7d89b3fa-4dcc-4992-8983-de4ad7c29798 tempest-ServerMetadataTestJSON-571714377 tempest-ServerMetadataTestJSON-571714377-project-member] Updating inventory in ProviderTree for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 798.359116] env[62070]: DEBUG oslo_vmware.api [None req-2de264e4-258a-4ba0-b64b-88da6d460fb5 tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52042307-8551-8da6-1979-2e7b6bc76417, 'name': SearchDatastore_Task, 'duration_secs': 0.010029} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 798.360108] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d785b7dc-f258-4ed3-b275-ad99cf3f29ff {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.368023] env[62070]: DEBUG oslo_vmware.api [None req-2de264e4-258a-4ba0-b64b-88da6d460fb5 tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Waiting for the task: (returnval){ [ 798.368023] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]524e7d1a-b95a-3e2c-ab93-07383df65f87" [ 798.368023] env[62070]: _type = "Task" [ 798.368023] env[62070]: } to complete. 
{{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 798.376073] env[62070]: DEBUG oslo_vmware.api [None req-2de264e4-258a-4ba0-b64b-88da6d460fb5 tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]524e7d1a-b95a-3e2c-ab93-07383df65f87, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 798.438467] env[62070]: INFO nova.compute.manager [-] [instance: b7fdf23e-1e39-4745-ae84-38b7fa89aa5d] Took 1.34 seconds to deallocate network for instance. [ 798.476169] env[62070]: DEBUG nova.network.neutron [req-2e321dcf-08a4-4f5c-9b65-fa3022524446 req-cdf1115f-4fc8-411f-a3b2-ad1f900f8df9 service nova] [instance: f75ed36e-16c8-4a6b-bd39-eb4057ef0691] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 798.816343] env[62070]: DEBUG nova.compute.manager [None req-79dfc138-9989-48ce-b920-501f82a9be5b tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: c3c6e93c-80be-4e71-87fb-2ff8db8d30fe] Start spawning the instance on the hypervisor. {{(pid=62070) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 798.816343] env[62070]: DEBUG nova.virt.hardware [None req-79dfc138-9989-48ce-b920-501f82a9be5b tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T09:21:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T09:21:17Z,direct_url=,disk_format='vmdk',id=43ea607c-7ece-4601-9b11-75c6a16aa7dd,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9d42cb2bbadf40d6b35f237f71234611',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T09:21:18Z,virtual_size=,visibility=), allow threads: False {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 798.816343] env[62070]: DEBUG nova.virt.hardware [None req-79dfc138-9989-48ce-b920-501f82a9be5b tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Flavor limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 798.816343] env[62070]: DEBUG nova.virt.hardware [None req-79dfc138-9989-48ce-b920-501f82a9be5b tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Image limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 798.816343] env[62070]: DEBUG nova.virt.hardware [None req-79dfc138-9989-48ce-b920-501f82a9be5b tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Flavor pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 798.817536] env[62070]: DEBUG nova.virt.hardware [None req-79dfc138-9989-48ce-b920-501f82a9be5b 
tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Image pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 798.817612] env[62070]: DEBUG nova.virt.hardware [None req-79dfc138-9989-48ce-b920-501f82a9be5b tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 798.817888] env[62070]: DEBUG nova.virt.hardware [None req-79dfc138-9989-48ce-b920-501f82a9be5b tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 798.818908] env[62070]: DEBUG nova.virt.hardware [None req-79dfc138-9989-48ce-b920-501f82a9be5b tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 798.819796] env[62070]: DEBUG nova.virt.hardware [None req-79dfc138-9989-48ce-b920-501f82a9be5b tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Got 1 possible topologies {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 798.820366] env[62070]: DEBUG nova.virt.hardware [None req-79dfc138-9989-48ce-b920-501f82a9be5b tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 798.820759] env[62070]: DEBUG nova.virt.hardware [None req-79dfc138-9989-48ce-b920-501f82a9be5b tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 798.824882] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34cb2e1f-4d13-446f-b061-f30b517f3dd1 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.837792] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79946955-b06f-4d9c-a220-8e9349c0660b {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.884671] env[62070]: DEBUG oslo_vmware.api [None req-2de264e4-258a-4ba0-b64b-88da6d460fb5 tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]524e7d1a-b95a-3e2c-ab93-07383df65f87, 'name': SearchDatastore_Task, 'duration_secs': 0.016076} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 798.886538] env[62070]: DEBUG nova.scheduler.client.report [None req-7d89b3fa-4dcc-4992-8983-de4ad7c29798 tempest-ServerMetadataTestJSON-571714377 tempest-ServerMetadataTestJSON-571714377-project-member] Updated inventory for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 with generation 81 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 798.886538] env[62070]: DEBUG nova.compute.provider_tree [None req-7d89b3fa-4dcc-4992-8983-de4ad7c29798 tempest-ServerMetadataTestJSON-571714377 tempest-ServerMetadataTestJSON-571714377-project-member] Updating resource provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 generation from 81 to 82 during operation: update_inventory {{(pid=62070) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 798.886753] env[62070]: DEBUG nova.compute.provider_tree [None req-7d89b3fa-4dcc-4992-8983-de4ad7c29798 tempest-ServerMetadataTestJSON-571714377 tempest-ServerMetadataTestJSON-571714377-project-member] Updating inventory in ProviderTree for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 798.891398] env[62070]: DEBUG oslo_concurrency.lockutils [None req-2de264e4-258a-4ba0-b64b-88da6d460fb5 tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Releasing lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 798.891885] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-2de264e4-258a-4ba0-b64b-88da6d460fb5 tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore2] 2368b649-f931-454c-92cc-971df4155d90/2368b649-f931-454c-92cc-971df4155d90.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 798.892593] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f804e73f-e163-42d1-bce7-a448d98150ef {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.903707] env[62070]: DEBUG oslo_vmware.api [None req-2de264e4-258a-4ba0-b64b-88da6d460fb5 tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Waiting for the task: (returnval){ [ 798.903707] 
env[62070]: value = "task-1121694" [ 798.903707] env[62070]: _type = "Task" [ 798.903707] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 798.916564] env[62070]: DEBUG oslo_vmware.api [None req-2de264e4-258a-4ba0-b64b-88da6d460fb5 tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Task: {'id': task-1121694, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 798.945193] env[62070]: DEBUG oslo_concurrency.lockutils [None req-269cfec7-5851-424c-8fb7-c6840fcea8f3 tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 798.980146] env[62070]: DEBUG oslo_concurrency.lockutils [req-2e321dcf-08a4-4f5c-9b65-fa3022524446 req-cdf1115f-4fc8-411f-a3b2-ad1f900f8df9 service nova] Releasing lock "refresh_cache-f75ed36e-16c8-4a6b-bd39-eb4057ef0691" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 798.980497] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6578fefe-a6f2-433c-911f-0627322a57aa tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Acquired lock "refresh_cache-f75ed36e-16c8-4a6b-bd39-eb4057ef0691" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 798.980798] env[62070]: DEBUG nova.network.neutron [None req-6578fefe-a6f2-433c-911f-0627322a57aa tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] [instance: f75ed36e-16c8-4a6b-bd39-eb4057ef0691] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 799.150619] env[62070]: DEBUG nova.network.neutron [None req-79dfc138-9989-48ce-b920-501f82a9be5b tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: c3c6e93c-80be-4e71-87fb-2ff8db8d30fe] Successfully updated port: 8ed3d649-bc61-493f-b8e9-2e7f7fad49ed {{(pid=62070) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 799.396795] env[62070]: DEBUG oslo_concurrency.lockutils [None req-7d89b3fa-4dcc-4992-8983-de4ad7c29798 tempest-ServerMetadataTestJSON-571714377 tempest-ServerMetadataTestJSON-571714377-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.665s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 799.398433] env[62070]: DEBUG nova.compute.manager [None req-7d89b3fa-4dcc-4992-8983-de4ad7c29798 tempest-ServerMetadataTestJSON-571714377 tempest-ServerMetadataTestJSON-571714377-project-member] [instance: efef4aac-5b74-4a41-9f74-3d4cb4f80cdb] Start building networks asynchronously for instance. 
{{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 799.402154] env[62070]: DEBUG oslo_concurrency.lockutils [None req-24dc43a0-9208-4ff7-9026-97bbf9aa150b tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 28.328s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 799.403979] env[62070]: INFO nova.compute.claims [None req-24dc43a0-9208-4ff7-9026-97bbf9aa150b tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 328fbc92-8162-4e12-a02d-6e9cafe0c365] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 799.422344] env[62070]: DEBUG oslo_vmware.api [None req-2de264e4-258a-4ba0-b64b-88da6d460fb5 tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Task: {'id': task-1121694, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 799.523954] env[62070]: DEBUG nova.network.neutron [None req-6578fefe-a6f2-433c-911f-0627322a57aa tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] [instance: f75ed36e-16c8-4a6b-bd39-eb4057ef0691] Instance cache missing network info. {{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 799.653576] env[62070]: DEBUG oslo_concurrency.lockutils [None req-79dfc138-9989-48ce-b920-501f82a9be5b tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Acquiring lock "refresh_cache-c3c6e93c-80be-4e71-87fb-2ff8db8d30fe" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 799.653751] env[62070]: DEBUG oslo_concurrency.lockutils [None req-79dfc138-9989-48ce-b920-501f82a9be5b tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Acquired lock "refresh_cache-c3c6e93c-80be-4e71-87fb-2ff8db8d30fe" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 799.653910] env[62070]: DEBUG nova.network.neutron [None req-79dfc138-9989-48ce-b920-501f82a9be5b tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: c3c6e93c-80be-4e71-87fb-2ff8db8d30fe] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 799.682410] env[62070]: DEBUG nova.network.neutron [None req-6578fefe-a6f2-433c-911f-0627322a57aa tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] [instance: f75ed36e-16c8-4a6b-bd39-eb4057ef0691] Updating instance_info_cache with network_info: [{"id": "48f9f765-2091-4515-9b46-46217835d2df", "address": "fa:16:3e:be:a8:10", "network": {"id": "25f38244-53c8-44e1-a7a9-a8a37ce83ffa", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1382757376-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], 
"meta": {"injected": false, "tenant_id": "7525c88cd803420094fb2af593ba5d65", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69e1a5c1-4ae7-409b-8de7-d401684ef60d", "external-id": "nsx-vlan-transportzone-740", "segmentation_id": 740, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap48f9f765-20", "ovs_interfaceid": "48f9f765-2091-4515-9b46-46217835d2df", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 799.910175] env[62070]: DEBUG nova.compute.utils [None req-7d89b3fa-4dcc-4992-8983-de4ad7c29798 tempest-ServerMetadataTestJSON-571714377 tempest-ServerMetadataTestJSON-571714377-project-member] Using /dev/sd instead of None {{(pid=62070) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 799.914670] env[62070]: DEBUG nova.compute.manager [None req-7d89b3fa-4dcc-4992-8983-de4ad7c29798 tempest-ServerMetadataTestJSON-571714377 tempest-ServerMetadataTestJSON-571714377-project-member] [instance: efef4aac-5b74-4a41-9f74-3d4cb4f80cdb] Allocating IP information in the background. {{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 799.914670] env[62070]: DEBUG nova.network.neutron [None req-7d89b3fa-4dcc-4992-8983-de4ad7c29798 tempest-ServerMetadataTestJSON-571714377 tempest-ServerMetadataTestJSON-571714377-project-member] [instance: efef4aac-5b74-4a41-9f74-3d4cb4f80cdb] allocate_for_instance() {{(pid=62070) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 799.928304] env[62070]: DEBUG oslo_vmware.api [None req-2de264e4-258a-4ba0-b64b-88da6d460fb5 tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Task: {'id': task-1121694, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.748953} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 799.928652] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-2de264e4-258a-4ba0-b64b-88da6d460fb5 tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore2] 2368b649-f931-454c-92cc-971df4155d90/2368b649-f931-454c-92cc-971df4155d90.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 799.928942] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-2de264e4-258a-4ba0-b64b-88da6d460fb5 tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] [instance: 2368b649-f931-454c-92cc-971df4155d90] Extending root virtual disk to 1048576 {{(pid=62070) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 799.929235] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-41bab146-231a-4723-a306-82952648fbc8 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.942085] env[62070]: DEBUG oslo_vmware.api [None req-2de264e4-258a-4ba0-b64b-88da6d460fb5 tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Waiting for the task: (returnval){ [ 799.942085] env[62070]: value = "task-1121695" [ 799.942085] env[62070]: _type = "Task" [ 799.942085] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 799.956833] env[62070]: DEBUG oslo_vmware.api [None req-2de264e4-258a-4ba0-b64b-88da6d460fb5 tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Task: {'id': task-1121695, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 799.983036] env[62070]: DEBUG nova.policy [None req-7d89b3fa-4dcc-4992-8983-de4ad7c29798 tempest-ServerMetadataTestJSON-571714377 tempest-ServerMetadataTestJSON-571714377-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '17d6ad6850c145a58492fa778a666e84', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8fa45cca1a114c0aa1e93a50672626ef', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62070) authorize /opt/stack/nova/nova/policy.py:201}} [ 800.021569] env[62070]: DEBUG nova.compute.manager [req-e6e50844-6e1a-4c30-b5c6-80eb92a22a78 req-3b449edb-59b7-47cf-8d2a-5d3857358801 service nova] [instance: c3c6e93c-80be-4e71-87fb-2ff8db8d30fe] Received event network-vif-plugged-8ed3d649-bc61-493f-b8e9-2e7f7fad49ed {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 800.021672] env[62070]: DEBUG oslo_concurrency.lockutils [req-e6e50844-6e1a-4c30-b5c6-80eb92a22a78 req-3b449edb-59b7-47cf-8d2a-5d3857358801 service nova] Acquiring lock "c3c6e93c-80be-4e71-87fb-2ff8db8d30fe-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 800.021979] env[62070]: DEBUG oslo_concurrency.lockutils [req-e6e50844-6e1a-4c30-b5c6-80eb92a22a78 req-3b449edb-59b7-47cf-8d2a-5d3857358801 service nova] Lock "c3c6e93c-80be-4e71-87fb-2ff8db8d30fe-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 800.022348] env[62070]: DEBUG oslo_concurrency.lockutils [req-e6e50844-6e1a-4c30-b5c6-80eb92a22a78 req-3b449edb-59b7-47cf-8d2a-5d3857358801 service nova] Lock "c3c6e93c-80be-4e71-87fb-2ff8db8d30fe-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 800.022601] env[62070]: DEBUG nova.compute.manager [req-e6e50844-6e1a-4c30-b5c6-80eb92a22a78 req-3b449edb-59b7-47cf-8d2a-5d3857358801 service nova] [instance: c3c6e93c-80be-4e71-87fb-2ff8db8d30fe] No waiting events found dispatching network-vif-plugged-8ed3d649-bc61-493f-b8e9-2e7f7fad49ed {{(pid=62070) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 800.022844] env[62070]: WARNING nova.compute.manager [req-e6e50844-6e1a-4c30-b5c6-80eb92a22a78 req-3b449edb-59b7-47cf-8d2a-5d3857358801 service nova] [instance: c3c6e93c-80be-4e71-87fb-2ff8db8d30fe] Received unexpected event network-vif-plugged-8ed3d649-bc61-493f-b8e9-2e7f7fad49ed for instance with vm_state building and task_state spawning. 
[ 800.023475] env[62070]: DEBUG nova.compute.manager [req-e6e50844-6e1a-4c30-b5c6-80eb92a22a78 req-3b449edb-59b7-47cf-8d2a-5d3857358801 service nova] [instance: c3c6e93c-80be-4e71-87fb-2ff8db8d30fe] Received event network-changed-8ed3d649-bc61-493f-b8e9-2e7f7fad49ed {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 800.023646] env[62070]: DEBUG nova.compute.manager [req-e6e50844-6e1a-4c30-b5c6-80eb92a22a78 req-3b449edb-59b7-47cf-8d2a-5d3857358801 service nova] [instance: c3c6e93c-80be-4e71-87fb-2ff8db8d30fe] Refreshing instance network info cache due to event network-changed-8ed3d649-bc61-493f-b8e9-2e7f7fad49ed. {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 800.023827] env[62070]: DEBUG oslo_concurrency.lockutils [req-e6e50844-6e1a-4c30-b5c6-80eb92a22a78 req-3b449edb-59b7-47cf-8d2a-5d3857358801 service nova] Acquiring lock "refresh_cache-c3c6e93c-80be-4e71-87fb-2ff8db8d30fe" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 800.185702] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6578fefe-a6f2-433c-911f-0627322a57aa tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Releasing lock "refresh_cache-f75ed36e-16c8-4a6b-bd39-eb4057ef0691" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 800.185953] env[62070]: DEBUG nova.compute.manager [None req-6578fefe-a6f2-433c-911f-0627322a57aa tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] [instance: f75ed36e-16c8-4a6b-bd39-eb4057ef0691] Instance network_info: |[{"id": "48f9f765-2091-4515-9b46-46217835d2df", "address": "fa:16:3e:be:a8:10", "network": {"id": "25f38244-53c8-44e1-a7a9-a8a37ce83ffa", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1382757376-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7525c88cd803420094fb2af593ba5d65", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69e1a5c1-4ae7-409b-8de7-d401684ef60d", "external-id": "nsx-vlan-transportzone-740", "segmentation_id": 740, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap48f9f765-20", "ovs_interfaceid": "48f9f765-2091-4515-9b46-46217835d2df", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 800.186813] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-6578fefe-a6f2-433c-911f-0627322a57aa tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] [instance: f75ed36e-16c8-4a6b-bd39-eb4057ef0691] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:be:a8:10', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '69e1a5c1-4ae7-409b-8de7-d401684ef60d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 
'48f9f765-2091-4515-9b46-46217835d2df', 'vif_model': 'vmxnet3'}] {{(pid=62070) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 800.204233] env[62070]: DEBUG oslo.service.loopingcall [None req-6578fefe-a6f2-433c-911f-0627322a57aa tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 800.208789] env[62070]: DEBUG nova.network.neutron [None req-79dfc138-9989-48ce-b920-501f82a9be5b tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: c3c6e93c-80be-4e71-87fb-2ff8db8d30fe] Instance cache missing network info. {{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 800.212538] env[62070]: DEBUG nova.network.neutron [None req-929590f7-b02f-4339-9d80-5d6bbb4a18d7 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] [instance: 0ac963b1-120a-464b-8228-3393135dd182] Successfully updated port: 848ce3e0-8d08-460b-b770-75628ae28fd3 {{(pid=62070) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 800.214165] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f75ed36e-16c8-4a6b-bd39-eb4057ef0691] Creating VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 800.217242] env[62070]: DEBUG nova.compute.manager [req-23860f58-3ea0-46f3-b2d2-b0576d78208d req-d71ad05c-83d9-4c48-af00-cc85c9a321b6 service nova] [instance: 0ac963b1-120a-464b-8228-3393135dd182] Received event network-vif-plugged-848ce3e0-8d08-460b-b770-75628ae28fd3 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 800.217242] env[62070]: DEBUG oslo_concurrency.lockutils [req-23860f58-3ea0-46f3-b2d2-b0576d78208d req-d71ad05c-83d9-4c48-af00-cc85c9a321b6 service nova] Acquiring lock "0ac963b1-120a-464b-8228-3393135dd182-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 800.217242] env[62070]: DEBUG oslo_concurrency.lockutils [req-23860f58-3ea0-46f3-b2d2-b0576d78208d req-d71ad05c-83d9-4c48-af00-cc85c9a321b6 service nova] Lock "0ac963b1-120a-464b-8228-3393135dd182-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 800.217242] env[62070]: DEBUG oslo_concurrency.lockutils [req-23860f58-3ea0-46f3-b2d2-b0576d78208d req-d71ad05c-83d9-4c48-af00-cc85c9a321b6 service nova] Lock "0ac963b1-120a-464b-8228-3393135dd182-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 800.217242] env[62070]: DEBUG nova.compute.manager [req-23860f58-3ea0-46f3-b2d2-b0576d78208d req-d71ad05c-83d9-4c48-af00-cc85c9a321b6 service nova] [instance: 0ac963b1-120a-464b-8228-3393135dd182] No waiting events found dispatching network-vif-plugged-848ce3e0-8d08-460b-b770-75628ae28fd3 {{(pid=62070) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 800.217242] env[62070]: WARNING nova.compute.manager 
[req-23860f58-3ea0-46f3-b2d2-b0576d78208d req-d71ad05c-83d9-4c48-af00-cc85c9a321b6 service nova] [instance: 0ac963b1-120a-464b-8228-3393135dd182] Received unexpected event network-vif-plugged-848ce3e0-8d08-460b-b770-75628ae28fd3 for instance with vm_state active and task_state None. [ 800.217645] env[62070]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-eab2ce4f-be38-4365-a1c0-93b8d461ea5f {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.243817] env[62070]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 800.243817] env[62070]: value = "task-1121696" [ 800.243817] env[62070]: _type = "Task" [ 800.243817] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 800.254636] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1121696, 'name': CreateVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 800.264756] env[62070]: DEBUG nova.network.neutron [None req-7d89b3fa-4dcc-4992-8983-de4ad7c29798 tempest-ServerMetadataTestJSON-571714377 tempest-ServerMetadataTestJSON-571714377-project-member] [instance: efef4aac-5b74-4a41-9f74-3d4cb4f80cdb] Successfully created port: 90d77a95-7927-420d-8c8d-2f64e885a145 {{(pid=62070) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 800.415232] env[62070]: DEBUG nova.compute.manager [None req-7d89b3fa-4dcc-4992-8983-de4ad7c29798 tempest-ServerMetadataTestJSON-571714377 tempest-ServerMetadataTestJSON-571714377-project-member] [instance: efef4aac-5b74-4a41-9f74-3d4cb4f80cdb] Start building block device mappings for instance. {{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 800.419199] env[62070]: DEBUG nova.network.neutron [None req-79dfc138-9989-48ce-b920-501f82a9be5b tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: c3c6e93c-80be-4e71-87fb-2ff8db8d30fe] Updating instance_info_cache with network_info: [{"id": "8ed3d649-bc61-493f-b8e9-2e7f7fad49ed", "address": "fa:16:3e:c7:ff:e4", "network": {"id": "4888f989-958d-49ff-bf5a-06873e4cc624", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-906255456-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d079c0ef3ed745fcaf69dc728dca4466", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7e0240aa-a694-48fc-a0f9-6f2d3e71aa12", "external-id": "nsx-vlan-transportzone-249", "segmentation_id": 249, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8ed3d649-bc", "ovs_interfaceid": "8ed3d649-bc61-493f-b8e9-2e7f7fad49ed", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 800.454325] env[62070]: DEBUG oslo_vmware.api [None req-2de264e4-258a-4ba0-b64b-88da6d460fb5 
tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Task: {'id': task-1121695, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.081128} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 800.454616] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-2de264e4-258a-4ba0-b64b-88da6d460fb5 tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] [instance: 2368b649-f931-454c-92cc-971df4155d90] Extended root virtual disk {{(pid=62070) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 800.455671] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82168a5b-946f-4396-bf28-69deecfce6d1 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.482632] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-2de264e4-258a-4ba0-b64b-88da6d460fb5 tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] [instance: 2368b649-f931-454c-92cc-971df4155d90] Reconfiguring VM instance instance-00000038 to attach disk [datastore2] 2368b649-f931-454c-92cc-971df4155d90/2368b649-f931-454c-92cc-971df4155d90.vmdk or device None with type sparse {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 800.486240] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c91fc498-37ec-4283-9b5e-b5d7f632c2ef {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.513026] env[62070]: DEBUG oslo_vmware.api [None req-2de264e4-258a-4ba0-b64b-88da6d460fb5 tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Waiting for the task: (returnval){ [ 800.513026] env[62070]: value = "task-1121697" [ 800.513026] env[62070]: _type = "Task" [ 800.513026] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 800.525830] env[62070]: DEBUG oslo_vmware.api [None req-2de264e4-258a-4ba0-b64b-88da6d460fb5 tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Task: {'id': task-1121697, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 800.718701] env[62070]: DEBUG oslo_concurrency.lockutils [None req-929590f7-b02f-4339-9d80-5d6bbb4a18d7 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Acquiring lock "refresh_cache-0ac963b1-120a-464b-8228-3393135dd182" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 800.718902] env[62070]: DEBUG oslo_concurrency.lockutils [None req-929590f7-b02f-4339-9d80-5d6bbb4a18d7 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Acquired lock "refresh_cache-0ac963b1-120a-464b-8228-3393135dd182" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 800.719107] env[62070]: DEBUG nova.network.neutron [None req-929590f7-b02f-4339-9d80-5d6bbb4a18d7 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] [instance: 0ac963b1-120a-464b-8228-3393135dd182] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 800.756438] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1121696, 'name': CreateVM_Task} progress is 25%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 800.929228] env[62070]: DEBUG oslo_concurrency.lockutils [None req-79dfc138-9989-48ce-b920-501f82a9be5b tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Releasing lock "refresh_cache-c3c6e93c-80be-4e71-87fb-2ff8db8d30fe" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 800.929606] env[62070]: DEBUG nova.compute.manager [None req-79dfc138-9989-48ce-b920-501f82a9be5b tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: c3c6e93c-80be-4e71-87fb-2ff8db8d30fe] Instance network_info: |[{"id": "8ed3d649-bc61-493f-b8e9-2e7f7fad49ed", "address": "fa:16:3e:c7:ff:e4", "network": {"id": "4888f989-958d-49ff-bf5a-06873e4cc624", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-906255456-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d079c0ef3ed745fcaf69dc728dca4466", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7e0240aa-a694-48fc-a0f9-6f2d3e71aa12", "external-id": "nsx-vlan-transportzone-249", "segmentation_id": 249, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8ed3d649-bc", "ovs_interfaceid": "8ed3d649-bc61-493f-b8e9-2e7f7fad49ed", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 800.930374] env[62070]: DEBUG oslo_concurrency.lockutils [req-e6e50844-6e1a-4c30-b5c6-80eb92a22a78 req-3b449edb-59b7-47cf-8d2a-5d3857358801 service nova] Acquired lock 
"refresh_cache-c3c6e93c-80be-4e71-87fb-2ff8db8d30fe" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 800.931024] env[62070]: DEBUG nova.network.neutron [req-e6e50844-6e1a-4c30-b5c6-80eb92a22a78 req-3b449edb-59b7-47cf-8d2a-5d3857358801 service nova] [instance: c3c6e93c-80be-4e71-87fb-2ff8db8d30fe] Refreshing network info cache for port 8ed3d649-bc61-493f-b8e9-2e7f7fad49ed {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 800.931836] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-79dfc138-9989-48ce-b920-501f82a9be5b tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: c3c6e93c-80be-4e71-87fb-2ff8db8d30fe] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c7:ff:e4', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7e0240aa-a694-48fc-a0f9-6f2d3e71aa12', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8ed3d649-bc61-493f-b8e9-2e7f7fad49ed', 'vif_model': 'vmxnet3'}] {{(pid=62070) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 800.939650] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-79dfc138-9989-48ce-b920-501f82a9be5b tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Creating folder: Project (d079c0ef3ed745fcaf69dc728dca4466). Parent ref: group-v245319. {{(pid=62070) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 800.943428] env[62070]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d41c7551-9b0d-4ec6-9da3-ec9be2024830 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.960492] env[62070]: INFO nova.virt.vmwareapi.vm_util [None req-79dfc138-9989-48ce-b920-501f82a9be5b tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Created folder: Project (d079c0ef3ed745fcaf69dc728dca4466) in parent group-v245319. [ 800.960754] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-79dfc138-9989-48ce-b920-501f82a9be5b tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Creating folder: Instances. Parent ref: group-v245398. {{(pid=62070) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 800.961049] env[62070]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a90e0436-7ca6-4f10-b82a-3f67e74dbd11 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.974219] env[62070]: INFO nova.virt.vmwareapi.vm_util [None req-79dfc138-9989-48ce-b920-501f82a9be5b tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Created folder: Instances in parent group-v245398. [ 800.974531] env[62070]: DEBUG oslo.service.loopingcall [None req-79dfc138-9989-48ce-b920-501f82a9be5b tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 800.974769] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c3c6e93c-80be-4e71-87fb-2ff8db8d30fe] Creating VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 800.974995] env[62070]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-772bb238-ff68-4f12-af5f-96175c5b0dc9 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.001616] env[62070]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 801.001616] env[62070]: value = "task-1121700" [ 801.001616] env[62070]: _type = "Task" [ 801.001616] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 801.006931] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e554537d-9698-47ff-96cb-bb835cea5ab8 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.016194] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1121700, 'name': CreateVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 801.022827] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92fb3696-b0ca-4152-95cd-603e2e40d66e {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.033468] env[62070]: DEBUG oslo_vmware.api [None req-2de264e4-258a-4ba0-b64b-88da6d460fb5 tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Task: {'id': task-1121697, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 801.062601] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd093059-3f07-4c2b-a18c-06ded2205b48 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.071938] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63a0dd67-6191-4a44-a74c-32d20e17c960 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.089147] env[62070]: DEBUG nova.compute.provider_tree [None req-24dc43a0-9208-4ff7-9026-97bbf9aa150b tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Updating inventory in ProviderTree for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 801.257389] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1121696, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 801.273693] env[62070]: WARNING nova.network.neutron [None req-929590f7-b02f-4339-9d80-5d6bbb4a18d7 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] [instance: 0ac963b1-120a-464b-8228-3393135dd182] 48dc51c7-cfa4-452e-9d72-2968d9a40dfa already exists in list: networks containing: ['48dc51c7-cfa4-452e-9d72-2968d9a40dfa']. ignoring it [ 801.273922] env[62070]: WARNING nova.network.neutron [None req-929590f7-b02f-4339-9d80-5d6bbb4a18d7 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] [instance: 0ac963b1-120a-464b-8228-3393135dd182] 48dc51c7-cfa4-452e-9d72-2968d9a40dfa already exists in list: networks containing: ['48dc51c7-cfa4-452e-9d72-2968d9a40dfa']. ignoring it [ 801.431451] env[62070]: DEBUG nova.compute.manager [None req-7d89b3fa-4dcc-4992-8983-de4ad7c29798 tempest-ServerMetadataTestJSON-571714377 tempest-ServerMetadataTestJSON-571714377-project-member] [instance: efef4aac-5b74-4a41-9f74-3d4cb4f80cdb] Start spawning the instance on the hypervisor. {{(pid=62070) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 801.458063] env[62070]: DEBUG nova.virt.hardware [None req-7d89b3fa-4dcc-4992-8983-de4ad7c29798 tempest-ServerMetadataTestJSON-571714377 tempest-ServerMetadataTestJSON-571714377-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T09:21:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T09:21:17Z,direct_url=,disk_format='vmdk',id=43ea607c-7ece-4601-9b11-75c6a16aa7dd,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9d42cb2bbadf40d6b35f237f71234611',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T09:21:18Z,virtual_size=,visibility=), allow threads: False {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 801.458216] env[62070]: DEBUG nova.virt.hardware [None req-7d89b3fa-4dcc-4992-8983-de4ad7c29798 tempest-ServerMetadataTestJSON-571714377 tempest-ServerMetadataTestJSON-571714377-project-member] Flavor limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 801.458468] env[62070]: DEBUG nova.virt.hardware [None req-7d89b3fa-4dcc-4992-8983-de4ad7c29798 tempest-ServerMetadataTestJSON-571714377 tempest-ServerMetadataTestJSON-571714377-project-member] Image limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 801.458739] env[62070]: DEBUG nova.virt.hardware [None req-7d89b3fa-4dcc-4992-8983-de4ad7c29798 tempest-ServerMetadataTestJSON-571714377 tempest-ServerMetadataTestJSON-571714377-project-member] Flavor pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 801.458933] env[62070]: DEBUG nova.virt.hardware [None req-7d89b3fa-4dcc-4992-8983-de4ad7c29798 tempest-ServerMetadataTestJSON-571714377 tempest-ServerMetadataTestJSON-571714377-project-member] Image pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 
801.459164] env[62070]: DEBUG nova.virt.hardware [None req-7d89b3fa-4dcc-4992-8983-de4ad7c29798 tempest-ServerMetadataTestJSON-571714377 tempest-ServerMetadataTestJSON-571714377-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 801.459486] env[62070]: DEBUG nova.virt.hardware [None req-7d89b3fa-4dcc-4992-8983-de4ad7c29798 tempest-ServerMetadataTestJSON-571714377 tempest-ServerMetadataTestJSON-571714377-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 801.459708] env[62070]: DEBUG nova.virt.hardware [None req-7d89b3fa-4dcc-4992-8983-de4ad7c29798 tempest-ServerMetadataTestJSON-571714377 tempest-ServerMetadataTestJSON-571714377-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 801.459952] env[62070]: DEBUG nova.virt.hardware [None req-7d89b3fa-4dcc-4992-8983-de4ad7c29798 tempest-ServerMetadataTestJSON-571714377 tempest-ServerMetadataTestJSON-571714377-project-member] Got 1 possible topologies {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 801.460243] env[62070]: DEBUG nova.virt.hardware [None req-7d89b3fa-4dcc-4992-8983-de4ad7c29798 tempest-ServerMetadataTestJSON-571714377 tempest-ServerMetadataTestJSON-571714377-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 801.460467] env[62070]: DEBUG nova.virt.hardware [None req-7d89b3fa-4dcc-4992-8983-de4ad7c29798 tempest-ServerMetadataTestJSON-571714377 tempest-ServerMetadataTestJSON-571714377-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 801.462931] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-761667e4-1c7e-471e-9b60-0943e3680bd4 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.472029] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6dcb88b-9eb8-444f-8e7e-0333f0c98c45 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.514306] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1121700, 'name': CreateVM_Task, 'duration_secs': 0.466463} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 801.517197] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c3c6e93c-80be-4e71-87fb-2ff8db8d30fe] Created VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 801.518149] env[62070]: DEBUG oslo_concurrency.lockutils [None req-79dfc138-9989-48ce-b920-501f82a9be5b tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 801.518438] env[62070]: DEBUG oslo_concurrency.lockutils [None req-79dfc138-9989-48ce-b920-501f82a9be5b tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Acquired lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 801.518815] env[62070]: DEBUG oslo_concurrency.lockutils [None req-79dfc138-9989-48ce-b920-501f82a9be5b tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 801.522327] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b0c1c0d5-a493-4d66-947a-46b7fbc0af28 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.528263] env[62070]: DEBUG oslo_vmware.api [None req-79dfc138-9989-48ce-b920-501f82a9be5b tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Waiting for the task: (returnval){ [ 801.528263] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]52f9c98e-4aec-3414-7416-70196491049b" [ 801.528263] env[62070]: _type = "Task" [ 801.528263] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 801.531794] env[62070]: DEBUG oslo_vmware.api [None req-2de264e4-258a-4ba0-b64b-88da6d460fb5 tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Task: {'id': task-1121697, 'name': ReconfigVM_Task, 'duration_secs': 0.686006} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 801.535012] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-2de264e4-258a-4ba0-b64b-88da6d460fb5 tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] [instance: 2368b649-f931-454c-92cc-971df4155d90] Reconfigured VM instance instance-00000038 to attach disk [datastore2] 2368b649-f931-454c-92cc-971df4155d90/2368b649-f931-454c-92cc-971df4155d90.vmdk or device None with type sparse {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 801.535706] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-93c0b76b-c9ce-4bfc-be78-fac8e3085546 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.543351] env[62070]: DEBUG oslo_vmware.api [None req-79dfc138-9989-48ce-b920-501f82a9be5b tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52f9c98e-4aec-3414-7416-70196491049b, 'name': SearchDatastore_Task, 'duration_secs': 0.010457} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 801.544787] env[62070]: DEBUG oslo_concurrency.lockutils [None req-79dfc138-9989-48ce-b920-501f82a9be5b tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Releasing lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 801.545098] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-79dfc138-9989-48ce-b920-501f82a9be5b tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: c3c6e93c-80be-4e71-87fb-2ff8db8d30fe] Processing image 43ea607c-7ece-4601-9b11-75c6a16aa7dd {{(pid=62070) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 801.545411] env[62070]: DEBUG oslo_concurrency.lockutils [None req-79dfc138-9989-48ce-b920-501f82a9be5b tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 801.545570] env[62070]: DEBUG oslo_concurrency.lockutils [None req-79dfc138-9989-48ce-b920-501f82a9be5b tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Acquired lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 801.545758] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-79dfc138-9989-48ce-b920-501f82a9be5b tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 801.546156] env[62070]: DEBUG oslo_vmware.api [None req-2de264e4-258a-4ba0-b64b-88da6d460fb5 
tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Waiting for the task: (returnval){ [ 801.546156] env[62070]: value = "task-1121701" [ 801.546156] env[62070]: _type = "Task" [ 801.546156] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 801.546369] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-70b85bed-8c6e-4e3e-8601-fe7141d78581 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.557815] env[62070]: DEBUG oslo_vmware.api [None req-2de264e4-258a-4ba0-b64b-88da6d460fb5 tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Task: {'id': task-1121701, 'name': Rename_Task} progress is 5%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 801.559429] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-79dfc138-9989-48ce-b920-501f82a9be5b tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 801.559618] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-79dfc138-9989-48ce-b920-501f82a9be5b tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62070) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 801.560357] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c004ad59-8685-4fd0-a724-566332fb308d {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.567703] env[62070]: DEBUG oslo_vmware.api [None req-79dfc138-9989-48ce-b920-501f82a9be5b tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Waiting for the task: (returnval){ [ 801.567703] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]52f91eb7-53d2-f4a1-a7a2-f16e9acb1b8f" [ 801.567703] env[62070]: _type = "Task" [ 801.567703] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 801.580191] env[62070]: DEBUG oslo_vmware.api [None req-79dfc138-9989-48ce-b920-501f82a9be5b tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52f91eb7-53d2-f4a1-a7a2-f16e9acb1b8f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 801.628735] env[62070]: DEBUG nova.scheduler.client.report [None req-24dc43a0-9208-4ff7-9026-97bbf9aa150b tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Updated inventory for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 with generation 82 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 801.629039] env[62070]: DEBUG nova.compute.provider_tree [None req-24dc43a0-9208-4ff7-9026-97bbf9aa150b tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Updating resource provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 generation from 82 to 83 during operation: update_inventory {{(pid=62070) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 801.629301] env[62070]: DEBUG nova.compute.provider_tree [None req-24dc43a0-9208-4ff7-9026-97bbf9aa150b tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Updating inventory in ProviderTree for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 801.757481] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1121696, 'name': CreateVM_Task, 'duration_secs': 1.13887} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 801.760245] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f75ed36e-16c8-4a6b-bd39-eb4057ef0691] Created VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 801.760885] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6578fefe-a6f2-433c-911f-0627322a57aa tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 801.761100] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6578fefe-a6f2-433c-911f-0627322a57aa tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Acquired lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 801.761441] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6578fefe-a6f2-433c-911f-0627322a57aa tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 801.761992] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a651938d-c513-4c0f-ab9f-11b421864ff0 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.768219] env[62070]: DEBUG oslo_vmware.api [None req-6578fefe-a6f2-433c-911f-0627322a57aa tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Waiting for the task: (returnval){ [ 801.768219] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]52ca1ab2-da0e-3966-b7f3-07fdeb849c0e" [ 801.768219] env[62070]: _type = "Task" [ 801.768219] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 801.777779] env[62070]: DEBUG oslo_vmware.api [None req-6578fefe-a6f2-433c-911f-0627322a57aa tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52ca1ab2-da0e-3966-b7f3-07fdeb849c0e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 801.790621] env[62070]: DEBUG nova.network.neutron [None req-7d89b3fa-4dcc-4992-8983-de4ad7c29798 tempest-ServerMetadataTestJSON-571714377 tempest-ServerMetadataTestJSON-571714377-project-member] [instance: efef4aac-5b74-4a41-9f74-3d4cb4f80cdb] Successfully updated port: 90d77a95-7927-420d-8c8d-2f64e885a145 {{(pid=62070) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 801.906880] env[62070]: DEBUG nova.network.neutron [req-e6e50844-6e1a-4c30-b5c6-80eb92a22a78 req-3b449edb-59b7-47cf-8d2a-5d3857358801 service nova] [instance: c3c6e93c-80be-4e71-87fb-2ff8db8d30fe] Updated VIF entry in instance network info cache for port 8ed3d649-bc61-493f-b8e9-2e7f7fad49ed. 
{{(pid=62070) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 801.907347] env[62070]: DEBUG nova.network.neutron [req-e6e50844-6e1a-4c30-b5c6-80eb92a22a78 req-3b449edb-59b7-47cf-8d2a-5d3857358801 service nova] [instance: c3c6e93c-80be-4e71-87fb-2ff8db8d30fe] Updating instance_info_cache with network_info: [{"id": "8ed3d649-bc61-493f-b8e9-2e7f7fad49ed", "address": "fa:16:3e:c7:ff:e4", "network": {"id": "4888f989-958d-49ff-bf5a-06873e4cc624", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-906255456-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d079c0ef3ed745fcaf69dc728dca4466", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7e0240aa-a694-48fc-a0f9-6f2d3e71aa12", "external-id": "nsx-vlan-transportzone-249", "segmentation_id": 249, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8ed3d649-bc", "ovs_interfaceid": "8ed3d649-bc61-493f-b8e9-2e7f7fad49ed", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 802.059479] env[62070]: DEBUG oslo_vmware.api [None req-2de264e4-258a-4ba0-b64b-88da6d460fb5 tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Task: {'id': task-1121701, 'name': Rename_Task, 'duration_secs': 0.293677} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 802.059874] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-2de264e4-258a-4ba0-b64b-88da6d460fb5 tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] [instance: 2368b649-f931-454c-92cc-971df4155d90] Powering on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 802.060035] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ce6878dd-4de8-4cd7-aa71-60f6cb344ca8 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.068081] env[62070]: DEBUG oslo_vmware.api [None req-2de264e4-258a-4ba0-b64b-88da6d460fb5 tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Waiting for the task: (returnval){ [ 802.068081] env[62070]: value = "task-1121702" [ 802.068081] env[62070]: _type = "Task" [ 802.068081] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 802.083130] env[62070]: DEBUG oslo_vmware.api [None req-79dfc138-9989-48ce-b920-501f82a9be5b tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52f91eb7-53d2-f4a1-a7a2-f16e9acb1b8f, 'name': SearchDatastore_Task, 'duration_secs': 0.010736} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 802.083359] env[62070]: DEBUG oslo_vmware.api [None req-2de264e4-258a-4ba0-b64b-88da6d460fb5 tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Task: {'id': task-1121702, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 802.084442] env[62070]: DEBUG nova.network.neutron [None req-929590f7-b02f-4339-9d80-5d6bbb4a18d7 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] [instance: 0ac963b1-120a-464b-8228-3393135dd182] Updating instance_info_cache with network_info: [{"id": "6326b098-3c76-4152-b623-8921285ec01b", "address": "fa:16:3e:cc:12:22", "network": {"id": "48dc51c7-cfa4-452e-9d72-2968d9a40dfa", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-274800531-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.179", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "925dff51764c4b56ae7ea05fbde2ecdd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2c7c1b46-cb81-45da-b5aa-7905d4da5854", "external-id": "nsx-vlan-transportzone-15", "segmentation_id": 15, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6326b098-3c", "ovs_interfaceid": "6326b098-3c76-4152-b623-8921285ec01b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "7df84135-5c3d-48c5-b2cf-176e77094879", "address": "fa:16:3e:b9:a9:95", "network": {"id": "48dc51c7-cfa4-452e-9d72-2968d9a40dfa", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-274800531-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "925dff51764c4b56ae7ea05fbde2ecdd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2c7c1b46-cb81-45da-b5aa-7905d4da5854", "external-id": "nsx-vlan-transportzone-15", "segmentation_id": 15, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7df84135-5c", "ovs_interfaceid": "7df84135-5c3d-48c5-b2cf-176e77094879", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "848ce3e0-8d08-460b-b770-75628ae28fd3", "address": "fa:16:3e:28:b0:1e", "network": {"id": "48dc51c7-cfa4-452e-9d72-2968d9a40dfa", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-274800531-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, 
"meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "925dff51764c4b56ae7ea05fbde2ecdd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2c7c1b46-cb81-45da-b5aa-7905d4da5854", "external-id": "nsx-vlan-transportzone-15", "segmentation_id": 15, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap848ce3e0-8d", "ovs_interfaceid": "848ce3e0-8d08-460b-b770-75628ae28fd3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 802.086719] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-77d67948-3a9b-4b88-b86c-a5daef25ad48 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.092849] env[62070]: DEBUG oslo_vmware.api [None req-79dfc138-9989-48ce-b920-501f82a9be5b tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Waiting for the task: (returnval){ [ 802.092849] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]52cb88eb-f206-bde6-85cf-22a959221f3b" [ 802.092849] env[62070]: _type = "Task" [ 802.092849] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 802.105606] env[62070]: DEBUG oslo_vmware.api [None req-79dfc138-9989-48ce-b920-501f82a9be5b tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52cb88eb-f206-bde6-85cf-22a959221f3b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 802.135132] env[62070]: DEBUG oslo_concurrency.lockutils [None req-24dc43a0-9208-4ff7-9026-97bbf9aa150b tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.733s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 802.135654] env[62070]: DEBUG nova.compute.manager [None req-24dc43a0-9208-4ff7-9026-97bbf9aa150b tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 328fbc92-8162-4e12-a02d-6e9cafe0c365] Start building networks asynchronously for instance. 
{{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 802.138280] env[62070]: DEBUG oslo_concurrency.lockutils [None req-9eb5f919-38d3-41ef-8aa0-3ff1772842b2 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 28.065s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 802.139704] env[62070]: INFO nova.compute.claims [None req-9eb5f919-38d3-41ef-8aa0-3ff1772842b2 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] [instance: 3d22f50a-e1b7-48f9-a044-df64d01dfeb4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 802.279552] env[62070]: DEBUG oslo_vmware.api [None req-6578fefe-a6f2-433c-911f-0627322a57aa tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52ca1ab2-da0e-3966-b7f3-07fdeb849c0e, 'name': SearchDatastore_Task, 'duration_secs': 0.010253} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 802.279907] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6578fefe-a6f2-433c-911f-0627322a57aa tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Releasing lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 802.280172] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-6578fefe-a6f2-433c-911f-0627322a57aa tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] [instance: f75ed36e-16c8-4a6b-bd39-eb4057ef0691] Processing image 43ea607c-7ece-4601-9b11-75c6a16aa7dd {{(pid=62070) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 802.280411] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6578fefe-a6f2-433c-911f-0627322a57aa tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 802.293276] env[62070]: DEBUG oslo_concurrency.lockutils [None req-7d89b3fa-4dcc-4992-8983-de4ad7c29798 tempest-ServerMetadataTestJSON-571714377 tempest-ServerMetadataTestJSON-571714377-project-member] Acquiring lock "refresh_cache-efef4aac-5b74-4a41-9f74-3d4cb4f80cdb" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 802.293421] env[62070]: DEBUG oslo_concurrency.lockutils [None req-7d89b3fa-4dcc-4992-8983-de4ad7c29798 tempest-ServerMetadataTestJSON-571714377 tempest-ServerMetadataTestJSON-571714377-project-member] Acquired lock "refresh_cache-efef4aac-5b74-4a41-9f74-3d4cb4f80cdb" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 802.293575] env[62070]: DEBUG nova.network.neutron [None req-7d89b3fa-4dcc-4992-8983-de4ad7c29798 tempest-ServerMetadataTestJSON-571714377 tempest-ServerMetadataTestJSON-571714377-project-member] [instance: 
efef4aac-5b74-4a41-9f74-3d4cb4f80cdb] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 802.390315] env[62070]: DEBUG nova.compute.manager [req-cfea83ec-ce18-4261-b341-d9b7f3686027 req-1d890c3f-49c8-491e-b3b1-a19d548460eb service nova] [instance: efef4aac-5b74-4a41-9f74-3d4cb4f80cdb] Received event network-vif-plugged-90d77a95-7927-420d-8c8d-2f64e885a145 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 802.391152] env[62070]: DEBUG oslo_concurrency.lockutils [req-cfea83ec-ce18-4261-b341-d9b7f3686027 req-1d890c3f-49c8-491e-b3b1-a19d548460eb service nova] Acquiring lock "efef4aac-5b74-4a41-9f74-3d4cb4f80cdb-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 802.391436] env[62070]: DEBUG oslo_concurrency.lockutils [req-cfea83ec-ce18-4261-b341-d9b7f3686027 req-1d890c3f-49c8-491e-b3b1-a19d548460eb service nova] Lock "efef4aac-5b74-4a41-9f74-3d4cb4f80cdb-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 802.391701] env[62070]: DEBUG oslo_concurrency.lockutils [req-cfea83ec-ce18-4261-b341-d9b7f3686027 req-1d890c3f-49c8-491e-b3b1-a19d548460eb service nova] Lock "efef4aac-5b74-4a41-9f74-3d4cb4f80cdb-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 802.391939] env[62070]: DEBUG nova.compute.manager [req-cfea83ec-ce18-4261-b341-d9b7f3686027 req-1d890c3f-49c8-491e-b3b1-a19d548460eb service nova] [instance: efef4aac-5b74-4a41-9f74-3d4cb4f80cdb] No waiting events found dispatching network-vif-plugged-90d77a95-7927-420d-8c8d-2f64e885a145 {{(pid=62070) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 802.392188] env[62070]: WARNING nova.compute.manager [req-cfea83ec-ce18-4261-b341-d9b7f3686027 req-1d890c3f-49c8-491e-b3b1-a19d548460eb service nova] [instance: efef4aac-5b74-4a41-9f74-3d4cb4f80cdb] Received unexpected event network-vif-plugged-90d77a95-7927-420d-8c8d-2f64e885a145 for instance with vm_state building and task_state spawning. [ 802.392404] env[62070]: DEBUG nova.compute.manager [req-cfea83ec-ce18-4261-b341-d9b7f3686027 req-1d890c3f-49c8-491e-b3b1-a19d548460eb service nova] [instance: efef4aac-5b74-4a41-9f74-3d4cb4f80cdb] Received event network-changed-90d77a95-7927-420d-8c8d-2f64e885a145 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 802.392602] env[62070]: DEBUG nova.compute.manager [req-cfea83ec-ce18-4261-b341-d9b7f3686027 req-1d890c3f-49c8-491e-b3b1-a19d548460eb service nova] [instance: efef4aac-5b74-4a41-9f74-3d4cb4f80cdb] Refreshing instance network info cache due to event network-changed-90d77a95-7927-420d-8c8d-2f64e885a145. 
{{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 802.392837] env[62070]: DEBUG oslo_concurrency.lockutils [req-cfea83ec-ce18-4261-b341-d9b7f3686027 req-1d890c3f-49c8-491e-b3b1-a19d548460eb service nova] Acquiring lock "refresh_cache-efef4aac-5b74-4a41-9f74-3d4cb4f80cdb" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 802.410023] env[62070]: DEBUG oslo_concurrency.lockutils [req-e6e50844-6e1a-4c30-b5c6-80eb92a22a78 req-3b449edb-59b7-47cf-8d2a-5d3857358801 service nova] Releasing lock "refresh_cache-c3c6e93c-80be-4e71-87fb-2ff8db8d30fe" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 802.548036] env[62070]: DEBUG nova.compute.manager [req-907ca6bc-7a5f-46ce-a78b-80ed4c96544b req-5ab5ccdd-8564-4e57-ad1f-690d29c70d7b service nova] [instance: 0ac963b1-120a-464b-8228-3393135dd182] Received event network-changed-848ce3e0-8d08-460b-b770-75628ae28fd3 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 802.548286] env[62070]: DEBUG nova.compute.manager [req-907ca6bc-7a5f-46ce-a78b-80ed4c96544b req-5ab5ccdd-8564-4e57-ad1f-690d29c70d7b service nova] [instance: 0ac963b1-120a-464b-8228-3393135dd182] Refreshing instance network info cache due to event network-changed-848ce3e0-8d08-460b-b770-75628ae28fd3. {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 802.548566] env[62070]: DEBUG oslo_concurrency.lockutils [req-907ca6bc-7a5f-46ce-a78b-80ed4c96544b req-5ab5ccdd-8564-4e57-ad1f-690d29c70d7b service nova] Acquiring lock "refresh_cache-0ac963b1-120a-464b-8228-3393135dd182" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 802.578382] env[62070]: DEBUG oslo_vmware.api [None req-2de264e4-258a-4ba0-b64b-88da6d460fb5 tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Task: {'id': task-1121702, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 802.587671] env[62070]: DEBUG oslo_concurrency.lockutils [None req-929590f7-b02f-4339-9d80-5d6bbb4a18d7 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Releasing lock "refresh_cache-0ac963b1-120a-464b-8228-3393135dd182" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 802.588447] env[62070]: DEBUG oslo_concurrency.lockutils [None req-929590f7-b02f-4339-9d80-5d6bbb4a18d7 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Acquiring lock "0ac963b1-120a-464b-8228-3393135dd182" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 802.588645] env[62070]: DEBUG oslo_concurrency.lockutils [None req-929590f7-b02f-4339-9d80-5d6bbb4a18d7 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Acquired lock "0ac963b1-120a-464b-8228-3393135dd182" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 802.589053] env[62070]: DEBUG oslo_concurrency.lockutils [req-907ca6bc-7a5f-46ce-a78b-80ed4c96544b req-5ab5ccdd-8564-4e57-ad1f-690d29c70d7b service nova] Acquired lock "refresh_cache-0ac963b1-120a-464b-8228-3393135dd182" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 802.589301] env[62070]: DEBUG nova.network.neutron [req-907ca6bc-7a5f-46ce-a78b-80ed4c96544b req-5ab5ccdd-8564-4e57-ad1f-690d29c70d7b service nova] [instance: 0ac963b1-120a-464b-8228-3393135dd182] Refreshing network info cache for port 848ce3e0-8d08-460b-b770-75628ae28fd3 {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 802.591437] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca886c14-4c1f-4271-98b4-bc39d1d7a696 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.607209] env[62070]: DEBUG oslo_vmware.api [None req-79dfc138-9989-48ce-b920-501f82a9be5b tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52cb88eb-f206-bde6-85cf-22a959221f3b, 'name': SearchDatastore_Task, 'duration_secs': 0.010747} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 802.620927] env[62070]: DEBUG oslo_concurrency.lockutils [None req-79dfc138-9989-48ce-b920-501f82a9be5b tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Releasing lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 802.621351] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-79dfc138-9989-48ce-b920-501f82a9be5b tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore2] c3c6e93c-80be-4e71-87fb-2ff8db8d30fe/c3c6e93c-80be-4e71-87fb-2ff8db8d30fe.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 802.622610] env[62070]: DEBUG nova.virt.hardware [None req-929590f7-b02f-4339-9d80-5d6bbb4a18d7 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T09:21:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 802.622835] env[62070]: DEBUG nova.virt.hardware [None req-929590f7-b02f-4339-9d80-5d6bbb4a18d7 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Flavor limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 802.622996] env[62070]: DEBUG nova.virt.hardware [None req-929590f7-b02f-4339-9d80-5d6bbb4a18d7 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Image limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 802.623202] env[62070]: DEBUG nova.virt.hardware [None req-929590f7-b02f-4339-9d80-5d6bbb4a18d7 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Flavor pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 802.623352] env[62070]: DEBUG nova.virt.hardware [None req-929590f7-b02f-4339-9d80-5d6bbb4a18d7 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Image pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 802.623499] env[62070]: DEBUG nova.virt.hardware [None req-929590f7-b02f-4339-9d80-5d6bbb4a18d7 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 
802.623702] env[62070]: DEBUG nova.virt.hardware [None req-929590f7-b02f-4339-9d80-5d6bbb4a18d7 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 802.623863] env[62070]: DEBUG nova.virt.hardware [None req-929590f7-b02f-4339-9d80-5d6bbb4a18d7 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 802.624044] env[62070]: DEBUG nova.virt.hardware [None req-929590f7-b02f-4339-9d80-5d6bbb4a18d7 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Got 1 possible topologies {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 802.624216] env[62070]: DEBUG nova.virt.hardware [None req-929590f7-b02f-4339-9d80-5d6bbb4a18d7 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 802.624405] env[62070]: DEBUG nova.virt.hardware [None req-929590f7-b02f-4339-9d80-5d6bbb4a18d7 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 802.630661] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-929590f7-b02f-4339-9d80-5d6bbb4a18d7 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] [instance: 0ac963b1-120a-464b-8228-3393135dd182] Reconfiguring VM to attach interface {{(pid=62070) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1929}} [ 802.631068] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6578fefe-a6f2-433c-911f-0627322a57aa tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Acquired lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 802.631286] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-6578fefe-a6f2-433c-911f-0627322a57aa tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 802.631520] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d09df695-a636-4cac-b0a9-405d60ae7912 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.633958] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4c9f853d-2bbe-49da-8185-d72778184436 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.646637] env[62070]: DEBUG oslo_vmware.service [-] Invoking 
FileManager.MakeDirectory with opID=oslo.vmware-46c19a73-94fc-4be0-9da9-1832a4776d2e {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.649448] env[62070]: DEBUG nova.compute.utils [None req-24dc43a0-9208-4ff7-9026-97bbf9aa150b tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Using /dev/sd instead of None {{(pid=62070) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 802.653113] env[62070]: DEBUG nova.compute.manager [None req-24dc43a0-9208-4ff7-9026-97bbf9aa150b tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 328fbc92-8162-4e12-a02d-6e9cafe0c365] Allocating IP information in the background. {{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 802.653287] env[62070]: DEBUG nova.network.neutron [None req-24dc43a0-9208-4ff7-9026-97bbf9aa150b tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 328fbc92-8162-4e12-a02d-6e9cafe0c365] allocate_for_instance() {{(pid=62070) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 802.660286] env[62070]: DEBUG oslo_vmware.api [None req-79dfc138-9989-48ce-b920-501f82a9be5b tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Waiting for the task: (returnval){ [ 802.660286] env[62070]: value = "task-1121703" [ 802.660286] env[62070]: _type = "Task" [ 802.660286] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 802.667223] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-6578fefe-a6f2-433c-911f-0627322a57aa tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 802.667455] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-6578fefe-a6f2-433c-911f-0627322a57aa tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62070) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 802.668697] env[62070]: DEBUG oslo_vmware.api [None req-929590f7-b02f-4339-9d80-5d6bbb4a18d7 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Waiting for the task: (returnval){ [ 802.668697] env[62070]: value = "task-1121704" [ 802.668697] env[62070]: _type = "Task" [ 802.668697] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 802.669332] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b24916af-b302-4069-bce5-51642a295d09 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.678077] env[62070]: DEBUG oslo_vmware.api [None req-79dfc138-9989-48ce-b920-501f82a9be5b tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Task: {'id': task-1121703, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 802.685255] env[62070]: DEBUG oslo_vmware.api [None req-6578fefe-a6f2-433c-911f-0627322a57aa tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Waiting for the task: (returnval){ [ 802.685255] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]525e3c2f-5d6f-2144-8651-256a11d5f45f" [ 802.685255] env[62070]: _type = "Task" [ 802.685255] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 802.685502] env[62070]: DEBUG oslo_vmware.api [None req-929590f7-b02f-4339-9d80-5d6bbb4a18d7 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Task: {'id': task-1121704, 'name': ReconfigVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 802.695989] env[62070]: DEBUG oslo_vmware.api [None req-6578fefe-a6f2-433c-911f-0627322a57aa tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]525e3c2f-5d6f-2144-8651-256a11d5f45f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 802.712529] env[62070]: DEBUG nova.policy [None req-24dc43a0-9208-4ff7-9026-97bbf9aa150b tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '093d4b68ffd04d4d951f5be91bfc76e8', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'eac8e5edc8f14fff89aba7c8cb6cac5d', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62070) authorize /opt/stack/nova/nova/policy.py:201}} [ 802.857696] env[62070]: DEBUG nova.network.neutron [None req-7d89b3fa-4dcc-4992-8983-de4ad7c29798 tempest-ServerMetadataTestJSON-571714377 tempest-ServerMetadataTestJSON-571714377-project-member] [instance: efef4aac-5b74-4a41-9f74-3d4cb4f80cdb] Instance cache missing network info. {{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 803.081172] env[62070]: DEBUG oslo_vmware.api [None req-2de264e4-258a-4ba0-b64b-88da6d460fb5 tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Task: {'id': task-1121702, 'name': PowerOnVM_Task, 'duration_secs': 0.531261} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 803.081573] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-2de264e4-258a-4ba0-b64b-88da6d460fb5 tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] [instance: 2368b649-f931-454c-92cc-971df4155d90] Powered on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 803.081906] env[62070]: INFO nova.compute.manager [None req-2de264e4-258a-4ba0-b64b-88da6d460fb5 tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] [instance: 2368b649-f931-454c-92cc-971df4155d90] Took 10.44 seconds to spawn the instance on the hypervisor. [ 803.082180] env[62070]: DEBUG nova.compute.manager [None req-2de264e4-258a-4ba0-b64b-88da6d460fb5 tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] [instance: 2368b649-f931-454c-92cc-971df4155d90] Checking state {{(pid=62070) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 803.083017] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7727c159-7dc6-4ab7-9ef7-7b318d99a51e {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.153698] env[62070]: DEBUG nova.compute.manager [None req-24dc43a0-9208-4ff7-9026-97bbf9aa150b tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 328fbc92-8162-4e12-a02d-6e9cafe0c365] Start building block device mappings for instance. {{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 803.173982] env[62070]: DEBUG oslo_vmware.api [None req-79dfc138-9989-48ce-b920-501f82a9be5b tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Task: {'id': task-1121703, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 803.185308] env[62070]: DEBUG oslo_vmware.api [None req-929590f7-b02f-4339-9d80-5d6bbb4a18d7 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Task: {'id': task-1121704, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 803.200217] env[62070]: DEBUG oslo_vmware.api [None req-6578fefe-a6f2-433c-911f-0627322a57aa tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]525e3c2f-5d6f-2144-8651-256a11d5f45f, 'name': SearchDatastore_Task, 'duration_secs': 0.014473} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 803.201263] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5f36c529-80ff-4c7d-a4fa-6dae577c2712 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.208841] env[62070]: DEBUG oslo_vmware.api [None req-6578fefe-a6f2-433c-911f-0627322a57aa tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Waiting for the task: (returnval){ [ 803.208841] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]52caa3c0-658e-2d8e-be49-e4e0ef227cc6" [ 803.208841] env[62070]: _type = "Task" [ 803.208841] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 803.219238] env[62070]: DEBUG oslo_vmware.api [None req-6578fefe-a6f2-433c-911f-0627322a57aa tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52caa3c0-658e-2d8e-be49-e4e0ef227cc6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 803.223215] env[62070]: DEBUG nova.network.neutron [None req-7d89b3fa-4dcc-4992-8983-de4ad7c29798 tempest-ServerMetadataTestJSON-571714377 tempest-ServerMetadataTestJSON-571714377-project-member] [instance: efef4aac-5b74-4a41-9f74-3d4cb4f80cdb] Updating instance_info_cache with network_info: [{"id": "90d77a95-7927-420d-8c8d-2f64e885a145", "address": "fa:16:3e:84:92:9a", "network": {"id": "94663b26-38d9-4aac-b575-d3611bb6011f", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-1629884140-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8fa45cca1a114c0aa1e93a50672626ef", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca83c3bc-f3ec-42ab-85b3-192512f766f3", "external-id": "nsx-vlan-transportzone-879", "segmentation_id": 879, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap90d77a95-79", "ovs_interfaceid": "90d77a95-7927-420d-8c8d-2f64e885a145", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 803.334425] env[62070]: DEBUG nova.network.neutron [None req-24dc43a0-9208-4ff7-9026-97bbf9aa150b tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 328fbc92-8162-4e12-a02d-6e9cafe0c365] Successfully created port: 11a1a24a-57ad-4bbf-9f1d-58391009ddde {{(pid=62070) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 803.375904] env[62070]: DEBUG nova.network.neutron [req-907ca6bc-7a5f-46ce-a78b-80ed4c96544b req-5ab5ccdd-8564-4e57-ad1f-690d29c70d7b service nova] [instance: 0ac963b1-120a-464b-8228-3393135dd182] Updated VIF entry in instance network info 
cache for port 848ce3e0-8d08-460b-b770-75628ae28fd3. {{(pid=62070) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 803.376543] env[62070]: DEBUG nova.network.neutron [req-907ca6bc-7a5f-46ce-a78b-80ed4c96544b req-5ab5ccdd-8564-4e57-ad1f-690d29c70d7b service nova] [instance: 0ac963b1-120a-464b-8228-3393135dd182] Updating instance_info_cache with network_info: [{"id": "6326b098-3c76-4152-b623-8921285ec01b", "address": "fa:16:3e:cc:12:22", "network": {"id": "48dc51c7-cfa4-452e-9d72-2968d9a40dfa", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-274800531-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.179", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "925dff51764c4b56ae7ea05fbde2ecdd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2c7c1b46-cb81-45da-b5aa-7905d4da5854", "external-id": "nsx-vlan-transportzone-15", "segmentation_id": 15, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6326b098-3c", "ovs_interfaceid": "6326b098-3c76-4152-b623-8921285ec01b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "7df84135-5c3d-48c5-b2cf-176e77094879", "address": "fa:16:3e:b9:a9:95", "network": {"id": "48dc51c7-cfa4-452e-9d72-2968d9a40dfa", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-274800531-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "925dff51764c4b56ae7ea05fbde2ecdd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2c7c1b46-cb81-45da-b5aa-7905d4da5854", "external-id": "nsx-vlan-transportzone-15", "segmentation_id": 15, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7df84135-5c", "ovs_interfaceid": "7df84135-5c3d-48c5-b2cf-176e77094879", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "848ce3e0-8d08-460b-b770-75628ae28fd3", "address": "fa:16:3e:28:b0:1e", "network": {"id": "48dc51c7-cfa4-452e-9d72-2968d9a40dfa", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-274800531-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "925dff51764c4b56ae7ea05fbde2ecdd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": 
{"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2c7c1b46-cb81-45da-b5aa-7905d4da5854", "external-id": "nsx-vlan-transportzone-15", "segmentation_id": 15, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap848ce3e0-8d", "ovs_interfaceid": "848ce3e0-8d08-460b-b770-75628ae28fd3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 803.604025] env[62070]: INFO nova.compute.manager [None req-2de264e4-258a-4ba0-b64b-88da6d460fb5 tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] [instance: 2368b649-f931-454c-92cc-971df4155d90] Took 43.28 seconds to build instance. [ 803.678926] env[62070]: DEBUG oslo_vmware.api [None req-79dfc138-9989-48ce-b920-501f82a9be5b tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Task: {'id': task-1121703, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.647179} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 803.687670] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-79dfc138-9989-48ce-b920-501f82a9be5b tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore2] c3c6e93c-80be-4e71-87fb-2ff8db8d30fe/c3c6e93c-80be-4e71-87fb-2ff8db8d30fe.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 803.688085] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-79dfc138-9989-48ce-b920-501f82a9be5b tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: c3c6e93c-80be-4e71-87fb-2ff8db8d30fe] Extending root virtual disk to 1048576 {{(pid=62070) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 803.688773] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a50f1994-8afd-456b-b65a-fde0cacb0042 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.700242] env[62070]: DEBUG oslo_vmware.api [None req-929590f7-b02f-4339-9d80-5d6bbb4a18d7 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Task: {'id': task-1121704, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 803.705827] env[62070]: DEBUG oslo_vmware.api [None req-79dfc138-9989-48ce-b920-501f82a9be5b tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Waiting for the task: (returnval){ [ 803.705827] env[62070]: value = "task-1121705" [ 803.705827] env[62070]: _type = "Task" [ 803.705827] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 803.719225] env[62070]: DEBUG oslo_vmware.api [None req-79dfc138-9989-48ce-b920-501f82a9be5b tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Task: {'id': task-1121705, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 803.725904] env[62070]: DEBUG oslo_vmware.api [None req-6578fefe-a6f2-433c-911f-0627322a57aa tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52caa3c0-658e-2d8e-be49-e4e0ef227cc6, 'name': SearchDatastore_Task, 'duration_secs': 0.056777} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 803.726454] env[62070]: DEBUG oslo_concurrency.lockutils [None req-7d89b3fa-4dcc-4992-8983-de4ad7c29798 tempest-ServerMetadataTestJSON-571714377 tempest-ServerMetadataTestJSON-571714377-project-member] Releasing lock "refresh_cache-efef4aac-5b74-4a41-9f74-3d4cb4f80cdb" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 803.726669] env[62070]: DEBUG nova.compute.manager [None req-7d89b3fa-4dcc-4992-8983-de4ad7c29798 tempest-ServerMetadataTestJSON-571714377 tempest-ServerMetadataTestJSON-571714377-project-member] [instance: efef4aac-5b74-4a41-9f74-3d4cb4f80cdb] Instance network_info: |[{"id": "90d77a95-7927-420d-8c8d-2f64e885a145", "address": "fa:16:3e:84:92:9a", "network": {"id": "94663b26-38d9-4aac-b575-d3611bb6011f", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-1629884140-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8fa45cca1a114c0aa1e93a50672626ef", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca83c3bc-f3ec-42ab-85b3-192512f766f3", "external-id": "nsx-vlan-transportzone-879", "segmentation_id": 879, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap90d77a95-79", "ovs_interfaceid": "90d77a95-7927-420d-8c8d-2f64e885a145", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 803.727173] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6578fefe-a6f2-433c-911f-0627322a57aa tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Releasing lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 803.727437] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-6578fefe-a6f2-433c-911f-0627322a57aa tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore2] f75ed36e-16c8-4a6b-bd39-eb4057ef0691/f75ed36e-16c8-4a6b-bd39-eb4057ef0691.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 803.727750] env[62070]: DEBUG oslo_concurrency.lockutils 
[req-cfea83ec-ce18-4261-b341-d9b7f3686027 req-1d890c3f-49c8-491e-b3b1-a19d548460eb service nova] Acquired lock "refresh_cache-efef4aac-5b74-4a41-9f74-3d4cb4f80cdb" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 803.727956] env[62070]: DEBUG nova.network.neutron [req-cfea83ec-ce18-4261-b341-d9b7f3686027 req-1d890c3f-49c8-491e-b3b1-a19d548460eb service nova] [instance: efef4aac-5b74-4a41-9f74-3d4cb4f80cdb] Refreshing network info cache for port 90d77a95-7927-420d-8c8d-2f64e885a145 {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 803.729136] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-7d89b3fa-4dcc-4992-8983-de4ad7c29798 tempest-ServerMetadataTestJSON-571714377 tempest-ServerMetadataTestJSON-571714377-project-member] [instance: efef4aac-5b74-4a41-9f74-3d4cb4f80cdb] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:84:92:9a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ca83c3bc-f3ec-42ab-85b3-192512f766f3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '90d77a95-7927-420d-8c8d-2f64e885a145', 'vif_model': 'vmxnet3'}] {{(pid=62070) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 803.737125] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-7d89b3fa-4dcc-4992-8983-de4ad7c29798 tempest-ServerMetadataTestJSON-571714377 tempest-ServerMetadataTestJSON-571714377-project-member] Creating folder: Project (8fa45cca1a114c0aa1e93a50672626ef). Parent ref: group-v245319. {{(pid=62070) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 803.737877] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-539fb0da-88ea-4cb7-827b-f459d6c1ef4c {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.740449] env[62070]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4b6f9bc5-e3e7-4761-a0f1-884e13f37232 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.751349] env[62070]: DEBUG oslo_vmware.api [None req-6578fefe-a6f2-433c-911f-0627322a57aa tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Waiting for the task: (returnval){ [ 803.751349] env[62070]: value = "task-1121707" [ 803.751349] env[62070]: _type = "Task" [ 803.751349] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 803.756980] env[62070]: INFO nova.virt.vmwareapi.vm_util [None req-7d89b3fa-4dcc-4992-8983-de4ad7c29798 tempest-ServerMetadataTestJSON-571714377 tempest-ServerMetadataTestJSON-571714377-project-member] Created folder: Project (8fa45cca1a114c0aa1e93a50672626ef) in parent group-v245319. [ 803.757274] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-7d89b3fa-4dcc-4992-8983-de4ad7c29798 tempest-ServerMetadataTestJSON-571714377 tempest-ServerMetadataTestJSON-571714377-project-member] Creating folder: Instances. Parent ref: group-v245401. 
{{(pid=62070) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 803.760069] env[62070]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d0475161-11d2-4f61-8460-831af45bd64d {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.766024] env[62070]: DEBUG oslo_vmware.api [None req-6578fefe-a6f2-433c-911f-0627322a57aa tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Task: {'id': task-1121707, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 803.777060] env[62070]: INFO nova.virt.vmwareapi.vm_util [None req-7d89b3fa-4dcc-4992-8983-de4ad7c29798 tempest-ServerMetadataTestJSON-571714377 tempest-ServerMetadataTestJSON-571714377-project-member] Created folder: Instances in parent group-v245401. [ 803.777373] env[62070]: DEBUG oslo.service.loopingcall [None req-7d89b3fa-4dcc-4992-8983-de4ad7c29798 tempest-ServerMetadataTestJSON-571714377 tempest-ServerMetadataTestJSON-571714377-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 803.777595] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: efef4aac-5b74-4a41-9f74-3d4cb4f80cdb] Creating VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 803.778856] env[62070]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0398d982-1cc6-4cf9-a04b-ac6155d790ec {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.794935] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08503008-f774-4998-8bde-6dbfb319b21a {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.805161] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c936100-00d1-4d0d-acaa-a7238670fe73 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.808645] env[62070]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 803.808645] env[62070]: value = "task-1121709" [ 803.808645] env[62070]: _type = "Task" [ 803.808645] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 803.841370] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e53c67c3-86c4-4a11-92d5-b1990b8ccde0 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.848431] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1121709, 'name': CreateVM_Task} progress is 10%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 803.855596] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1ffe853-bcf5-46e5-90c3-f5e5d166898e {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.871795] env[62070]: DEBUG nova.compute.provider_tree [None req-9eb5f919-38d3-41ef-8aa0-3ff1772842b2 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 803.879419] env[62070]: DEBUG oslo_concurrency.lockutils [req-907ca6bc-7a5f-46ce-a78b-80ed4c96544b req-5ab5ccdd-8564-4e57-ad1f-690d29c70d7b service nova] Releasing lock "refresh_cache-0ac963b1-120a-464b-8228-3393135dd182" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 804.105357] env[62070]: DEBUG oslo_concurrency.lockutils [None req-2de264e4-258a-4ba0-b64b-88da6d460fb5 tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Lock "2368b649-f931-454c-92cc-971df4155d90" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 122.964s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 804.173316] env[62070]: DEBUG nova.compute.manager [None req-24dc43a0-9208-4ff7-9026-97bbf9aa150b tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 328fbc92-8162-4e12-a02d-6e9cafe0c365] Start spawning the instance on the hypervisor. {{(pid=62070) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 804.191684] env[62070]: DEBUG oslo_vmware.api [None req-929590f7-b02f-4339-9d80-5d6bbb4a18d7 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Task: {'id': task-1121704, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 804.200805] env[62070]: DEBUG nova.virt.hardware [None req-24dc43a0-9208-4ff7-9026-97bbf9aa150b tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T09:21:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T09:21:17Z,direct_url=,disk_format='vmdk',id=43ea607c-7ece-4601-9b11-75c6a16aa7dd,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9d42cb2bbadf40d6b35f237f71234611',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T09:21:18Z,virtual_size=,visibility=), allow threads: False {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 804.201133] env[62070]: DEBUG nova.virt.hardware [None req-24dc43a0-9208-4ff7-9026-97bbf9aa150b tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Flavor limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 804.201327] env[62070]: DEBUG nova.virt.hardware [None req-24dc43a0-9208-4ff7-9026-97bbf9aa150b tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Image limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 804.201526] env[62070]: DEBUG nova.virt.hardware [None req-24dc43a0-9208-4ff7-9026-97bbf9aa150b tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Flavor pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 804.201681] env[62070]: DEBUG nova.virt.hardware [None req-24dc43a0-9208-4ff7-9026-97bbf9aa150b tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Image pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 804.201834] env[62070]: DEBUG nova.virt.hardware [None req-24dc43a0-9208-4ff7-9026-97bbf9aa150b tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 804.202058] env[62070]: DEBUG nova.virt.hardware [None req-24dc43a0-9208-4ff7-9026-97bbf9aa150b tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 804.202260] env[62070]: DEBUG nova.virt.hardware [None req-24dc43a0-9208-4ff7-9026-97bbf9aa150b tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 804.202449] env[62070]: DEBUG nova.virt.hardware [None req-24dc43a0-9208-4ff7-9026-97bbf9aa150b tempest-ImagesTestJSON-1351627684 
tempest-ImagesTestJSON-1351627684-project-member] Got 1 possible topologies {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 804.202618] env[62070]: DEBUG nova.virt.hardware [None req-24dc43a0-9208-4ff7-9026-97bbf9aa150b tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 804.202801] env[62070]: DEBUG nova.virt.hardware [None req-24dc43a0-9208-4ff7-9026-97bbf9aa150b tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 804.204973] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fba74fa-e4b1-4146-bbbe-25eb8b5a83f7 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.219300] env[62070]: DEBUG oslo_vmware.api [None req-79dfc138-9989-48ce-b920-501f82a9be5b tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Task: {'id': task-1121705, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.147947} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 804.221911] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-79dfc138-9989-48ce-b920-501f82a9be5b tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: c3c6e93c-80be-4e71-87fb-2ff8db8d30fe] Extended root virtual disk {{(pid=62070) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 804.223078] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e91e339d-a68e-494d-a837-7559e3063535 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.226617] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34b7130b-037a-4358-a6d6-9e282260cb18 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.260890] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-79dfc138-9989-48ce-b920-501f82a9be5b tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: c3c6e93c-80be-4e71-87fb-2ff8db8d30fe] Reconfiguring VM instance instance-0000003a to attach disk [datastore2] c3c6e93c-80be-4e71-87fb-2ff8db8d30fe/c3c6e93c-80be-4e71-87fb-2ff8db8d30fe.vmdk or device None with type sparse {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 804.269616] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d9e1d3a6-8267-428f-9430-d5d77234ffab {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.295444] env[62070]: DEBUG oslo_vmware.api [None req-6578fefe-a6f2-433c-911f-0627322a57aa tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Task: {'id': task-1121707, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 804.296520] env[62070]: DEBUG oslo_vmware.api [None req-79dfc138-9989-48ce-b920-501f82a9be5b tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Waiting for the task: (returnval){ [ 804.296520] env[62070]: value = "task-1121710" [ 804.296520] env[62070]: _type = "Task" [ 804.296520] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 804.306573] env[62070]: DEBUG oslo_vmware.api [None req-79dfc138-9989-48ce-b920-501f82a9be5b tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Task: {'id': task-1121710, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 804.319392] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1121709, 'name': CreateVM_Task} progress is 99%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 804.377211] env[62070]: DEBUG nova.scheduler.client.report [None req-9eb5f919-38d3-41ef-8aa0-3ff1772842b2 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 804.515659] env[62070]: DEBUG nova.network.neutron [req-cfea83ec-ce18-4261-b341-d9b7f3686027 req-1d890c3f-49c8-491e-b3b1-a19d548460eb service nova] [instance: efef4aac-5b74-4a41-9f74-3d4cb4f80cdb] Updated VIF entry in instance network info cache for port 90d77a95-7927-420d-8c8d-2f64e885a145. 
{{(pid=62070) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 804.516231] env[62070]: DEBUG nova.network.neutron [req-cfea83ec-ce18-4261-b341-d9b7f3686027 req-1d890c3f-49c8-491e-b3b1-a19d548460eb service nova] [instance: efef4aac-5b74-4a41-9f74-3d4cb4f80cdb] Updating instance_info_cache with network_info: [{"id": "90d77a95-7927-420d-8c8d-2f64e885a145", "address": "fa:16:3e:84:92:9a", "network": {"id": "94663b26-38d9-4aac-b575-d3611bb6011f", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-1629884140-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8fa45cca1a114c0aa1e93a50672626ef", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca83c3bc-f3ec-42ab-85b3-192512f766f3", "external-id": "nsx-vlan-transportzone-879", "segmentation_id": 879, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap90d77a95-79", "ovs_interfaceid": "90d77a95-7927-420d-8c8d-2f64e885a145", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 804.608372] env[62070]: DEBUG nova.compute.manager [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] [instance: 27987ff6-77c9-4876-8b39-dcc20ce4158a] Starting instance... {{(pid=62070) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 804.693434] env[62070]: DEBUG oslo_vmware.api [None req-929590f7-b02f-4339-9d80-5d6bbb4a18d7 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Task: {'id': task-1121704, 'name': ReconfigVM_Task, 'duration_secs': 1.828082} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 804.694128] env[62070]: DEBUG oslo_concurrency.lockutils [None req-929590f7-b02f-4339-9d80-5d6bbb4a18d7 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Releasing lock "0ac963b1-120a-464b-8228-3393135dd182" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 804.694451] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-929590f7-b02f-4339-9d80-5d6bbb4a18d7 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] [instance: 0ac963b1-120a-464b-8228-3393135dd182] Reconfigured VM to attach interface {{(pid=62070) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1943}} [ 804.782760] env[62070]: DEBUG oslo_vmware.api [None req-6578fefe-a6f2-433c-911f-0627322a57aa tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Task: {'id': task-1121707, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 804.812503] env[62070]: DEBUG oslo_vmware.api [None req-79dfc138-9989-48ce-b920-501f82a9be5b tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Task: {'id': task-1121710, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 804.824390] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1121709, 'name': CreateVM_Task, 'duration_secs': 0.657588} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 804.824666] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: efef4aac-5b74-4a41-9f74-3d4cb4f80cdb] Created VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 804.825529] env[62070]: DEBUG oslo_concurrency.lockutils [None req-7d89b3fa-4dcc-4992-8983-de4ad7c29798 tempest-ServerMetadataTestJSON-571714377 tempest-ServerMetadataTestJSON-571714377-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 804.825771] env[62070]: DEBUG oslo_concurrency.lockutils [None req-7d89b3fa-4dcc-4992-8983-de4ad7c29798 tempest-ServerMetadataTestJSON-571714377 tempest-ServerMetadataTestJSON-571714377-project-member] Acquired lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 804.826219] env[62070]: DEBUG oslo_concurrency.lockutils [None req-7d89b3fa-4dcc-4992-8983-de4ad7c29798 tempest-ServerMetadataTestJSON-571714377 tempest-ServerMetadataTestJSON-571714377-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 804.826552] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-881bcf5b-5a0a-41fc-8cb5-050cc1578f25 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.834132] env[62070]: DEBUG oslo_vmware.api [None req-7d89b3fa-4dcc-4992-8983-de4ad7c29798 tempest-ServerMetadataTestJSON-571714377 tempest-ServerMetadataTestJSON-571714377-project-member] Waiting for the task: (returnval){ [ 804.834132] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]52e8ca8a-afc9-6aad-05e9-0758775d05d7" [ 804.834132] env[62070]: _type = "Task" [ 804.834132] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 804.845696] env[62070]: DEBUG oslo_vmware.api [None req-7d89b3fa-4dcc-4992-8983-de4ad7c29798 tempest-ServerMetadataTestJSON-571714377 tempest-ServerMetadataTestJSON-571714377-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52e8ca8a-afc9-6aad-05e9-0758775d05d7, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 804.883511] env[62070]: DEBUG oslo_concurrency.lockutils [None req-9eb5f919-38d3-41ef-8aa0-3ff1772842b2 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.745s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 804.884036] env[62070]: DEBUG nova.compute.manager [None req-9eb5f919-38d3-41ef-8aa0-3ff1772842b2 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] [instance: 3d22f50a-e1b7-48f9-a044-df64d01dfeb4] Start building networks asynchronously for instance. {{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 804.887090] env[62070]: DEBUG oslo_concurrency.lockutils [None req-93ddb69e-2ee1-4bdc-9fb9-c0932c81d871 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 22.607s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 804.888657] env[62070]: INFO nova.compute.claims [None req-93ddb69e-2ee1-4bdc-9fb9-c0932c81d871 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] [instance: 5ec9074b-1237-4404-b13c-a7ca0dbe1d43] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 805.020405] env[62070]: DEBUG oslo_concurrency.lockutils [req-cfea83ec-ce18-4261-b341-d9b7f3686027 req-1d890c3f-49c8-491e-b3b1-a19d548460eb service nova] Releasing lock "refresh_cache-efef4aac-5b74-4a41-9f74-3d4cb4f80cdb" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 805.063891] env[62070]: DEBUG nova.network.neutron [None req-24dc43a0-9208-4ff7-9026-97bbf9aa150b tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 328fbc92-8162-4e12-a02d-6e9cafe0c365] Successfully updated port: 11a1a24a-57ad-4bbf-9f1d-58391009ddde {{(pid=62070) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 805.132487] env[62070]: DEBUG oslo_concurrency.lockutils [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 805.199120] env[62070]: DEBUG oslo_concurrency.lockutils [None req-929590f7-b02f-4339-9d80-5d6bbb4a18d7 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Lock "interface-0ac963b1-120a-464b-8228-3393135dd182-848ce3e0-8d08-460b-b770-75628ae28fd3" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 8.516s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 805.213442] env[62070]: DEBUG nova.compute.manager [req-cdd82f42-1d14-4fae-9573-3e3ccc32d757 req-efbe7ba9-a665-474f-abb6-20c26edbd8d5 service nova] [instance: 328fbc92-8162-4e12-a02d-6e9cafe0c365] Received event 
network-vif-plugged-11a1a24a-57ad-4bbf-9f1d-58391009ddde {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 805.214426] env[62070]: DEBUG oslo_concurrency.lockutils [req-cdd82f42-1d14-4fae-9573-3e3ccc32d757 req-efbe7ba9-a665-474f-abb6-20c26edbd8d5 service nova] Acquiring lock "328fbc92-8162-4e12-a02d-6e9cafe0c365-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 805.214426] env[62070]: DEBUG oslo_concurrency.lockutils [req-cdd82f42-1d14-4fae-9573-3e3ccc32d757 req-efbe7ba9-a665-474f-abb6-20c26edbd8d5 service nova] Lock "328fbc92-8162-4e12-a02d-6e9cafe0c365-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 805.214426] env[62070]: DEBUG oslo_concurrency.lockutils [req-cdd82f42-1d14-4fae-9573-3e3ccc32d757 req-efbe7ba9-a665-474f-abb6-20c26edbd8d5 service nova] Lock "328fbc92-8162-4e12-a02d-6e9cafe0c365-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 805.214532] env[62070]: DEBUG nova.compute.manager [req-cdd82f42-1d14-4fae-9573-3e3ccc32d757 req-efbe7ba9-a665-474f-abb6-20c26edbd8d5 service nova] [instance: 328fbc92-8162-4e12-a02d-6e9cafe0c365] No waiting events found dispatching network-vif-plugged-11a1a24a-57ad-4bbf-9f1d-58391009ddde {{(pid=62070) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 805.214721] env[62070]: WARNING nova.compute.manager [req-cdd82f42-1d14-4fae-9573-3e3ccc32d757 req-efbe7ba9-a665-474f-abb6-20c26edbd8d5 service nova] [instance: 328fbc92-8162-4e12-a02d-6e9cafe0c365] Received unexpected event network-vif-plugged-11a1a24a-57ad-4bbf-9f1d-58391009ddde for instance with vm_state building and task_state spawning. [ 805.279737] env[62070]: DEBUG oslo_vmware.api [None req-6578fefe-a6f2-433c-911f-0627322a57aa tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Task: {'id': task-1121707, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.191099} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 805.280034] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-6578fefe-a6f2-433c-911f-0627322a57aa tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore2] f75ed36e-16c8-4a6b-bd39-eb4057ef0691/f75ed36e-16c8-4a6b-bd39-eb4057ef0691.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 805.280265] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-6578fefe-a6f2-433c-911f-0627322a57aa tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] [instance: f75ed36e-16c8-4a6b-bd39-eb4057ef0691] Extending root virtual disk to 1048576 {{(pid=62070) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 805.280532] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7b2219ce-1ed6-498e-9b6a-cdb5debd08d3 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.290894] env[62070]: DEBUG oslo_vmware.api [None req-6578fefe-a6f2-433c-911f-0627322a57aa tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Waiting for the task: (returnval){ [ 805.290894] env[62070]: value = "task-1121711" [ 805.290894] env[62070]: _type = "Task" [ 805.290894] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 805.301189] env[62070]: DEBUG oslo_vmware.api [None req-6578fefe-a6f2-433c-911f-0627322a57aa tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Task: {'id': task-1121711, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 805.311611] env[62070]: DEBUG oslo_vmware.api [None req-79dfc138-9989-48ce-b920-501f82a9be5b tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Task: {'id': task-1121710, 'name': ReconfigVM_Task, 'duration_secs': 0.802244} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 805.311899] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-79dfc138-9989-48ce-b920-501f82a9be5b tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: c3c6e93c-80be-4e71-87fb-2ff8db8d30fe] Reconfigured VM instance instance-0000003a to attach disk [datastore2] c3c6e93c-80be-4e71-87fb-2ff8db8d30fe/c3c6e93c-80be-4e71-87fb-2ff8db8d30fe.vmdk or device None with type sparse {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 805.312584] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a8a28874-e985-4d87-85c8-24665a1df956 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.319960] env[62070]: DEBUG oslo_vmware.api [None req-79dfc138-9989-48ce-b920-501f82a9be5b tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Waiting for the task: (returnval){ [ 805.319960] env[62070]: value = "task-1121712" [ 805.319960] env[62070]: _type = "Task" [ 805.319960] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 805.329428] env[62070]: DEBUG oslo_vmware.api [None req-79dfc138-9989-48ce-b920-501f82a9be5b tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Task: {'id': task-1121712, 'name': Rename_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 805.344621] env[62070]: DEBUG oslo_vmware.api [None req-7d89b3fa-4dcc-4992-8983-de4ad7c29798 tempest-ServerMetadataTestJSON-571714377 tempest-ServerMetadataTestJSON-571714377-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52e8ca8a-afc9-6aad-05e9-0758775d05d7, 'name': SearchDatastore_Task, 'duration_secs': 0.050821} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 805.345968] env[62070]: DEBUG oslo_concurrency.lockutils [None req-7d89b3fa-4dcc-4992-8983-de4ad7c29798 tempest-ServerMetadataTestJSON-571714377 tempest-ServerMetadataTestJSON-571714377-project-member] Releasing lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 805.345968] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-7d89b3fa-4dcc-4992-8983-de4ad7c29798 tempest-ServerMetadataTestJSON-571714377 tempest-ServerMetadataTestJSON-571714377-project-member] [instance: efef4aac-5b74-4a41-9f74-3d4cb4f80cdb] Processing image 43ea607c-7ece-4601-9b11-75c6a16aa7dd {{(pid=62070) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 805.345968] env[62070]: DEBUG oslo_concurrency.lockutils [None req-7d89b3fa-4dcc-4992-8983-de4ad7c29798 tempest-ServerMetadataTestJSON-571714377 tempest-ServerMetadataTestJSON-571714377-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 805.345968] env[62070]: DEBUG oslo_concurrency.lockutils [None req-7d89b3fa-4dcc-4992-8983-de4ad7c29798 tempest-ServerMetadataTestJSON-571714377 tempest-ServerMetadataTestJSON-571714377-project-member] Acquired lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 805.345968] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-7d89b3fa-4dcc-4992-8983-de4ad7c29798 tempest-ServerMetadataTestJSON-571714377 tempest-ServerMetadataTestJSON-571714377-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 805.346419] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-252a346c-57dd-40ad-ab67-d7122f03359e {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.358831] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-7d89b3fa-4dcc-4992-8983-de4ad7c29798 tempest-ServerMetadataTestJSON-571714377 tempest-ServerMetadataTestJSON-571714377-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 805.359086] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-7d89b3fa-4dcc-4992-8983-de4ad7c29798 tempest-ServerMetadataTestJSON-571714377 tempest-ServerMetadataTestJSON-571714377-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62070) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 805.359909] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2e3dd163-a5e0-4220-b554-25fef0bebff8 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.366821] env[62070]: DEBUG oslo_vmware.api [None req-7d89b3fa-4dcc-4992-8983-de4ad7c29798 tempest-ServerMetadataTestJSON-571714377 tempest-ServerMetadataTestJSON-571714377-project-member] Waiting for the task: (returnval){ [ 805.366821] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]52cab1b3-7097-54f8-efc8-58c5ecaf5a79" [ 805.366821] env[62070]: _type = "Task" [ 805.366821] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 805.376730] env[62070]: DEBUG oslo_vmware.api [None req-7d89b3fa-4dcc-4992-8983-de4ad7c29798 tempest-ServerMetadataTestJSON-571714377 tempest-ServerMetadataTestJSON-571714377-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52cab1b3-7097-54f8-efc8-58c5ecaf5a79, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 805.393596] env[62070]: DEBUG nova.compute.utils [None req-9eb5f919-38d3-41ef-8aa0-3ff1772842b2 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Using /dev/sd instead of None {{(pid=62070) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 805.397646] env[62070]: DEBUG nova.compute.manager [None req-9eb5f919-38d3-41ef-8aa0-3ff1772842b2 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] [instance: 3d22f50a-e1b7-48f9-a044-df64d01dfeb4] Allocating IP information in the background. 
{{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 805.398027] env[62070]: DEBUG nova.network.neutron [None req-9eb5f919-38d3-41ef-8aa0-3ff1772842b2 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] [instance: 3d22f50a-e1b7-48f9-a044-df64d01dfeb4] allocate_for_instance() {{(pid=62070) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 805.517865] env[62070]: DEBUG nova.policy [None req-9eb5f919-38d3-41ef-8aa0-3ff1772842b2 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '58b377e0d90a45a89966048bd20f609f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1a94db233e3a43dc9aa7cf887c6cb1f6', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62070) authorize /opt/stack/nova/nova/policy.py:201}} [ 805.566258] env[62070]: DEBUG oslo_concurrency.lockutils [None req-24dc43a0-9208-4ff7-9026-97bbf9aa150b tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Acquiring lock "refresh_cache-328fbc92-8162-4e12-a02d-6e9cafe0c365" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 805.566447] env[62070]: DEBUG oslo_concurrency.lockutils [None req-24dc43a0-9208-4ff7-9026-97bbf9aa150b tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Acquired lock "refresh_cache-328fbc92-8162-4e12-a02d-6e9cafe0c365" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 805.566607] env[62070]: DEBUG nova.network.neutron [None req-24dc43a0-9208-4ff7-9026-97bbf9aa150b tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 328fbc92-8162-4e12-a02d-6e9cafe0c365] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 805.802818] env[62070]: DEBUG oslo_vmware.api [None req-6578fefe-a6f2-433c-911f-0627322a57aa tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Task: {'id': task-1121711, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.080887} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 805.803212] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-6578fefe-a6f2-433c-911f-0627322a57aa tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] [instance: f75ed36e-16c8-4a6b-bd39-eb4057ef0691] Extended root virtual disk {{(pid=62070) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 805.804142] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e57cbb3-359e-4b29-bbbd-f0f92762965d {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.838744] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-6578fefe-a6f2-433c-911f-0627322a57aa tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] [instance: f75ed36e-16c8-4a6b-bd39-eb4057ef0691] Reconfiguring VM instance instance-00000039 to attach disk [datastore2] f75ed36e-16c8-4a6b-bd39-eb4057ef0691/f75ed36e-16c8-4a6b-bd39-eb4057ef0691.vmdk or device None with type sparse {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 805.842360] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3a23d8b5-4b9f-4876-a1ab-667fe6912a66 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.865767] env[62070]: DEBUG oslo_vmware.api [None req-79dfc138-9989-48ce-b920-501f82a9be5b tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Task: {'id': task-1121712, 'name': Rename_Task, 'duration_secs': 0.162249} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 805.867494] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-79dfc138-9989-48ce-b920-501f82a9be5b tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: c3c6e93c-80be-4e71-87fb-2ff8db8d30fe] Powering on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 805.867878] env[62070]: DEBUG oslo_vmware.api [None req-6578fefe-a6f2-433c-911f-0627322a57aa tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Waiting for the task: (returnval){ [ 805.867878] env[62070]: value = "task-1121713" [ 805.867878] env[62070]: _type = "Task" [ 805.867878] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 805.868132] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9f1223b4-4972-496d-9a0c-ed94c6a2a591 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.883316] env[62070]: DEBUG oslo_vmware.api [None req-6578fefe-a6f2-433c-911f-0627322a57aa tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Task: {'id': task-1121713, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 805.888913] env[62070]: DEBUG oslo_vmware.api [None req-79dfc138-9989-48ce-b920-501f82a9be5b tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Waiting for the task: (returnval){ [ 805.888913] env[62070]: value = "task-1121714" [ 805.888913] env[62070]: _type = "Task" [ 805.888913] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 805.889494] env[62070]: DEBUG oslo_vmware.api [None req-7d89b3fa-4dcc-4992-8983-de4ad7c29798 tempest-ServerMetadataTestJSON-571714377 tempest-ServerMetadataTestJSON-571714377-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52cab1b3-7097-54f8-efc8-58c5ecaf5a79, 'name': SearchDatastore_Task, 'duration_secs': 0.011881} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 805.894285] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-851d6bbd-5c19-47bb-914b-d45446f16a51 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.898169] env[62070]: DEBUG nova.compute.manager [None req-9eb5f919-38d3-41ef-8aa0-3ff1772842b2 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] [instance: 3d22f50a-e1b7-48f9-a044-df64d01dfeb4] Start building block device mappings for instance. {{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 805.912780] env[62070]: DEBUG oslo_vmware.api [None req-79dfc138-9989-48ce-b920-501f82a9be5b tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Task: {'id': task-1121714, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 805.913265] env[62070]: DEBUG oslo_vmware.api [None req-7d89b3fa-4dcc-4992-8983-de4ad7c29798 tempest-ServerMetadataTestJSON-571714377 tempest-ServerMetadataTestJSON-571714377-project-member] Waiting for the task: (returnval){ [ 805.913265] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]527e0a10-4c8f-1099-d00c-d091151cec21" [ 805.913265] env[62070]: _type = "Task" [ 805.913265] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 805.926669] env[62070]: DEBUG oslo_vmware.api [None req-7d89b3fa-4dcc-4992-8983-de4ad7c29798 tempest-ServerMetadataTestJSON-571714377 tempest-ServerMetadataTestJSON-571714377-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]527e0a10-4c8f-1099-d00c-d091151cec21, 'name': SearchDatastore_Task, 'duration_secs': 0.015294} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 805.926669] env[62070]: DEBUG oslo_concurrency.lockutils [None req-7d89b3fa-4dcc-4992-8983-de4ad7c29798 tempest-ServerMetadataTestJSON-571714377 tempest-ServerMetadataTestJSON-571714377-project-member] Releasing lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 805.926931] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-7d89b3fa-4dcc-4992-8983-de4ad7c29798 tempest-ServerMetadataTestJSON-571714377 tempest-ServerMetadataTestJSON-571714377-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore2] efef4aac-5b74-4a41-9f74-3d4cb4f80cdb/efef4aac-5b74-4a41-9f74-3d4cb4f80cdb.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 805.927276] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-35c6e7f0-3fbe-4723-9f7a-fa7ae4c77bbc {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.936621] env[62070]: DEBUG oslo_vmware.api [None req-7d89b3fa-4dcc-4992-8983-de4ad7c29798 tempest-ServerMetadataTestJSON-571714377 tempest-ServerMetadataTestJSON-571714377-project-member] Waiting for the task: (returnval){ [ 805.936621] env[62070]: value = "task-1121715" [ 805.936621] env[62070]: _type = "Task" [ 805.936621] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 805.940726] env[62070]: DEBUG nova.network.neutron [None req-9eb5f919-38d3-41ef-8aa0-3ff1772842b2 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] [instance: 3d22f50a-e1b7-48f9-a044-df64d01dfeb4] Successfully created port: aef8b9b0-4bbd-4af6-b65d-f7e964775fd4 {{(pid=62070) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 805.951872] env[62070]: DEBUG oslo_vmware.api [None req-7d89b3fa-4dcc-4992-8983-de4ad7c29798 tempest-ServerMetadataTestJSON-571714377 tempest-ServerMetadataTestJSON-571714377-project-member] Task: {'id': task-1121715, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 806.107150] env[62070]: DEBUG nova.network.neutron [None req-24dc43a0-9208-4ff7-9026-97bbf9aa150b tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 328fbc92-8162-4e12-a02d-6e9cafe0c365] Instance cache missing network info. 
{{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 806.316463] env[62070]: DEBUG nova.network.neutron [None req-24dc43a0-9208-4ff7-9026-97bbf9aa150b tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 328fbc92-8162-4e12-a02d-6e9cafe0c365] Updating instance_info_cache with network_info: [{"id": "11a1a24a-57ad-4bbf-9f1d-58391009ddde", "address": "fa:16:3e:61:72:30", "network": {"id": "5f4568f0-c3e8-497f-b7d6-8d92db2f4066", "bridge": "br-int", "label": "tempest-ImagesTestJSON-24352632-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "eac8e5edc8f14fff89aba7c8cb6cac5d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "54495d8d-2696-4f65-b925-e567abdc205f", "external-id": "nsx-vlan-transportzone-220", "segmentation_id": 220, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap11a1a24a-57", "ovs_interfaceid": "11a1a24a-57ad-4bbf-9f1d-58391009ddde", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 806.389333] env[62070]: DEBUG oslo_vmware.api [None req-6578fefe-a6f2-433c-911f-0627322a57aa tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Task: {'id': task-1121713, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 806.396506] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba95289b-4d6e-427b-8399-186f83d9d39c {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.403624] env[62070]: DEBUG oslo_vmware.api [None req-79dfc138-9989-48ce-b920-501f82a9be5b tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Task: {'id': task-1121714, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 806.410342] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-830b1db0-affc-4358-9a2f-6778da368786 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.446769] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d86d3cb-f82d-4dda-9fff-10c6e483018c {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.455775] env[62070]: DEBUG oslo_vmware.api [None req-7d89b3fa-4dcc-4992-8983-de4ad7c29798 tempest-ServerMetadataTestJSON-571714377 tempest-ServerMetadataTestJSON-571714377-project-member] Task: {'id': task-1121715, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 806.460346] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02651e45-5028-4740-a6df-445764593f95 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.477717] env[62070]: DEBUG nova.compute.provider_tree [None req-93ddb69e-2ee1-4bdc-9fb9-c0932c81d871 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 806.529969] env[62070]: DEBUG oslo_concurrency.lockutils [None req-08718df5-dcf0-4f5e-b998-717443ddf3b7 tempest-ServersTestFqdnHostnames-1065104165 tempest-ServersTestFqdnHostnames-1065104165-project-member] Acquiring lock "748c94c7-1233-44f4-a71a-176b26518399" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 806.530492] env[62070]: DEBUG oslo_concurrency.lockutils [None req-08718df5-dcf0-4f5e-b998-717443ddf3b7 tempest-ServersTestFqdnHostnames-1065104165 tempest-ServersTestFqdnHostnames-1065104165-project-member] Lock "748c94c7-1233-44f4-a71a-176b26518399" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 806.530863] env[62070]: DEBUG oslo_concurrency.lockutils [None req-08718df5-dcf0-4f5e-b998-717443ddf3b7 tempest-ServersTestFqdnHostnames-1065104165 tempest-ServersTestFqdnHostnames-1065104165-project-member] Acquiring lock "748c94c7-1233-44f4-a71a-176b26518399-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 806.531322] env[62070]: DEBUG oslo_concurrency.lockutils [None req-08718df5-dcf0-4f5e-b998-717443ddf3b7 tempest-ServersTestFqdnHostnames-1065104165 tempest-ServersTestFqdnHostnames-1065104165-project-member] Lock "748c94c7-1233-44f4-a71a-176b26518399-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 806.531391] env[62070]: DEBUG oslo_concurrency.lockutils [None req-08718df5-dcf0-4f5e-b998-717443ddf3b7 tempest-ServersTestFqdnHostnames-1065104165 tempest-ServersTestFqdnHostnames-1065104165-project-member] Lock "748c94c7-1233-44f4-a71a-176b26518399-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 806.536704] env[62070]: INFO nova.compute.manager [None req-08718df5-dcf0-4f5e-b998-717443ddf3b7 tempest-ServersTestFqdnHostnames-1065104165 tempest-ServersTestFqdnHostnames-1065104165-project-member] [instance: 748c94c7-1233-44f4-a71a-176b26518399] Terminating instance [ 806.537696] env[62070]: DEBUG nova.compute.manager [None req-08718df5-dcf0-4f5e-b998-717443ddf3b7 tempest-ServersTestFqdnHostnames-1065104165 
tempest-ServersTestFqdnHostnames-1065104165-project-member] [instance: 748c94c7-1233-44f4-a71a-176b26518399] Start destroying the instance on the hypervisor. {{(pid=62070) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 806.537916] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-08718df5-dcf0-4f5e-b998-717443ddf3b7 tempest-ServersTestFqdnHostnames-1065104165 tempest-ServersTestFqdnHostnames-1065104165-project-member] [instance: 748c94c7-1233-44f4-a71a-176b26518399] Destroying instance {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 806.539240] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9267aed-36d0-4448-a92f-b4bd3114d2db {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.550496] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-08718df5-dcf0-4f5e-b998-717443ddf3b7 tempest-ServersTestFqdnHostnames-1065104165 tempest-ServersTestFqdnHostnames-1065104165-project-member] [instance: 748c94c7-1233-44f4-a71a-176b26518399] Powering off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 806.550795] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9e29002c-57b4-418b-9115-4c48bb5d795f {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.563169] env[62070]: DEBUG oslo_vmware.api [None req-08718df5-dcf0-4f5e-b998-717443ddf3b7 tempest-ServersTestFqdnHostnames-1065104165 tempest-ServersTestFqdnHostnames-1065104165-project-member] Waiting for the task: (returnval){ [ 806.563169] env[62070]: value = "task-1121716" [ 806.563169] env[62070]: _type = "Task" [ 806.563169] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 806.573461] env[62070]: DEBUG oslo_vmware.api [None req-08718df5-dcf0-4f5e-b998-717443ddf3b7 tempest-ServersTestFqdnHostnames-1065104165 tempest-ServersTestFqdnHostnames-1065104165-project-member] Task: {'id': task-1121716, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 806.819388] env[62070]: DEBUG oslo_concurrency.lockutils [None req-24dc43a0-9208-4ff7-9026-97bbf9aa150b tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Releasing lock "refresh_cache-328fbc92-8162-4e12-a02d-6e9cafe0c365" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 806.819780] env[62070]: DEBUG nova.compute.manager [None req-24dc43a0-9208-4ff7-9026-97bbf9aa150b tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 328fbc92-8162-4e12-a02d-6e9cafe0c365] Instance network_info: |[{"id": "11a1a24a-57ad-4bbf-9f1d-58391009ddde", "address": "fa:16:3e:61:72:30", "network": {"id": "5f4568f0-c3e8-497f-b7d6-8d92db2f4066", "bridge": "br-int", "label": "tempest-ImagesTestJSON-24352632-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "eac8e5edc8f14fff89aba7c8cb6cac5d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "54495d8d-2696-4f65-b925-e567abdc205f", "external-id": "nsx-vlan-transportzone-220", "segmentation_id": 220, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap11a1a24a-57", "ovs_interfaceid": "11a1a24a-57ad-4bbf-9f1d-58391009ddde", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 806.820313] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-24dc43a0-9208-4ff7-9026-97bbf9aa150b tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 328fbc92-8162-4e12-a02d-6e9cafe0c365] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:61:72:30', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '54495d8d-2696-4f65-b925-e567abdc205f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '11a1a24a-57ad-4bbf-9f1d-58391009ddde', 'vif_model': 'vmxnet3'}] {{(pid=62070) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 806.828204] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-24dc43a0-9208-4ff7-9026-97bbf9aa150b tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Creating folder: Project (eac8e5edc8f14fff89aba7c8cb6cac5d). Parent ref: group-v245319. {{(pid=62070) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 806.828528] env[62070]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a92086c9-590a-49d1-8ce7-8ead072d52bd {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.842169] env[62070]: INFO nova.virt.vmwareapi.vm_util [None req-24dc43a0-9208-4ff7-9026-97bbf9aa150b tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Created folder: Project (eac8e5edc8f14fff89aba7c8cb6cac5d) in parent group-v245319. 
[ 806.842388] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-24dc43a0-9208-4ff7-9026-97bbf9aa150b tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Creating folder: Instances. Parent ref: group-v245404. {{(pid=62070) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 806.842650] env[62070]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-55dc2e35-5731-4b09-9ea0-4a10ffb9a0de {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.853875] env[62070]: INFO nova.virt.vmwareapi.vm_util [None req-24dc43a0-9208-4ff7-9026-97bbf9aa150b tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Created folder: Instances in parent group-v245404. [ 806.854340] env[62070]: DEBUG oslo.service.loopingcall [None req-24dc43a0-9208-4ff7-9026-97bbf9aa150b tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 806.854438] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 328fbc92-8162-4e12-a02d-6e9cafe0c365] Creating VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 806.854662] env[62070]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5d46cbc0-8a9a-4d1e-9dea-7938d1f01298 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.879592] env[62070]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 806.879592] env[62070]: value = "task-1121719" [ 806.879592] env[62070]: _type = "Task" [ 806.879592] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 806.887160] env[62070]: DEBUG oslo_vmware.api [None req-6578fefe-a6f2-433c-911f-0627322a57aa tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Task: {'id': task-1121713, 'name': ReconfigVM_Task, 'duration_secs': 0.592327} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 806.887941] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-6578fefe-a6f2-433c-911f-0627322a57aa tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] [instance: f75ed36e-16c8-4a6b-bd39-eb4057ef0691] Reconfigured VM instance instance-00000039 to attach disk [datastore2] f75ed36e-16c8-4a6b-bd39-eb4057ef0691/f75ed36e-16c8-4a6b-bd39-eb4057ef0691.vmdk or device None with type sparse {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 806.888757] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d9cd79f7-60f0-41e9-8b14-adf724255531 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.893551] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1121719, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 806.900058] env[62070]: DEBUG oslo_vmware.api [None req-6578fefe-a6f2-433c-911f-0627322a57aa tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Waiting for the task: (returnval){ [ 806.900058] env[62070]: value = "task-1121720" [ 806.900058] env[62070]: _type = "Task" [ 806.900058] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 806.905571] env[62070]: DEBUG oslo_vmware.api [None req-79dfc138-9989-48ce-b920-501f82a9be5b tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Task: {'id': task-1121714, 'name': PowerOnVM_Task, 'duration_secs': 0.932627} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 806.911070] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-79dfc138-9989-48ce-b920-501f82a9be5b tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: c3c6e93c-80be-4e71-87fb-2ff8db8d30fe] Powered on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 806.911345] env[62070]: INFO nova.compute.manager [None req-79dfc138-9989-48ce-b920-501f82a9be5b tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: c3c6e93c-80be-4e71-87fb-2ff8db8d30fe] Took 8.14 seconds to spawn the instance on the hypervisor. [ 806.911545] env[62070]: DEBUG nova.compute.manager [None req-79dfc138-9989-48ce-b920-501f82a9be5b tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: c3c6e93c-80be-4e71-87fb-2ff8db8d30fe] Checking state {{(pid=62070) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 806.912706] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30a3768d-8186-4d4b-891a-a4ae66488cf0 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.919969] env[62070]: DEBUG nova.compute.manager [None req-9eb5f919-38d3-41ef-8aa0-3ff1772842b2 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] [instance: 3d22f50a-e1b7-48f9-a044-df64d01dfeb4] Start spawning the instance on the hypervisor. {{(pid=62070) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 806.932251] env[62070]: DEBUG oslo_vmware.api [None req-6578fefe-a6f2-433c-911f-0627322a57aa tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Task: {'id': task-1121720, 'name': Rename_Task} progress is 10%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 806.950409] env[62070]: DEBUG nova.virt.hardware [None req-9eb5f919-38d3-41ef-8aa0-3ff1772842b2 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T09:21:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T09:21:17Z,direct_url=,disk_format='vmdk',id=43ea607c-7ece-4601-9b11-75c6a16aa7dd,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9d42cb2bbadf40d6b35f237f71234611',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T09:21:18Z,virtual_size=,visibility=), allow threads: False {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 806.950790] env[62070]: DEBUG nova.virt.hardware [None req-9eb5f919-38d3-41ef-8aa0-3ff1772842b2 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Flavor limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 806.951149] env[62070]: DEBUG nova.virt.hardware [None req-9eb5f919-38d3-41ef-8aa0-3ff1772842b2 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Image limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 806.951298] env[62070]: DEBUG nova.virt.hardware [None req-9eb5f919-38d3-41ef-8aa0-3ff1772842b2 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Flavor pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 806.951498] env[62070]: DEBUG nova.virt.hardware [None req-9eb5f919-38d3-41ef-8aa0-3ff1772842b2 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Image pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 806.951681] env[62070]: DEBUG nova.virt.hardware [None req-9eb5f919-38d3-41ef-8aa0-3ff1772842b2 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 806.951916] env[62070]: DEBUG nova.virt.hardware [None req-9eb5f919-38d3-41ef-8aa0-3ff1772842b2 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 806.952244] env[62070]: DEBUG nova.virt.hardware [None req-9eb5f919-38d3-41ef-8aa0-3ff1772842b2 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62070) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 806.952405] env[62070]: DEBUG nova.virt.hardware [None req-9eb5f919-38d3-41ef-8aa0-3ff1772842b2 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Got 1 possible topologies {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 806.952599] env[62070]: DEBUG nova.virt.hardware [None req-9eb5f919-38d3-41ef-8aa0-3ff1772842b2 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 806.952816] env[62070]: DEBUG nova.virt.hardware [None req-9eb5f919-38d3-41ef-8aa0-3ff1772842b2 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 806.953842] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b49fc925-2bbd-46e8-a2c0-54bea498a3c2 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.969509] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2efc8ed7-4988-4448-a1d6-c1e5728bac06 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.974246] env[62070]: DEBUG oslo_vmware.api [None req-7d89b3fa-4dcc-4992-8983-de4ad7c29798 tempest-ServerMetadataTestJSON-571714377 tempest-ServerMetadataTestJSON-571714377-project-member] Task: {'id': task-1121715, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.624369} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 806.975227] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-7d89b3fa-4dcc-4992-8983-de4ad7c29798 tempest-ServerMetadataTestJSON-571714377 tempest-ServerMetadataTestJSON-571714377-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore2] efef4aac-5b74-4a41-9f74-3d4cb4f80cdb/efef4aac-5b74-4a41-9f74-3d4cb4f80cdb.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 806.975461] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-7d89b3fa-4dcc-4992-8983-de4ad7c29798 tempest-ServerMetadataTestJSON-571714377 tempest-ServerMetadataTestJSON-571714377-project-member] [instance: efef4aac-5b74-4a41-9f74-3d4cb4f80cdb] Extending root virtual disk to 1048576 {{(pid=62070) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 806.976334] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-65d03731-b2ff-4682-80af-3c89bc6f1bb0 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.995481] env[62070]: DEBUG nova.scheduler.client.report [None req-93ddb69e-2ee1-4bdc-9fb9-c0932c81d871 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 807.005692] env[62070]: DEBUG oslo_vmware.api [None req-7d89b3fa-4dcc-4992-8983-de4ad7c29798 tempest-ServerMetadataTestJSON-571714377 tempest-ServerMetadataTestJSON-571714377-project-member] Waiting for the task: (returnval){ [ 807.005692] env[62070]: value = "task-1121721" [ 807.005692] env[62070]: _type = "Task" [ 807.005692] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 807.017211] env[62070]: DEBUG oslo_vmware.api [None req-7d89b3fa-4dcc-4992-8983-de4ad7c29798 tempest-ServerMetadataTestJSON-571714377 tempest-ServerMetadataTestJSON-571714377-project-member] Task: {'id': task-1121721, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 807.075620] env[62070]: DEBUG oslo_vmware.api [None req-08718df5-dcf0-4f5e-b998-717443ddf3b7 tempest-ServersTestFqdnHostnames-1065104165 tempest-ServersTestFqdnHostnames-1065104165-project-member] Task: {'id': task-1121716, 'name': PowerOffVM_Task, 'duration_secs': 0.324994} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 807.075949] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-08718df5-dcf0-4f5e-b998-717443ddf3b7 tempest-ServersTestFqdnHostnames-1065104165 tempest-ServersTestFqdnHostnames-1065104165-project-member] [instance: 748c94c7-1233-44f4-a71a-176b26518399] Powered off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 807.076206] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-08718df5-dcf0-4f5e-b998-717443ddf3b7 tempest-ServersTestFqdnHostnames-1065104165 tempest-ServersTestFqdnHostnames-1065104165-project-member] [instance: 748c94c7-1233-44f4-a71a-176b26518399] Unregistering the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 807.076592] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-70bbee25-4dbe-4bdb-a43c-f68ce72130ae {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.203623] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-08718df5-dcf0-4f5e-b998-717443ddf3b7 tempest-ServersTestFqdnHostnames-1065104165 tempest-ServersTestFqdnHostnames-1065104165-project-member] [instance: 748c94c7-1233-44f4-a71a-176b26518399] Unregistered the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 807.203880] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-08718df5-dcf0-4f5e-b998-717443ddf3b7 tempest-ServersTestFqdnHostnames-1065104165 tempest-ServersTestFqdnHostnames-1065104165-project-member] [instance: 748c94c7-1233-44f4-a71a-176b26518399] Deleting contents of the VM from datastore datastore2 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 807.204059] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-08718df5-dcf0-4f5e-b998-717443ddf3b7 tempest-ServersTestFqdnHostnames-1065104165 tempest-ServersTestFqdnHostnames-1065104165-project-member] Deleting the datastore file [datastore2] 748c94c7-1233-44f4-a71a-176b26518399 {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 807.204430] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-899f8bb8-1291-404b-b3da-f7d137490bd9 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.213572] env[62070]: DEBUG oslo_vmware.api [None req-08718df5-dcf0-4f5e-b998-717443ddf3b7 tempest-ServersTestFqdnHostnames-1065104165 tempest-ServersTestFqdnHostnames-1065104165-project-member] Waiting for the task: (returnval){ [ 807.213572] env[62070]: value = "task-1121723" [ 807.213572] env[62070]: _type = "Task" [ 807.213572] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 807.222732] env[62070]: DEBUG oslo_vmware.api [None req-08718df5-dcf0-4f5e-b998-717443ddf3b7 tempest-ServersTestFqdnHostnames-1065104165 tempest-ServersTestFqdnHostnames-1065104165-project-member] Task: {'id': task-1121723, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 807.360298] env[62070]: DEBUG nova.compute.manager [req-6fff2961-25f8-4b83-bc2a-349e618cb09f req-b3cba487-a3e8-4993-b0b3-8285f9125492 service nova] [instance: 328fbc92-8162-4e12-a02d-6e9cafe0c365] Received event network-changed-11a1a24a-57ad-4bbf-9f1d-58391009ddde {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 807.360646] env[62070]: DEBUG nova.compute.manager [req-6fff2961-25f8-4b83-bc2a-349e618cb09f req-b3cba487-a3e8-4993-b0b3-8285f9125492 service nova] [instance: 328fbc92-8162-4e12-a02d-6e9cafe0c365] Refreshing instance network info cache due to event network-changed-11a1a24a-57ad-4bbf-9f1d-58391009ddde. {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 807.360915] env[62070]: DEBUG oslo_concurrency.lockutils [req-6fff2961-25f8-4b83-bc2a-349e618cb09f req-b3cba487-a3e8-4993-b0b3-8285f9125492 service nova] Acquiring lock "refresh_cache-328fbc92-8162-4e12-a02d-6e9cafe0c365" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 807.360915] env[62070]: DEBUG oslo_concurrency.lockutils [req-6fff2961-25f8-4b83-bc2a-349e618cb09f req-b3cba487-a3e8-4993-b0b3-8285f9125492 service nova] Acquired lock "refresh_cache-328fbc92-8162-4e12-a02d-6e9cafe0c365" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 807.361521] env[62070]: DEBUG nova.network.neutron [req-6fff2961-25f8-4b83-bc2a-349e618cb09f req-b3cba487-a3e8-4993-b0b3-8285f9125492 service nova] [instance: 328fbc92-8162-4e12-a02d-6e9cafe0c365] Refreshing network info cache for port 11a1a24a-57ad-4bbf-9f1d-58391009ddde {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 807.391498] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1121719, 'name': CreateVM_Task, 'duration_secs': 0.510552} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 807.391739] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 328fbc92-8162-4e12-a02d-6e9cafe0c365] Created VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 807.392530] env[62070]: DEBUG oslo_concurrency.lockutils [None req-24dc43a0-9208-4ff7-9026-97bbf9aa150b tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 807.392739] env[62070]: DEBUG oslo_concurrency.lockutils [None req-24dc43a0-9208-4ff7-9026-97bbf9aa150b tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Acquired lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 807.393116] env[62070]: DEBUG oslo_concurrency.lockutils [None req-24dc43a0-9208-4ff7-9026-97bbf9aa150b tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 807.394031] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1e1675e9-ffe8-4c46-8855-74b11d66bf92 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.400217] env[62070]: DEBUG oslo_vmware.api [None req-24dc43a0-9208-4ff7-9026-97bbf9aa150b tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Waiting for the task: (returnval){ [ 807.400217] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]5236c321-a517-5f87-e2e5-ef689b5c65ad" [ 807.400217] env[62070]: _type = "Task" [ 807.400217] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 807.413520] env[62070]: DEBUG oslo_vmware.api [None req-24dc43a0-9208-4ff7-9026-97bbf9aa150b tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]5236c321-a517-5f87-e2e5-ef689b5c65ad, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 807.420897] env[62070]: DEBUG oslo_vmware.api [None req-6578fefe-a6f2-433c-911f-0627322a57aa tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Task: {'id': task-1121720, 'name': Rename_Task, 'duration_secs': 0.282557} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 807.421258] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-6578fefe-a6f2-433c-911f-0627322a57aa tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] [instance: f75ed36e-16c8-4a6b-bd39-eb4057ef0691] Powering on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 807.421553] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6b26f0cc-61b3-47f1-92fa-e1aa71030fd1 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.438707] env[62070]: DEBUG oslo_vmware.api [None req-6578fefe-a6f2-433c-911f-0627322a57aa tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Waiting for the task: (returnval){ [ 807.438707] env[62070]: value = "task-1121724" [ 807.438707] env[62070]: _type = "Task" [ 807.438707] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 807.443096] env[62070]: INFO nova.compute.manager [None req-79dfc138-9989-48ce-b920-501f82a9be5b tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: c3c6e93c-80be-4e71-87fb-2ff8db8d30fe] Took 37.89 seconds to build instance. [ 807.456718] env[62070]: DEBUG oslo_vmware.api [None req-6578fefe-a6f2-433c-911f-0627322a57aa tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Task: {'id': task-1121724, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 807.503085] env[62070]: DEBUG oslo_concurrency.lockutils [None req-93ddb69e-2ee1-4bdc-9fb9-c0932c81d871 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.615s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 807.503085] env[62070]: DEBUG nova.compute.manager [None req-93ddb69e-2ee1-4bdc-9fb9-c0932c81d871 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] [instance: 5ec9074b-1237-4404-b13c-a7ca0dbe1d43] Start building networks asynchronously for instance. 
{{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 807.507036] env[62070]: DEBUG oslo_concurrency.lockutils [None req-bec45d6d-b2f1-4547-8703-b179a072d73f tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 23.360s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 807.507276] env[62070]: DEBUG nova.objects.instance [None req-bec45d6d-b2f1-4547-8703-b179a072d73f tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Lazy-loading 'resources' on Instance uuid bcafa04d-904b-4eab-aba1-35180c2d4b22 {{(pid=62070) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 807.523081] env[62070]: DEBUG oslo_vmware.api [None req-7d89b3fa-4dcc-4992-8983-de4ad7c29798 tempest-ServerMetadataTestJSON-571714377 tempest-ServerMetadataTestJSON-571714377-project-member] Task: {'id': task-1121721, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.080854} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 807.523433] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-7d89b3fa-4dcc-4992-8983-de4ad7c29798 tempest-ServerMetadataTestJSON-571714377 tempest-ServerMetadataTestJSON-571714377-project-member] [instance: efef4aac-5b74-4a41-9f74-3d4cb4f80cdb] Extended root virtual disk {{(pid=62070) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 807.524234] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e64e7ba9-110d-43e8-b3f5-07b9875de95e {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.549399] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-7d89b3fa-4dcc-4992-8983-de4ad7c29798 tempest-ServerMetadataTestJSON-571714377 tempest-ServerMetadataTestJSON-571714377-project-member] [instance: efef4aac-5b74-4a41-9f74-3d4cb4f80cdb] Reconfiguring VM instance instance-0000003b to attach disk [datastore2] efef4aac-5b74-4a41-9f74-3d4cb4f80cdb/efef4aac-5b74-4a41-9f74-3d4cb4f80cdb.vmdk or device None with type sparse {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 807.550108] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-26b67d3d-54cf-465b-857d-2f22673f9d07 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.576046] env[62070]: DEBUG oslo_vmware.api [None req-7d89b3fa-4dcc-4992-8983-de4ad7c29798 tempest-ServerMetadataTestJSON-571714377 tempest-ServerMetadataTestJSON-571714377-project-member] Waiting for the task: (returnval){ [ 807.576046] env[62070]: value = "task-1121725" [ 807.576046] env[62070]: _type = "Task" [ 807.576046] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 807.583637] env[62070]: DEBUG oslo_vmware.api [None req-7d89b3fa-4dcc-4992-8983-de4ad7c29798 tempest-ServerMetadataTestJSON-571714377 tempest-ServerMetadataTestJSON-571714377-project-member] Task: {'id': task-1121725, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 807.678064] env[62070]: DEBUG oslo_concurrency.lockutils [None req-805e6d62-3421-4e67-b8e8-d940be524c26 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Acquiring lock "interface-0ac963b1-120a-464b-8228-3393135dd182-7df84135-5c3d-48c5-b2cf-176e77094879" by "nova.compute.manager.ComputeManager.detach_interface.<locals>.do_detach_interface" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 807.678377] env[62070]: DEBUG oslo_concurrency.lockutils [None req-805e6d62-3421-4e67-b8e8-d940be524c26 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Lock "interface-0ac963b1-120a-464b-8228-3393135dd182-7df84135-5c3d-48c5-b2cf-176e77094879" acquired by "nova.compute.manager.ComputeManager.detach_interface.<locals>.do_detach_interface" :: waited 0.001s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 807.725010] env[62070]: DEBUG oslo_vmware.api [None req-08718df5-dcf0-4f5e-b998-717443ddf3b7 tempest-ServersTestFqdnHostnames-1065104165 tempest-ServersTestFqdnHostnames-1065104165-project-member] Task: {'id': task-1121723, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.327768} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 807.725298] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-08718df5-dcf0-4f5e-b998-717443ddf3b7 tempest-ServersTestFqdnHostnames-1065104165 tempest-ServersTestFqdnHostnames-1065104165-project-member] Deleted the datastore file {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 807.725492] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-08718df5-dcf0-4f5e-b998-717443ddf3b7 tempest-ServersTestFqdnHostnames-1065104165 tempest-ServersTestFqdnHostnames-1065104165-project-member] [instance: 748c94c7-1233-44f4-a71a-176b26518399] Deleted contents of the VM from datastore datastore2 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 807.725674] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-08718df5-dcf0-4f5e-b998-717443ddf3b7 tempest-ServersTestFqdnHostnames-1065104165 tempest-ServersTestFqdnHostnames-1065104165-project-member] [instance: 748c94c7-1233-44f4-a71a-176b26518399] Instance destroyed {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 807.725848] env[62070]: INFO nova.compute.manager [None req-08718df5-dcf0-4f5e-b998-717443ddf3b7 tempest-ServersTestFqdnHostnames-1065104165 tempest-ServersTestFqdnHostnames-1065104165-project-member] [instance: 748c94c7-1233-44f4-a71a-176b26518399] Took 1.19 seconds to destroy the instance on the hypervisor. [ 807.726124] env[62070]: DEBUG oslo.service.loopingcall [None req-08718df5-dcf0-4f5e-b998-717443ddf3b7 tempest-ServersTestFqdnHostnames-1065104165 tempest-ServersTestFqdnHostnames-1065104165-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return.
{{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 807.726345] env[62070]: DEBUG nova.compute.manager [-] [instance: 748c94c7-1233-44f4-a71a-176b26518399] Deallocating network for instance {{(pid=62070) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 807.726440] env[62070]: DEBUG nova.network.neutron [-] [instance: 748c94c7-1233-44f4-a71a-176b26518399] deallocate_for_instance() {{(pid=62070) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 807.905612] env[62070]: DEBUG nova.network.neutron [None req-9eb5f919-38d3-41ef-8aa0-3ff1772842b2 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] [instance: 3d22f50a-e1b7-48f9-a044-df64d01dfeb4] Successfully updated port: aef8b9b0-4bbd-4af6-b65d-f7e964775fd4 {{(pid=62070) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 807.913587] env[62070]: DEBUG oslo_vmware.api [None req-24dc43a0-9208-4ff7-9026-97bbf9aa150b tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]5236c321-a517-5f87-e2e5-ef689b5c65ad, 'name': SearchDatastore_Task, 'duration_secs': 0.017786} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 807.913893] env[62070]: DEBUG oslo_concurrency.lockutils [None req-24dc43a0-9208-4ff7-9026-97bbf9aa150b tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Releasing lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 807.914323] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-24dc43a0-9208-4ff7-9026-97bbf9aa150b tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 328fbc92-8162-4e12-a02d-6e9cafe0c365] Processing image 43ea607c-7ece-4601-9b11-75c6a16aa7dd {{(pid=62070) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 807.914516] env[62070]: DEBUG oslo_concurrency.lockutils [None req-24dc43a0-9208-4ff7-9026-97bbf9aa150b tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 807.914692] env[62070]: DEBUG oslo_concurrency.lockutils [None req-24dc43a0-9208-4ff7-9026-97bbf9aa150b tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Acquired lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 807.914910] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-24dc43a0-9208-4ff7-9026-97bbf9aa150b tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 807.915224] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-986e9f8c-eed2-4924-824e-f570fc9e0f2c {{(pid=62070) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.927317] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-24dc43a0-9208-4ff7-9026-97bbf9aa150b tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 807.927317] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-24dc43a0-9208-4ff7-9026-97bbf9aa150b tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62070) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 807.928653] env[62070]: DEBUG oslo_concurrency.lockutils [None req-cfb5fa31-e2c0-473b-afb7-d7e329440a5c tempest-ServersTestBootFromVolume-622206804 tempest-ServersTestBootFromVolume-622206804-project-member] Acquiring lock "dd5d90e8-964a-4e1c-a98a-bcba37a1d79e" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 807.928756] env[62070]: DEBUG oslo_concurrency.lockutils [None req-cfb5fa31-e2c0-473b-afb7-d7e329440a5c tempest-ServersTestBootFromVolume-622206804 tempest-ServersTestBootFromVolume-622206804-project-member] Lock "dd5d90e8-964a-4e1c-a98a-bcba37a1d79e" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 807.928967] env[62070]: DEBUG oslo_concurrency.lockutils [None req-cfb5fa31-e2c0-473b-afb7-d7e329440a5c tempest-ServersTestBootFromVolume-622206804 tempest-ServersTestBootFromVolume-622206804-project-member] Acquiring lock "dd5d90e8-964a-4e1c-a98a-bcba37a1d79e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 807.929243] env[62070]: DEBUG oslo_concurrency.lockutils [None req-cfb5fa31-e2c0-473b-afb7-d7e329440a5c tempest-ServersTestBootFromVolume-622206804 tempest-ServersTestBootFromVolume-622206804-project-member] Lock "dd5d90e8-964a-4e1c-a98a-bcba37a1d79e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 807.929833] env[62070]: DEBUG oslo_concurrency.lockutils [None req-cfb5fa31-e2c0-473b-afb7-d7e329440a5c tempest-ServersTestBootFromVolume-622206804 tempest-ServersTestBootFromVolume-622206804-project-member] Lock "dd5d90e8-964a-4e1c-a98a-bcba37a1d79e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 807.931122] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-570a0e43-3573-47ed-ae3e-7d9252fe4d3f {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.935427] env[62070]: INFO nova.compute.manager [None req-cfb5fa31-e2c0-473b-afb7-d7e329440a5c tempest-ServersTestBootFromVolume-622206804 tempest-ServersTestBootFromVolume-622206804-project-member] 
[instance: dd5d90e8-964a-4e1c-a98a-bcba37a1d79e] Terminating instance [ 807.936501] env[62070]: DEBUG nova.compute.manager [None req-cfb5fa31-e2c0-473b-afb7-d7e329440a5c tempest-ServersTestBootFromVolume-622206804 tempest-ServersTestBootFromVolume-622206804-project-member] [instance: dd5d90e8-964a-4e1c-a98a-bcba37a1d79e] Start destroying the instance on the hypervisor. {{(pid=62070) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 807.936737] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-cfb5fa31-e2c0-473b-afb7-d7e329440a5c tempest-ServersTestBootFromVolume-622206804 tempest-ServersTestBootFromVolume-622206804-project-member] [instance: dd5d90e8-964a-4e1c-a98a-bcba37a1d79e] Powering off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 807.937467] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1b3e4c92-dfcc-4971-90fa-d0941821726c {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.941240] env[62070]: DEBUG oslo_vmware.api [None req-24dc43a0-9208-4ff7-9026-97bbf9aa150b tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Waiting for the task: (returnval){ [ 807.941240] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]52985f2d-bc36-a883-9c2a-ea72a001c10a" [ 807.941240] env[62070]: _type = "Task" [ 807.941240] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 807.946155] env[62070]: DEBUG oslo_vmware.api [None req-cfb5fa31-e2c0-473b-afb7-d7e329440a5c tempest-ServersTestBootFromVolume-622206804 tempest-ServersTestBootFromVolume-622206804-project-member] Waiting for the task: (returnval){ [ 807.946155] env[62070]: value = "task-1121726" [ 807.946155] env[62070]: _type = "Task" [ 807.946155] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 807.950285] env[62070]: DEBUG oslo_concurrency.lockutils [None req-79dfc138-9989-48ce-b920-501f82a9be5b tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Lock "c3c6e93c-80be-4e71-87fb-2ff8db8d30fe" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 120.929s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 807.958552] env[62070]: DEBUG oslo_vmware.api [None req-24dc43a0-9208-4ff7-9026-97bbf9aa150b tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52985f2d-bc36-a883-9c2a-ea72a001c10a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 807.964613] env[62070]: DEBUG oslo_vmware.api [None req-6578fefe-a6f2-433c-911f-0627322a57aa tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Task: {'id': task-1121724, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 807.968553] env[62070]: DEBUG oslo_vmware.api [None req-cfb5fa31-e2c0-473b-afb7-d7e329440a5c tempest-ServersTestBootFromVolume-622206804 tempest-ServersTestBootFromVolume-622206804-project-member] Task: {'id': task-1121726, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 808.008403] env[62070]: DEBUG nova.compute.utils [None req-93ddb69e-2ee1-4bdc-9fb9-c0932c81d871 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Using /dev/sd instead of None {{(pid=62070) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 808.012506] env[62070]: DEBUG nova.compute.manager [None req-93ddb69e-2ee1-4bdc-9fb9-c0932c81d871 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] [instance: 5ec9074b-1237-4404-b13c-a7ca0dbe1d43] Allocating IP information in the background. {{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 808.012842] env[62070]: DEBUG nova.network.neutron [None req-93ddb69e-2ee1-4bdc-9fb9-c0932c81d871 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] [instance: 5ec9074b-1237-4404-b13c-a7ca0dbe1d43] allocate_for_instance() {{(pid=62070) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 808.092248] env[62070]: DEBUG oslo_vmware.api [None req-7d89b3fa-4dcc-4992-8983-de4ad7c29798 tempest-ServerMetadataTestJSON-571714377 tempest-ServerMetadataTestJSON-571714377-project-member] Task: {'id': task-1121725, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 808.111830] env[62070]: DEBUG nova.policy [None req-93ddb69e-2ee1-4bdc-9fb9-c0932c81d871 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '58b377e0d90a45a89966048bd20f609f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1a94db233e3a43dc9aa7cf887c6cb1f6', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62070) authorize /opt/stack/nova/nova/policy.py:201}} [ 808.181605] env[62070]: DEBUG oslo_concurrency.lockutils [None req-805e6d62-3421-4e67-b8e8-d940be524c26 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Acquiring lock "0ac963b1-120a-464b-8228-3393135dd182" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 808.181809] env[62070]: DEBUG oslo_concurrency.lockutils [None req-805e6d62-3421-4e67-b8e8-d940be524c26 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Acquired lock "0ac963b1-120a-464b-8228-3393135dd182" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 808.183205] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d621bde3-9b2c-43b2-ba45-392bd02bb5b1 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.209942] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3067d19e-b340-4099-8f61-ac9e8043cbd2 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.248950] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-805e6d62-3421-4e67-b8e8-d940be524c26 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] [instance: 0ac963b1-120a-464b-8228-3393135dd182] Reconfiguring VM to detach interface {{(pid=62070) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1974}} [ 808.252488] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9f007642-f6d6-4182-a63e-64bfab96d595 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.275578] env[62070]: DEBUG oslo_vmware.api [None req-805e6d62-3421-4e67-b8e8-d940be524c26 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Waiting for the task: (returnval){ [ 808.275578] env[62070]: value = "task-1121727" [ 808.275578] env[62070]: _type = "Task" [ 808.275578] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 808.286990] env[62070]: DEBUG oslo_vmware.api [None req-805e6d62-3421-4e67-b8e8-d940be524c26 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Task: {'id': task-1121727, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 808.292995] env[62070]: DEBUG nova.network.neutron [req-6fff2961-25f8-4b83-bc2a-349e618cb09f req-b3cba487-a3e8-4993-b0b3-8285f9125492 service nova] [instance: 328fbc92-8162-4e12-a02d-6e9cafe0c365] Updated VIF entry in instance network info cache for port 11a1a24a-57ad-4bbf-9f1d-58391009ddde. {{(pid=62070) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 808.293482] env[62070]: DEBUG nova.network.neutron [req-6fff2961-25f8-4b83-bc2a-349e618cb09f req-b3cba487-a3e8-4993-b0b3-8285f9125492 service nova] [instance: 328fbc92-8162-4e12-a02d-6e9cafe0c365] Updating instance_info_cache with network_info: [{"id": "11a1a24a-57ad-4bbf-9f1d-58391009ddde", "address": "fa:16:3e:61:72:30", "network": {"id": "5f4568f0-c3e8-497f-b7d6-8d92db2f4066", "bridge": "br-int", "label": "tempest-ImagesTestJSON-24352632-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "eac8e5edc8f14fff89aba7c8cb6cac5d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "54495d8d-2696-4f65-b925-e567abdc205f", "external-id": "nsx-vlan-transportzone-220", "segmentation_id": 220, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap11a1a24a-57", "ovs_interfaceid": "11a1a24a-57ad-4bbf-9f1d-58391009ddde", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 808.409013] env[62070]: DEBUG oslo_concurrency.lockutils [None req-9eb5f919-38d3-41ef-8aa0-3ff1772842b2 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Acquiring lock "refresh_cache-3d22f50a-e1b7-48f9-a044-df64d01dfeb4" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 808.409266] env[62070]: DEBUG oslo_concurrency.lockutils [None req-9eb5f919-38d3-41ef-8aa0-3ff1772842b2 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Acquired lock "refresh_cache-3d22f50a-e1b7-48f9-a044-df64d01dfeb4" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 808.409335] env[62070]: DEBUG nova.network.neutron [None req-9eb5f919-38d3-41ef-8aa0-3ff1772842b2 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] [instance: 3d22f50a-e1b7-48f9-a044-df64d01dfeb4] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 808.424667] env[62070]: DEBUG oslo_vmware.rw_handles [None req-5ea9a700-be66-42ec-a724-4e7eb98c7948 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52e58e3a-ea0c-c8aa-f4c0-3b6b1ba57006/disk-0.vmdk. 
{{(pid=62070) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 808.425681] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-135b247d-02e9-4286-92ac-2abcb6e77a89 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.434484] env[62070]: DEBUG oslo_vmware.rw_handles [None req-5ea9a700-be66-42ec-a724-4e7eb98c7948 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52e58e3a-ea0c-c8aa-f4c0-3b6b1ba57006/disk-0.vmdk is in state: ready. {{(pid=62070) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 808.434684] env[62070]: ERROR oslo_vmware.rw_handles [None req-5ea9a700-be66-42ec-a724-4e7eb98c7948 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52e58e3a-ea0c-c8aa-f4c0-3b6b1ba57006/disk-0.vmdk due to incomplete transfer. [ 808.437516] env[62070]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-471e8709-a466-414b-b9c3-7855a6158443 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.446637] env[62070]: DEBUG oslo_vmware.rw_handles [None req-5ea9a700-be66-42ec-a724-4e7eb98c7948 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52e58e3a-ea0c-c8aa-f4c0-3b6b1ba57006/disk-0.vmdk. {{(pid=62070) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 808.446851] env[62070]: DEBUG nova.virt.vmwareapi.images [None req-5ea9a700-be66-42ec-a724-4e7eb98c7948 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] [instance: 242cf24f-f9e0-49ca-9b3e-a2b88b3cdcdc] Uploaded image 30541779-297e-45bd-87c3-2fea50b14cb3 to the Glance image server {{(pid=62070) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:473}} [ 808.449103] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-5ea9a700-be66-42ec-a724-4e7eb98c7948 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] [instance: 242cf24f-f9e0-49ca-9b3e-a2b88b3cdcdc] Destroying the VM {{(pid=62070) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1358}} [ 808.456406] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-64084e13-ac52-4386-931b-51ff89fcef8d {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.464884] env[62070]: DEBUG nova.compute.manager [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] [instance: e5deccf6-f967-4e3c-bee0-2e1ad0bb4560] Starting instance... 
{{(pid=62070) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 808.467505] env[62070]: DEBUG oslo_vmware.api [None req-24dc43a0-9208-4ff7-9026-97bbf9aa150b tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52985f2d-bc36-a883-9c2a-ea72a001c10a, 'name': SearchDatastore_Task, 'duration_secs': 0.013442} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 808.472475] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d4bb02f0-a811-4daf-8e1b-fbf28ec606f9 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.480097] env[62070]: DEBUG oslo_vmware.api [None req-cfb5fa31-e2c0-473b-afb7-d7e329440a5c tempest-ServersTestBootFromVolume-622206804 tempest-ServersTestBootFromVolume-622206804-project-member] Task: {'id': task-1121726, 'name': PowerOffVM_Task, 'duration_secs': 0.259022} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 808.480443] env[62070]: DEBUG oslo_vmware.api [None req-6578fefe-a6f2-433c-911f-0627322a57aa tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Task: {'id': task-1121724, 'name': PowerOnVM_Task, 'duration_secs': 0.771632} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 808.486621] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-cfb5fa31-e2c0-473b-afb7-d7e329440a5c tempest-ServersTestBootFromVolume-622206804 tempest-ServersTestBootFromVolume-622206804-project-member] [instance: dd5d90e8-964a-4e1c-a98a-bcba37a1d79e] Powered off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 808.486832] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-cfb5fa31-e2c0-473b-afb7-d7e329440a5c tempest-ServersTestBootFromVolume-622206804 tempest-ServersTestBootFromVolume-622206804-project-member] [instance: dd5d90e8-964a-4e1c-a98a-bcba37a1d79e] Volume detach. 
Driver type: vmdk {{(pid=62070) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 808.487073] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-cfb5fa31-e2c0-473b-afb7-d7e329440a5c tempest-ServersTestBootFromVolume-622206804 tempest-ServersTestBootFromVolume-622206804-project-member] [instance: dd5d90e8-964a-4e1c-a98a-bcba37a1d79e] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-245326', 'volume_id': 'b5d7c242-16d7-4aff-81f3-626f9f878e94', 'name': 'volume-b5d7c242-16d7-4aff-81f3-626f9f878e94', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'dd5d90e8-964a-4e1c-a98a-bcba37a1d79e', 'attached_at': '', 'detached_at': '', 'volume_id': 'b5d7c242-16d7-4aff-81f3-626f9f878e94', 'serial': 'b5d7c242-16d7-4aff-81f3-626f9f878e94'} {{(pid=62070) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 808.487465] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-6578fefe-a6f2-433c-911f-0627322a57aa tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] [instance: f75ed36e-16c8-4a6b-bd39-eb4057ef0691] Powered on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 808.487832] env[62070]: INFO nova.compute.manager [None req-6578fefe-a6f2-433c-911f-0627322a57aa tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] [instance: f75ed36e-16c8-4a6b-bd39-eb4057ef0691] Took 12.26 seconds to spawn the instance on the hypervisor. [ 808.487905] env[62070]: DEBUG nova.compute.manager [None req-6578fefe-a6f2-433c-911f-0627322a57aa tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] [instance: f75ed36e-16c8-4a6b-bd39-eb4057ef0691] Checking state {{(pid=62070) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 808.488272] env[62070]: DEBUG oslo_vmware.api [None req-5ea9a700-be66-42ec-a724-4e7eb98c7948 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Waiting for the task: (returnval){ [ 808.488272] env[62070]: value = "task-1121728" [ 808.488272] env[62070]: _type = "Task" [ 808.488272] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 808.489691] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28949da1-04d1-44a3-9868-7522fb4e05da {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.495021] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2804cab5-68dc-4b1d-98b3-1f80cd2d0261 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.501832] env[62070]: DEBUG oslo_vmware.api [None req-24dc43a0-9208-4ff7-9026-97bbf9aa150b tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Waiting for the task: (returnval){ [ 808.501832] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]52bc935b-0a79-74f1-ceb5-ac6e748f8319" [ 808.501832] env[62070]: _type = "Task" [ 808.501832] env[62070]: } to complete. 
{{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 808.529458] env[62070]: DEBUG oslo_vmware.api [None req-5ea9a700-be66-42ec-a724-4e7eb98c7948 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Task: {'id': task-1121728, 'name': Destroy_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 808.530055] env[62070]: DEBUG nova.compute.manager [None req-93ddb69e-2ee1-4bdc-9fb9-c0932c81d871 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] [instance: 5ec9074b-1237-4404-b13c-a7ca0dbe1d43] Start building block device mappings for instance. {{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 808.541277] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2627c22-513e-4e68-99b6-73adc14c3e4d {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.547527] env[62070]: DEBUG oslo_vmware.api [None req-24dc43a0-9208-4ff7-9026-97bbf9aa150b tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52bc935b-0a79-74f1-ceb5-ac6e748f8319, 'name': SearchDatastore_Task, 'duration_secs': 0.019217} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 808.548269] env[62070]: DEBUG oslo_concurrency.lockutils [None req-24dc43a0-9208-4ff7-9026-97bbf9aa150b tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Releasing lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 808.548562] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-24dc43a0-9208-4ff7-9026-97bbf9aa150b tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore1] 328fbc92-8162-4e12-a02d-6e9cafe0c365/328fbc92-8162-4e12-a02d-6e9cafe0c365.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 808.548835] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0ebf3d61-ab52-4468-8c02-9316e327bc7c {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.553659] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bfeee4a-d6fb-4654-91b3-cb814d013d9a {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.560364] env[62070]: DEBUG oslo_vmware.api [None req-24dc43a0-9208-4ff7-9026-97bbf9aa150b tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Waiting for the task: (returnval){ [ 808.560364] env[62070]: value = "task-1121729" [ 808.560364] env[62070]: _type = "Task" [ 808.560364] env[62070]: } to complete. 
{{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 808.584308] env[62070]: DEBUG nova.network.neutron [None req-93ddb69e-2ee1-4bdc-9fb9-c0932c81d871 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] [instance: 5ec9074b-1237-4404-b13c-a7ca0dbe1d43] Successfully created port: 0b2740c1-7e91-45f2-b9e3-95b268c21eff {{(pid=62070) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 808.590740] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-951f775c-d552-459b-b00e-ed6500a9181c {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.597450] env[62070]: DEBUG oslo_vmware.api [None req-24dc43a0-9208-4ff7-9026-97bbf9aa150b tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Task: {'id': task-1121729, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 808.604111] env[62070]: DEBUG oslo_vmware.api [None req-7d89b3fa-4dcc-4992-8983-de4ad7c29798 tempest-ServerMetadataTestJSON-571714377 tempest-ServerMetadataTestJSON-571714377-project-member] Task: {'id': task-1121725, 'name': ReconfigVM_Task, 'duration_secs': 0.606932} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 808.618036] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-7d89b3fa-4dcc-4992-8983-de4ad7c29798 tempest-ServerMetadataTestJSON-571714377 tempest-ServerMetadataTestJSON-571714377-project-member] [instance: efef4aac-5b74-4a41-9f74-3d4cb4f80cdb] Reconfigured VM instance instance-0000003b to attach disk [datastore2] efef4aac-5b74-4a41-9f74-3d4cb4f80cdb/efef4aac-5b74-4a41-9f74-3d4cb4f80cdb.vmdk or device None with type sparse {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 808.619148] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-cfb5fa31-e2c0-473b-afb7-d7e329440a5c tempest-ServersTestBootFromVolume-622206804 tempest-ServersTestBootFromVolume-622206804-project-member] The volume has not been displaced from its original location: [datastore2] volume-b5d7c242-16d7-4aff-81f3-626f9f878e94/volume-b5d7c242-16d7-4aff-81f3-626f9f878e94.vmdk. No consolidation needed. 
{{(pid=62070) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 808.624482] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-cfb5fa31-e2c0-473b-afb7-d7e329440a5c tempest-ServersTestBootFromVolume-622206804 tempest-ServersTestBootFromVolume-622206804-project-member] [instance: dd5d90e8-964a-4e1c-a98a-bcba37a1d79e] Reconfiguring VM instance instance-0000002f to detach disk 2000 {{(pid=62070) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 808.625092] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f311588a-3a5b-4ee2-b3ad-d51056cc65a2 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.626890] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f5638a08-9efd-493b-a950-24c4e7141a37 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.648253] env[62070]: DEBUG oslo_vmware.api [None req-7d89b3fa-4dcc-4992-8983-de4ad7c29798 tempest-ServerMetadataTestJSON-571714377 tempest-ServerMetadataTestJSON-571714377-project-member] Waiting for the task: (returnval){ [ 808.648253] env[62070]: value = "task-1121730" [ 808.648253] env[62070]: _type = "Task" [ 808.648253] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 808.648830] env[62070]: DEBUG oslo_vmware.api [None req-cfb5fa31-e2c0-473b-afb7-d7e329440a5c tempest-ServersTestBootFromVolume-622206804 tempest-ServersTestBootFromVolume-622206804-project-member] Waiting for the task: (returnval){ [ 808.648830] env[62070]: value = "task-1121731" [ 808.648830] env[62070]: _type = "Task" [ 808.648830] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 808.661013] env[62070]: DEBUG oslo_vmware.api [None req-7d89b3fa-4dcc-4992-8983-de4ad7c29798 tempest-ServerMetadataTestJSON-571714377 tempest-ServerMetadataTestJSON-571714377-project-member] Task: {'id': task-1121730, 'name': Rename_Task} progress is 6%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 808.666929] env[62070]: DEBUG oslo_vmware.api [None req-cfb5fa31-e2c0-473b-afb7-d7e329440a5c tempest-ServersTestBootFromVolume-622206804 tempest-ServersTestBootFromVolume-622206804-project-member] Task: {'id': task-1121731, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 808.788457] env[62070]: DEBUG oslo_vmware.api [None req-805e6d62-3421-4e67-b8e8-d940be524c26 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Task: {'id': task-1121727, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 808.799346] env[62070]: DEBUG oslo_concurrency.lockutils [req-6fff2961-25f8-4b83-bc2a-349e618cb09f req-b3cba487-a3e8-4993-b0b3-8285f9125492 service nova] Releasing lock "refresh_cache-328fbc92-8162-4e12-a02d-6e9cafe0c365" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 808.808237] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-616a870f-f4cb-44f4-9ebb-3a607ceb289a {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.814867] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77d91ae2-8f76-474e-9659-6c061b3a4e4c {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.852076] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-389215bf-23d9-4b93-8bf1-df965f0fac4f {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.861446] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8a25246-564b-4339-9a1d-daf71a8d4682 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.883848] env[62070]: DEBUG nova.compute.provider_tree [None req-bec45d6d-b2f1-4547-8703-b179a072d73f tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 808.955711] env[62070]: DEBUG nova.network.neutron [None req-9eb5f919-38d3-41ef-8aa0-3ff1772842b2 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] [instance: 3d22f50a-e1b7-48f9-a044-df64d01dfeb4] Instance cache missing network info. 
{{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 808.960753] env[62070]: DEBUG nova.compute.manager [req-b0c42a2b-703f-4cd6-bb30-8a5fe15ac37a req-5738ea1a-2394-4da8-8673-3265d7be0e2d service nova] [instance: 748c94c7-1233-44f4-a71a-176b26518399] Received event network-vif-deleted-3fd232c6-4869-4c79-8250-ae854eb69ae0 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 808.960965] env[62070]: INFO nova.compute.manager [req-b0c42a2b-703f-4cd6-bb30-8a5fe15ac37a req-5738ea1a-2394-4da8-8673-3265d7be0e2d service nova] [instance: 748c94c7-1233-44f4-a71a-176b26518399] Neutron deleted interface 3fd232c6-4869-4c79-8250-ae854eb69ae0; detaching it from the instance and deleting it from the info cache [ 808.961172] env[62070]: DEBUG nova.network.neutron [req-b0c42a2b-703f-4cd6-bb30-8a5fe15ac37a req-5738ea1a-2394-4da8-8673-3265d7be0e2d service nova] [instance: 748c94c7-1233-44f4-a71a-176b26518399] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 808.999700] env[62070]: DEBUG oslo_concurrency.lockutils [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 809.010582] env[62070]: DEBUG oslo_vmware.api [None req-5ea9a700-be66-42ec-a724-4e7eb98c7948 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Task: {'id': task-1121728, 'name': Destroy_Task, 'duration_secs': 0.404525} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 809.010878] env[62070]: INFO nova.virt.vmwareapi.vm_util [None req-5ea9a700-be66-42ec-a724-4e7eb98c7948 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] [instance: 242cf24f-f9e0-49ca-9b3e-a2b88b3cdcdc] Destroyed the VM [ 809.011861] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-5ea9a700-be66-42ec-a724-4e7eb98c7948 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] [instance: 242cf24f-f9e0-49ca-9b3e-a2b88b3cdcdc] Deleting Snapshot of the VM instance {{(pid=62070) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 809.011861] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-d88be164-13ec-416c-8ea4-0f3dc2f83e20 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.026965] env[62070]: DEBUG oslo_vmware.api [None req-5ea9a700-be66-42ec-a724-4e7eb98c7948 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Waiting for the task: (returnval){ [ 809.026965] env[62070]: value = "task-1121732" [ 809.026965] env[62070]: _type = "Task" [ 809.026965] env[62070]: } to complete. 
{{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 809.047894] env[62070]: DEBUG oslo_vmware.api [None req-5ea9a700-be66-42ec-a724-4e7eb98c7948 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Task: {'id': task-1121732, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 809.057250] env[62070]: INFO nova.compute.manager [None req-6578fefe-a6f2-433c-911f-0627322a57aa tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] [instance: f75ed36e-16c8-4a6b-bd39-eb4057ef0691] Took 40.50 seconds to build instance. [ 809.078956] env[62070]: DEBUG oslo_vmware.api [None req-24dc43a0-9208-4ff7-9026-97bbf9aa150b tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Task: {'id': task-1121729, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 809.165609] env[62070]: DEBUG oslo_vmware.api [None req-7d89b3fa-4dcc-4992-8983-de4ad7c29798 tempest-ServerMetadataTestJSON-571714377 tempest-ServerMetadataTestJSON-571714377-project-member] Task: {'id': task-1121730, 'name': Rename_Task, 'duration_secs': 0.260547} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 809.169128] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-7d89b3fa-4dcc-4992-8983-de4ad7c29798 tempest-ServerMetadataTestJSON-571714377 tempest-ServerMetadataTestJSON-571714377-project-member] [instance: efef4aac-5b74-4a41-9f74-3d4cb4f80cdb] Powering on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 809.169531] env[62070]: DEBUG oslo_vmware.api [None req-cfb5fa31-e2c0-473b-afb7-d7e329440a5c tempest-ServersTestBootFromVolume-622206804 tempest-ServersTestBootFromVolume-622206804-project-member] Task: {'id': task-1121731, 'name': ReconfigVM_Task, 'duration_secs': 0.278241} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 809.169781] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b788712f-4fbf-445c-bf1b-602295044f3c {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.171626] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-cfb5fa31-e2c0-473b-afb7-d7e329440a5c tempest-ServersTestBootFromVolume-622206804 tempest-ServersTestBootFromVolume-622206804-project-member] [instance: dd5d90e8-964a-4e1c-a98a-bcba37a1d79e] Reconfigured VM instance instance-0000002f to detach disk 2000 {{(pid=62070) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 809.179199] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-be9adbc4-b485-4e63-a38c-7ad39d7e2dae {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.192776] env[62070]: DEBUG nova.network.neutron [-] [instance: 748c94c7-1233-44f4-a71a-176b26518399] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 809.201594] env[62070]: DEBUG oslo_vmware.api [None req-7d89b3fa-4dcc-4992-8983-de4ad7c29798 tempest-ServerMetadataTestJSON-571714377 tempest-ServerMetadataTestJSON-571714377-project-member] Waiting for the task: (returnval){ [ 809.201594] env[62070]: value = "task-1121733" [ 809.201594] env[62070]: _type = "Task" [ 809.201594] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 809.203219] env[62070]: DEBUG oslo_vmware.api [None req-cfb5fa31-e2c0-473b-afb7-d7e329440a5c tempest-ServersTestBootFromVolume-622206804 tempest-ServersTestBootFromVolume-622206804-project-member] Waiting for the task: (returnval){ [ 809.203219] env[62070]: value = "task-1121734" [ 809.203219] env[62070]: _type = "Task" [ 809.203219] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 809.221747] env[62070]: DEBUG oslo_vmware.api [None req-7d89b3fa-4dcc-4992-8983-de4ad7c29798 tempest-ServerMetadataTestJSON-571714377 tempest-ServerMetadataTestJSON-571714377-project-member] Task: {'id': task-1121733, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 809.221929] env[62070]: DEBUG oslo_vmware.api [None req-cfb5fa31-e2c0-473b-afb7-d7e329440a5c tempest-ServersTestBootFromVolume-622206804 tempest-ServersTestBootFromVolume-622206804-project-member] Task: {'id': task-1121734, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 809.240031] env[62070]: DEBUG nova.network.neutron [None req-9eb5f919-38d3-41ef-8aa0-3ff1772842b2 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] [instance: 3d22f50a-e1b7-48f9-a044-df64d01dfeb4] Updating instance_info_cache with network_info: [{"id": "aef8b9b0-4bbd-4af6-b65d-f7e964775fd4", "address": "fa:16:3e:be:d4:45", "network": {"id": "754f4ec8-0bc6-4726-8b88-1a4e1a326699", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-293486644-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1a94db233e3a43dc9aa7cf887c6cb1f6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d2742ba-c3af-4412-877d-c2811dfeba46", "external-id": "nsx-vlan-transportzone-390", "segmentation_id": 390, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaef8b9b0-4b", "ovs_interfaceid": "aef8b9b0-4bbd-4af6-b65d-f7e964775fd4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 809.289946] env[62070]: DEBUG oslo_vmware.api [None req-805e6d62-3421-4e67-b8e8-d940be524c26 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Task: {'id': task-1121727, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 809.387165] env[62070]: DEBUG nova.scheduler.client.report [None req-bec45d6d-b2f1-4547-8703-b179a072d73f tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 809.465682] env[62070]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f076f076-ce0f-4bab-8ddf-6544904fd643 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.477736] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-619c68b9-fab1-4fc0-bd1a-a7e69251cdf8 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.516036] env[62070]: DEBUG nova.compute.manager [req-b0c42a2b-703f-4cd6-bb30-8a5fe15ac37a req-5738ea1a-2394-4da8-8673-3265d7be0e2d service nova] [instance: 748c94c7-1233-44f4-a71a-176b26518399] Detach interface failed, port_id=3fd232c6-4869-4c79-8250-ae854eb69ae0, reason: Instance 748c94c7-1233-44f4-a71a-176b26518399 could not be found. {{(pid=62070) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 809.530633] env[62070]: INFO nova.compute.manager [None req-cc6a54b3-9744-479e-b7db-0a87bbf229c3 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: c3c6e93c-80be-4e71-87fb-2ff8db8d30fe] Rebuilding instance [ 809.537593] env[62070]: DEBUG nova.compute.manager [req-ff6f624d-bf96-4306-a585-10bb434320fd req-9c344cc2-790c-43ff-8f28-d655fbc120fb service nova] [instance: 3d22f50a-e1b7-48f9-a044-df64d01dfeb4] Received event network-vif-plugged-aef8b9b0-4bbd-4af6-b65d-f7e964775fd4 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 809.538108] env[62070]: DEBUG oslo_concurrency.lockutils [req-ff6f624d-bf96-4306-a585-10bb434320fd req-9c344cc2-790c-43ff-8f28-d655fbc120fb service nova] Acquiring lock "3d22f50a-e1b7-48f9-a044-df64d01dfeb4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 809.538361] env[62070]: DEBUG oslo_concurrency.lockutils [req-ff6f624d-bf96-4306-a585-10bb434320fd req-9c344cc2-790c-43ff-8f28-d655fbc120fb service nova] Lock "3d22f50a-e1b7-48f9-a044-df64d01dfeb4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 809.538568] env[62070]: DEBUG oslo_concurrency.lockutils [req-ff6f624d-bf96-4306-a585-10bb434320fd req-9c344cc2-790c-43ff-8f28-d655fbc120fb service nova] Lock "3d22f50a-e1b7-48f9-a044-df64d01dfeb4-events" "released" by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 809.538765] env[62070]: DEBUG nova.compute.manager [req-ff6f624d-bf96-4306-a585-10bb434320fd req-9c344cc2-790c-43ff-8f28-d655fbc120fb service nova] [instance: 3d22f50a-e1b7-48f9-a044-df64d01dfeb4] No waiting events found dispatching network-vif-plugged-aef8b9b0-4bbd-4af6-b65d-f7e964775fd4 {{(pid=62070) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 809.538927] env[62070]: WARNING nova.compute.manager [req-ff6f624d-bf96-4306-a585-10bb434320fd req-9c344cc2-790c-43ff-8f28-d655fbc120fb service nova] [instance: 3d22f50a-e1b7-48f9-a044-df64d01dfeb4] Received unexpected event network-vif-plugged-aef8b9b0-4bbd-4af6-b65d-f7e964775fd4 for instance with vm_state building and task_state spawning. [ 809.539141] env[62070]: DEBUG nova.compute.manager [req-ff6f624d-bf96-4306-a585-10bb434320fd req-9c344cc2-790c-43ff-8f28-d655fbc120fb service nova] [instance: 3d22f50a-e1b7-48f9-a044-df64d01dfeb4] Received event network-changed-aef8b9b0-4bbd-4af6-b65d-f7e964775fd4 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 809.539311] env[62070]: DEBUG nova.compute.manager [req-ff6f624d-bf96-4306-a585-10bb434320fd req-9c344cc2-790c-43ff-8f28-d655fbc120fb service nova] [instance: 3d22f50a-e1b7-48f9-a044-df64d01dfeb4] Refreshing instance network info cache due to event network-changed-aef8b9b0-4bbd-4af6-b65d-f7e964775fd4. {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 809.539492] env[62070]: DEBUG oslo_concurrency.lockutils [req-ff6f624d-bf96-4306-a585-10bb434320fd req-9c344cc2-790c-43ff-8f28-d655fbc120fb service nova] Acquiring lock "refresh_cache-3d22f50a-e1b7-48f9-a044-df64d01dfeb4" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 809.546765] env[62070]: DEBUG oslo_vmware.api [None req-5ea9a700-be66-42ec-a724-4e7eb98c7948 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Task: {'id': task-1121732, 'name': RemoveSnapshot_Task} progress is 43%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 809.549547] env[62070]: DEBUG nova.compute.manager [None req-93ddb69e-2ee1-4bdc-9fb9-c0932c81d871 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] [instance: 5ec9074b-1237-4404-b13c-a7ca0dbe1d43] Start spawning the instance on the hypervisor. {{(pid=62070) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 809.563388] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6578fefe-a6f2-433c-911f-0627322a57aa tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Lock "f75ed36e-16c8-4a6b-bd39-eb4057ef0691" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 127.996s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 809.577263] env[62070]: DEBUG oslo_vmware.api [None req-24dc43a0-9208-4ff7-9026-97bbf9aa150b tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Task: {'id': task-1121729, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.748735} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 809.580056] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-24dc43a0-9208-4ff7-9026-97bbf9aa150b tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore1] 328fbc92-8162-4e12-a02d-6e9cafe0c365/328fbc92-8162-4e12-a02d-6e9cafe0c365.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 809.580295] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-24dc43a0-9208-4ff7-9026-97bbf9aa150b tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 328fbc92-8162-4e12-a02d-6e9cafe0c365] Extending root virtual disk to 1048576 {{(pid=62070) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 809.582950] env[62070]: DEBUG nova.virt.hardware [None req-93ddb69e-2ee1-4bdc-9fb9-c0932c81d871 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T09:21:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T09:21:17Z,direct_url=,disk_format='vmdk',id=43ea607c-7ece-4601-9b11-75c6a16aa7dd,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9d42cb2bbadf40d6b35f237f71234611',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T09:21:18Z,virtual_size=,visibility=), allow threads: False {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 809.583166] env[62070]: DEBUG nova.virt.hardware [None req-93ddb69e-2ee1-4bdc-9fb9-c0932c81d871 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Flavor limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 809.583336] env[62070]: DEBUG nova.virt.hardware [None req-93ddb69e-2ee1-4bdc-9fb9-c0932c81d871 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Image limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 809.583637] env[62070]: DEBUG nova.virt.hardware [None req-93ddb69e-2ee1-4bdc-9fb9-c0932c81d871 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Flavor pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 809.583712] env[62070]: DEBUG nova.virt.hardware [None req-93ddb69e-2ee1-4bdc-9fb9-c0932c81d871 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Image pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 809.583818] env[62070]: DEBUG nova.virt.hardware [None req-93ddb69e-2ee1-4bdc-9fb9-c0932c81d871 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Chose sockets=0, 
cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 809.584669] env[62070]: DEBUG nova.virt.hardware [None req-93ddb69e-2ee1-4bdc-9fb9-c0932c81d871 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 809.584669] env[62070]: DEBUG nova.virt.hardware [None req-93ddb69e-2ee1-4bdc-9fb9-c0932c81d871 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 809.584669] env[62070]: DEBUG nova.virt.hardware [None req-93ddb69e-2ee1-4bdc-9fb9-c0932c81d871 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Got 1 possible topologies {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 809.584669] env[62070]: DEBUG nova.virt.hardware [None req-93ddb69e-2ee1-4bdc-9fb9-c0932c81d871 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 809.584669] env[62070]: DEBUG nova.virt.hardware [None req-93ddb69e-2ee1-4bdc-9fb9-c0932c81d871 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 809.585517] env[62070]: DEBUG nova.compute.manager [None req-cc6a54b3-9744-479e-b7db-0a87bbf229c3 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: c3c6e93c-80be-4e71-87fb-2ff8db8d30fe] Checking state {{(pid=62070) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 809.585613] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-16968ae2-3e70-493a-b987-5c90074c085e {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.588424] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-130324ae-a665-4280-bf91-f56944faf332 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.591911] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60c26efd-b9d7-4570-b507-16bc812a3b64 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.605040] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4faf5fe0-2046-4390-a3e7-ab8853392faf {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.608224] env[62070]: DEBUG oslo_vmware.api [None req-24dc43a0-9208-4ff7-9026-97bbf9aa150b tempest-ImagesTestJSON-1351627684 
tempest-ImagesTestJSON-1351627684-project-member] Waiting for the task: (returnval){ [ 809.608224] env[62070]: value = "task-1121735" [ 809.608224] env[62070]: _type = "Task" [ 809.608224] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 809.629382] env[62070]: DEBUG oslo_vmware.api [None req-24dc43a0-9208-4ff7-9026-97bbf9aa150b tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Task: {'id': task-1121735, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 809.695788] env[62070]: INFO nova.compute.manager [-] [instance: 748c94c7-1233-44f4-a71a-176b26518399] Took 1.97 seconds to deallocate network for instance. [ 809.720051] env[62070]: DEBUG oslo_vmware.api [None req-cfb5fa31-e2c0-473b-afb7-d7e329440a5c tempest-ServersTestBootFromVolume-622206804 tempest-ServersTestBootFromVolume-622206804-project-member] Task: {'id': task-1121734, 'name': ReconfigVM_Task, 'duration_secs': 0.189062} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 809.723053] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-cfb5fa31-e2c0-473b-afb7-d7e329440a5c tempest-ServersTestBootFromVolume-622206804 tempest-ServersTestBootFromVolume-622206804-project-member] [instance: dd5d90e8-964a-4e1c-a98a-bcba37a1d79e] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-245326', 'volume_id': 'b5d7c242-16d7-4aff-81f3-626f9f878e94', 'name': 'volume-b5d7c242-16d7-4aff-81f3-626f9f878e94', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'dd5d90e8-964a-4e1c-a98a-bcba37a1d79e', 'attached_at': '', 'detached_at': '', 'volume_id': 'b5d7c242-16d7-4aff-81f3-626f9f878e94', 'serial': 'b5d7c242-16d7-4aff-81f3-626f9f878e94'} {{(pid=62070) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 809.724112] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-cfb5fa31-e2c0-473b-afb7-d7e329440a5c tempest-ServersTestBootFromVolume-622206804 tempest-ServersTestBootFromVolume-622206804-project-member] [instance: dd5d90e8-964a-4e1c-a98a-bcba37a1d79e] Destroying instance {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 809.724112] env[62070]: DEBUG oslo_vmware.api [None req-7d89b3fa-4dcc-4992-8983-de4ad7c29798 tempest-ServerMetadataTestJSON-571714377 tempest-ServerMetadataTestJSON-571714377-project-member] Task: {'id': task-1121733, 'name': PowerOnVM_Task} progress is 89%. 
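Each Task: {'id': task-..., ...} progress/completed entry above is oslo.vmware polling a vSphere task handle: the driver invokes a *_Task method through the shared VMwareAPISession and then blocks in wait_for_task until vCenter reports success or error. A minimal sketch of that pattern, with a placeholder vCenter host, credentials, and managed-object ID:

# Minimal sketch of the oslo.vmware "*_Task then wait_for_task" pattern
# behind the Task/progress entries above. The vCenter host, credentials
# and managed-object ID are placeholders, not values from this log.
from oslo_vmware import api, vim_util

session = api.VMwareAPISession(
    "vcenter.example.org",            # placeholder host
    "administrator@vsphere.local",    # placeholder user
    "secret",                         # placeholder password
    api_retry_count=10,
    task_poll_interval=0.5,
)

# Build a moref for a VM (the ID here is made up) and power it on.
vm_ref = vim_util.get_moref("vm-12345", "VirtualMachine")
task = session.invoke_api(session.vim, "PowerOnVM_Task", vm_ref)

# wait_for_task re-reads task.info (the source of the "progress is N%"
# lines) until the task reaches 'success', or raises on 'error'.
session.wait_for_task(task)
session.logout()
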
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 809.724613] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31b5a76f-5230-44d7-86c4-dbea66db2e16 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.733103] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-cfb5fa31-e2c0-473b-afb7-d7e329440a5c tempest-ServersTestBootFromVolume-622206804 tempest-ServersTestBootFromVolume-622206804-project-member] [instance: dd5d90e8-964a-4e1c-a98a-bcba37a1d79e] Unregistering the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 809.733412] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-96d4cf07-a7d3-45a2-8ac4-8fdc24fd3532 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.743302] env[62070]: DEBUG oslo_concurrency.lockutils [None req-9eb5f919-38d3-41ef-8aa0-3ff1772842b2 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Releasing lock "refresh_cache-3d22f50a-e1b7-48f9-a044-df64d01dfeb4" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 809.743693] env[62070]: DEBUG nova.compute.manager [None req-9eb5f919-38d3-41ef-8aa0-3ff1772842b2 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] [instance: 3d22f50a-e1b7-48f9-a044-df64d01dfeb4] Instance network_info: |[{"id": "aef8b9b0-4bbd-4af6-b65d-f7e964775fd4", "address": "fa:16:3e:be:d4:45", "network": {"id": "754f4ec8-0bc6-4726-8b88-1a4e1a326699", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-293486644-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1a94db233e3a43dc9aa7cf887c6cb1f6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d2742ba-c3af-4412-877d-c2811dfeba46", "external-id": "nsx-vlan-transportzone-390", "segmentation_id": 390, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaef8b9b0-4b", "ovs_interfaceid": "aef8b9b0-4bbd-4af6-b65d-f7e964775fd4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 809.744026] env[62070]: DEBUG oslo_concurrency.lockutils [req-ff6f624d-bf96-4306-a585-10bb434320fd req-9c344cc2-790c-43ff-8f28-d655fbc120fb service nova] Acquired lock "refresh_cache-3d22f50a-e1b7-48f9-a044-df64d01dfeb4" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 809.744314] env[62070]: DEBUG nova.network.neutron [req-ff6f624d-bf96-4306-a585-10bb434320fd req-9c344cc2-790c-43ff-8f28-d655fbc120fb service nova] [instance: 3d22f50a-e1b7-48f9-a044-df64d01dfeb4] Refreshing network info cache for port aef8b9b0-4bbd-4af6-b65d-f7e964775fd4 
{{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 809.745882] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-9eb5f919-38d3-41ef-8aa0-3ff1772842b2 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] [instance: 3d22f50a-e1b7-48f9-a044-df64d01dfeb4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:be:d4:45', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2d2742ba-c3af-4412-877d-c2811dfeba46', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'aef8b9b0-4bbd-4af6-b65d-f7e964775fd4', 'vif_model': 'vmxnet3'}] {{(pid=62070) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 809.754479] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-9eb5f919-38d3-41ef-8aa0-3ff1772842b2 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Creating folder: Project (1a94db233e3a43dc9aa7cf887c6cb1f6). Parent ref: group-v245319. {{(pid=62070) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 809.755297] env[62070]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0acdaccc-d8eb-4b87-8009-3faac825fc88 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.770024] env[62070]: INFO nova.virt.vmwareapi.vm_util [None req-9eb5f919-38d3-41ef-8aa0-3ff1772842b2 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Created folder: Project (1a94db233e3a43dc9aa7cf887c6cb1f6) in parent group-v245319. [ 809.770024] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-9eb5f919-38d3-41ef-8aa0-3ff1772842b2 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Creating folder: Instances. Parent ref: group-v245407. {{(pid=62070) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 809.770024] env[62070]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-176bdf90-fb19-4870-b0e8-0bb76b61b5ff {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.783039] env[62070]: INFO nova.virt.vmwareapi.vm_util [None req-9eb5f919-38d3-41ef-8aa0-3ff1772842b2 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Created folder: Instances in parent group-v245407. [ 809.783371] env[62070]: DEBUG oslo.service.loopingcall [None req-9eb5f919-38d3-41ef-8aa0-3ff1772842b2 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 809.784114] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3d22f50a-e1b7-48f9-a044-df64d01dfeb4] Creating VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 809.784400] env[62070]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0c283653-ff23-4416-956b-20e85455cfc0 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.809847] env[62070]: DEBUG oslo_vmware.api [None req-805e6d62-3421-4e67-b8e8-d940be524c26 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Task: {'id': task-1121727, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 809.818087] env[62070]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 809.818087] env[62070]: value = "task-1121739" [ 809.818087] env[62070]: _type = "Task" [ 809.818087] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 809.832842] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1121739, 'name': CreateVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 809.838522] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-cfb5fa31-e2c0-473b-afb7-d7e329440a5c tempest-ServersTestBootFromVolume-622206804 tempest-ServersTestBootFromVolume-622206804-project-member] [instance: dd5d90e8-964a-4e1c-a98a-bcba37a1d79e] Unregistered the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 809.838522] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-cfb5fa31-e2c0-473b-afb7-d7e329440a5c tempest-ServersTestBootFromVolume-622206804 tempest-ServersTestBootFromVolume-622206804-project-member] [instance: dd5d90e8-964a-4e1c-a98a-bcba37a1d79e] Deleting contents of the VM from datastore datastore2 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 809.838689] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-cfb5fa31-e2c0-473b-afb7-d7e329440a5c tempest-ServersTestBootFromVolume-622206804 tempest-ServersTestBootFromVolume-622206804-project-member] Deleting the datastore file [datastore2] dd5d90e8-964a-4e1c-a98a-bcba37a1d79e {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 809.839774] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7b335c88-0e3f-47b8-a744-769356af38de {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.848394] env[62070]: DEBUG oslo_vmware.api [None req-cfb5fa31-e2c0-473b-afb7-d7e329440a5c tempest-ServersTestBootFromVolume-622206804 tempest-ServersTestBootFromVolume-622206804-project-member] Waiting for the task: (returnval){ [ 809.848394] env[62070]: value = "task-1121740" [ 809.848394] env[62070]: _type = "Task" [ 809.848394] env[62070]: } to complete. 
{{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 809.857541] env[62070]: DEBUG oslo_vmware.api [None req-cfb5fa31-e2c0-473b-afb7-d7e329440a5c tempest-ServersTestBootFromVolume-622206804 tempest-ServersTestBootFromVolume-622206804-project-member] Task: {'id': task-1121740, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 809.895678] env[62070]: DEBUG oslo_concurrency.lockutils [None req-bec45d6d-b2f1-4547-8703-b179a072d73f tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.389s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 809.898247] env[62070]: DEBUG oslo_concurrency.lockutils [None req-a6b38dca-80cd-4c17-b569-dea27e709a06 tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 25.743s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 809.898523] env[62070]: DEBUG nova.objects.instance [None req-a6b38dca-80cd-4c17-b569-dea27e709a06 tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Lazy-loading 'resources' on Instance uuid 5a146d8f-6921-4b3e-8696-d2804fb855ba {{(pid=62070) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 809.916809] env[62070]: INFO nova.scheduler.client.report [None req-bec45d6d-b2f1-4547-8703-b179a072d73f tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Deleted allocations for instance bcafa04d-904b-4eab-aba1-35180c2d4b22 [ 810.045048] env[62070]: DEBUG oslo_vmware.api [None req-5ea9a700-be66-42ec-a724-4e7eb98c7948 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Task: {'id': task-1121732, 'name': RemoveSnapshot_Task, 'duration_secs': 0.994329} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 810.045433] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-5ea9a700-be66-42ec-a724-4e7eb98c7948 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] [instance: 242cf24f-f9e0-49ca-9b3e-a2b88b3cdcdc] Deleted Snapshot of the VM instance {{(pid=62070) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 810.045664] env[62070]: INFO nova.compute.manager [None req-5ea9a700-be66-42ec-a724-4e7eb98c7948 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] [instance: 242cf24f-f9e0-49ca-9b3e-a2b88b3cdcdc] Took 16.28 seconds to snapshot the instance on the hypervisor. [ 810.071441] env[62070]: DEBUG nova.compute.manager [None req-7ef3c4d0-3bb0-4ce7-af88-13da4a0bcc02 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: c16d175c-0b23-4f72-bdb0-844c6f80fd32] Starting instance... 
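The Acquiring lock / acquired / released entries with their waited and held timings come from oslo.concurrency's lockutils, which Nova uses both as a decorator around critical sections such as the resource tracker's compute_resources claims and as a context manager for per-instance refresh_cache locks. A minimal sketch of both forms, with illustrative lock names and a made-up resource dict:

# Minimal sketch of the oslo.concurrency lock usage behind the
# "Acquiring lock ... / Lock ... acquired ... waited ..." entries.
# Lock names and the fake resource dict below are illustrative only.
from oslo_concurrency import lockutils

# In-process lock, as used for "compute_resources": concurrent claims
# serialize here, which is where long "waited N.NNNs" figures come from.
COMPUTE_RESOURCES_SEMAPHORE = "compute_resources"

@lockutils.synchronized(COMPUTE_RESOURCES_SEMAPHORE)
def instance_claim(resources, instance_uuid, vcpus, memory_mb):
    """Pretend resource claim protected by the compute_resources lock."""
    resources["vcpus_used"] += vcpus
    resources["memory_mb_used"] += memory_mb
    resources.setdefault("instances", []).append(instance_uuid)
    return resources

# Context-manager form, as used for the per-instance
# "refresh_cache-<uuid>" style locks.
def refresh_network_cache(instance_uuid):
    with lockutils.lock(f"refresh_cache-{instance_uuid}"):
        # ... rebuild the instance_info_cache here ...
        pass

if __name__ == "__main__":
    tracker = {"vcpus_used": 0, "memory_mb_used": 0}
    instance_claim(tracker, "00000000-0000-0000-0000-000000000000", 1, 192)
    refresh_network_cache("00000000-0000-0000-0000-000000000000")
    print(tracker)
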
{{(pid=62070) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 810.118642] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-cc6a54b3-9744-479e-b7db-0a87bbf229c3 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: c3c6e93c-80be-4e71-87fb-2ff8db8d30fe] Powering off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 810.118899] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3ae16cbe-7152-453e-a9ee-bdeaa95e3824 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.126228] env[62070]: DEBUG oslo_vmware.api [None req-24dc43a0-9208-4ff7-9026-97bbf9aa150b tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Task: {'id': task-1121735, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.075075} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 810.126347] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-24dc43a0-9208-4ff7-9026-97bbf9aa150b tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 328fbc92-8162-4e12-a02d-6e9cafe0c365] Extended root virtual disk {{(pid=62070) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 810.127199] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac859b9a-7b69-43e6-a4d3-9df4ea05acd5 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.131157] env[62070]: DEBUG oslo_vmware.api [None req-cc6a54b3-9744-479e-b7db-0a87bbf229c3 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Waiting for the task: (returnval){ [ 810.131157] env[62070]: value = "task-1121741" [ 810.131157] env[62070]: _type = "Task" [ 810.131157] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 810.155559] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-24dc43a0-9208-4ff7-9026-97bbf9aa150b tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 328fbc92-8162-4e12-a02d-6e9cafe0c365] Reconfiguring VM instance instance-0000003c to attach disk [datastore1] 328fbc92-8162-4e12-a02d-6e9cafe0c365/328fbc92-8162-4e12-a02d-6e9cafe0c365.vmdk or device None with type sparse {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 810.155745] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f86d7d54-c8ac-4c2a-a246-25cbc3aba294 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.179152] env[62070]: DEBUG oslo_vmware.api [None req-cc6a54b3-9744-479e-b7db-0a87bbf229c3 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Task: {'id': task-1121741, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 810.186105] env[62070]: DEBUG oslo_vmware.api [None req-24dc43a0-9208-4ff7-9026-97bbf9aa150b tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Waiting for the task: (returnval){ [ 810.186105] env[62070]: value = "task-1121742" [ 810.186105] env[62070]: _type = "Task" [ 810.186105] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 810.198036] env[62070]: DEBUG oslo_vmware.api [None req-24dc43a0-9208-4ff7-9026-97bbf9aa150b tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Task: {'id': task-1121742, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 810.204020] env[62070]: DEBUG oslo_concurrency.lockutils [None req-08718df5-dcf0-4f5e-b998-717443ddf3b7 tempest-ServersTestFqdnHostnames-1065104165 tempest-ServersTestFqdnHostnames-1065104165-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 810.214424] env[62070]: DEBUG oslo_vmware.api [None req-7d89b3fa-4dcc-4992-8983-de4ad7c29798 tempest-ServerMetadataTestJSON-571714377 tempest-ServerMetadataTestJSON-571714377-project-member] Task: {'id': task-1121733, 'name': PowerOnVM_Task, 'duration_secs': 0.649027} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 810.214707] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-7d89b3fa-4dcc-4992-8983-de4ad7c29798 tempest-ServerMetadataTestJSON-571714377 tempest-ServerMetadataTestJSON-571714377-project-member] [instance: efef4aac-5b74-4a41-9f74-3d4cb4f80cdb] Powered on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 810.214914] env[62070]: INFO nova.compute.manager [None req-7d89b3fa-4dcc-4992-8983-de4ad7c29798 tempest-ServerMetadataTestJSON-571714377 tempest-ServerMetadataTestJSON-571714377-project-member] [instance: efef4aac-5b74-4a41-9f74-3d4cb4f80cdb] Took 8.78 seconds to spawn the instance on the hypervisor. [ 810.215148] env[62070]: DEBUG nova.compute.manager [None req-7d89b3fa-4dcc-4992-8983-de4ad7c29798 tempest-ServerMetadataTestJSON-571714377 tempest-ServerMetadataTestJSON-571714377-project-member] [instance: efef4aac-5b74-4a41-9f74-3d4cb4f80cdb] Checking state {{(pid=62070) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 810.215910] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-469cd57d-f8b2-4b30-9109-765d48561d1d {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.289778] env[62070]: DEBUG oslo_vmware.api [None req-805e6d62-3421-4e67-b8e8-d940be524c26 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Task: {'id': task-1121727, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 810.332301] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1121739, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 810.360101] env[62070]: DEBUG oslo_vmware.api [None req-cfb5fa31-e2c0-473b-afb7-d7e329440a5c tempest-ServersTestBootFromVolume-622206804 tempest-ServersTestBootFromVolume-622206804-project-member] Task: {'id': task-1121740, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.236642} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 810.360375] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-cfb5fa31-e2c0-473b-afb7-d7e329440a5c tempest-ServersTestBootFromVolume-622206804 tempest-ServersTestBootFromVolume-622206804-project-member] Deleted the datastore file {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 810.360576] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-cfb5fa31-e2c0-473b-afb7-d7e329440a5c tempest-ServersTestBootFromVolume-622206804 tempest-ServersTestBootFromVolume-622206804-project-member] [instance: dd5d90e8-964a-4e1c-a98a-bcba37a1d79e] Deleted contents of the VM from datastore datastore2 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 810.360806] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-cfb5fa31-e2c0-473b-afb7-d7e329440a5c tempest-ServersTestBootFromVolume-622206804 tempest-ServersTestBootFromVolume-622206804-project-member] [instance: dd5d90e8-964a-4e1c-a98a-bcba37a1d79e] Instance destroyed {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 810.360998] env[62070]: INFO nova.compute.manager [None req-cfb5fa31-e2c0-473b-afb7-d7e329440a5c tempest-ServersTestBootFromVolume-622206804 tempest-ServersTestBootFromVolume-622206804-project-member] [instance: dd5d90e8-964a-4e1c-a98a-bcba37a1d79e] Took 2.42 seconds to destroy the instance on the hypervisor. [ 810.361345] env[62070]: DEBUG oslo.service.loopingcall [None req-cfb5fa31-e2c0-473b-afb7-d7e329440a5c tempest-ServersTestBootFromVolume-622206804 tempest-ServersTestBootFromVolume-622206804-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 810.361514] env[62070]: DEBUG nova.compute.manager [-] [instance: dd5d90e8-964a-4e1c-a98a-bcba37a1d79e] Deallocating network for instance {{(pid=62070) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 810.361612] env[62070]: DEBUG nova.network.neutron [-] [instance: dd5d90e8-964a-4e1c-a98a-bcba37a1d79e] deallocate_for_instance() {{(pid=62070) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 810.408867] env[62070]: DEBUG nova.network.neutron [None req-93ddb69e-2ee1-4bdc-9fb9-c0932c81d871 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] [instance: 5ec9074b-1237-4404-b13c-a7ca0dbe1d43] Successfully updated port: 0b2740c1-7e91-45f2-b9e3-95b268c21eff {{(pid=62070) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 810.425932] env[62070]: DEBUG oslo_concurrency.lockutils [None req-bec45d6d-b2f1-4547-8703-b179a072d73f tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Lock "bcafa04d-904b-4eab-aba1-35180c2d4b22" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 29.622s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 810.596125] env[62070]: DEBUG oslo_concurrency.lockutils [None req-7ef3c4d0-3bb0-4ce7-af88-13da4a0bcc02 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 810.607721] env[62070]: DEBUG nova.network.neutron [req-ff6f624d-bf96-4306-a585-10bb434320fd req-9c344cc2-790c-43ff-8f28-d655fbc120fb service nova] [instance: 3d22f50a-e1b7-48f9-a044-df64d01dfeb4] Updated VIF entry in instance network info cache for port aef8b9b0-4bbd-4af6-b65d-f7e964775fd4. 
{{(pid=62070) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 810.607721] env[62070]: DEBUG nova.network.neutron [req-ff6f624d-bf96-4306-a585-10bb434320fd req-9c344cc2-790c-43ff-8f28-d655fbc120fb service nova] [instance: 3d22f50a-e1b7-48f9-a044-df64d01dfeb4] Updating instance_info_cache with network_info: [{"id": "aef8b9b0-4bbd-4af6-b65d-f7e964775fd4", "address": "fa:16:3e:be:d4:45", "network": {"id": "754f4ec8-0bc6-4726-8b88-1a4e1a326699", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-293486644-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1a94db233e3a43dc9aa7cf887c6cb1f6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d2742ba-c3af-4412-877d-c2811dfeba46", "external-id": "nsx-vlan-transportzone-390", "segmentation_id": 390, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaef8b9b0-4b", "ovs_interfaceid": "aef8b9b0-4bbd-4af6-b65d-f7e964775fd4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 810.644932] env[62070]: DEBUG oslo_vmware.api [None req-cc6a54b3-9744-479e-b7db-0a87bbf229c3 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Task: {'id': task-1121741, 'name': PowerOffVM_Task} progress is 100%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 810.703926] env[62070]: DEBUG oslo_vmware.api [None req-24dc43a0-9208-4ff7-9026-97bbf9aa150b tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Task: {'id': task-1121742, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 810.736775] env[62070]: INFO nova.compute.manager [None req-7d89b3fa-4dcc-4992-8983-de4ad7c29798 tempest-ServerMetadataTestJSON-571714377 tempest-ServerMetadataTestJSON-571714377-project-member] [instance: efef4aac-5b74-4a41-9f74-3d4cb4f80cdb] Took 40.30 seconds to build instance. [ 810.792805] env[62070]: DEBUG oslo_vmware.api [None req-805e6d62-3421-4e67-b8e8-d940be524c26 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Task: {'id': task-1121727, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 810.835586] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1121739, 'name': CreateVM_Task, 'duration_secs': 0.918411} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 810.835781] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3d22f50a-e1b7-48f9-a044-df64d01dfeb4] Created VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 810.836883] env[62070]: DEBUG oslo_concurrency.lockutils [None req-9eb5f919-38d3-41ef-8aa0-3ff1772842b2 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 810.836883] env[62070]: DEBUG oslo_concurrency.lockutils [None req-9eb5f919-38d3-41ef-8aa0-3ff1772842b2 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Acquired lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 810.837149] env[62070]: DEBUG oslo_concurrency.lockutils [None req-9eb5f919-38d3-41ef-8aa0-3ff1772842b2 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 810.837591] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4f007dfc-29d3-4a05-b51a-71eae0dac67b {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.845731] env[62070]: DEBUG oslo_vmware.api [None req-9eb5f919-38d3-41ef-8aa0-3ff1772842b2 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Waiting for the task: (returnval){ [ 810.845731] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]5254a866-78bf-e311-d246-cd1f8ea9ba83" [ 810.845731] env[62070]: _type = "Task" [ 810.845731] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 810.857912] env[62070]: DEBUG oslo_vmware.api [None req-9eb5f919-38d3-41ef-8aa0-3ff1772842b2 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]5254a866-78bf-e311-d246-cd1f8ea9ba83, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 810.913965] env[62070]: DEBUG oslo_concurrency.lockutils [None req-93ddb69e-2ee1-4bdc-9fb9-c0932c81d871 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Acquiring lock "refresh_cache-5ec9074b-1237-4404-b13c-a7ca0dbe1d43" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 810.914325] env[62070]: DEBUG oslo_concurrency.lockutils [None req-93ddb69e-2ee1-4bdc-9fb9-c0932c81d871 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Acquired lock "refresh_cache-5ec9074b-1237-4404-b13c-a7ca0dbe1d43" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 810.914586] env[62070]: DEBUG nova.network.neutron [None req-93ddb69e-2ee1-4bdc-9fb9-c0932c81d871 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] [instance: 5ec9074b-1237-4404-b13c-a7ca0dbe1d43] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 810.916709] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-273cc9d0-e6b1-402e-9d7b-7da47f4760ef {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.927754] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4618bf4b-d221-42aa-8589-6fba46f40ffe {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.962563] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2b5018a-e063-41c3-a4f5-d8c4971c174c {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.972592] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15ccb05d-f68f-4f41-80d9-62e3e970f22d {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.987355] env[62070]: DEBUG nova.compute.provider_tree [None req-a6b38dca-80cd-4c17-b569-dea27e709a06 tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 811.110585] env[62070]: DEBUG oslo_concurrency.lockutils [req-ff6f624d-bf96-4306-a585-10bb434320fd req-9c344cc2-790c-43ff-8f28-d655fbc120fb service nova] Releasing lock "refresh_cache-3d22f50a-e1b7-48f9-a044-df64d01dfeb4" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 811.145093] env[62070]: DEBUG oslo_vmware.api [None req-cc6a54b3-9744-479e-b7db-0a87bbf229c3 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Task: {'id': task-1121741, 'name': PowerOffVM_Task, 'duration_secs': 0.592402} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 811.145439] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-cc6a54b3-9744-479e-b7db-0a87bbf229c3 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: c3c6e93c-80be-4e71-87fb-2ff8db8d30fe] Powered off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 811.145721] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-cc6a54b3-9744-479e-b7db-0a87bbf229c3 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: c3c6e93c-80be-4e71-87fb-2ff8db8d30fe] Destroying instance {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 811.146574] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23a07075-9cc1-4af3-8649-2969909ca9d0 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.154673] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-cc6a54b3-9744-479e-b7db-0a87bbf229c3 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: c3c6e93c-80be-4e71-87fb-2ff8db8d30fe] Unregistering the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 811.154937] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7afa2023-c166-462f-b3bd-f51df740ebc8 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.198072] env[62070]: DEBUG oslo_vmware.api [None req-24dc43a0-9208-4ff7-9026-97bbf9aa150b tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Task: {'id': task-1121742, 'name': ReconfigVM_Task, 'duration_secs': 0.647995} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 811.198490] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-24dc43a0-9208-4ff7-9026-97bbf9aa150b tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 328fbc92-8162-4e12-a02d-6e9cafe0c365] Reconfigured VM instance instance-0000003c to attach disk [datastore1] 328fbc92-8162-4e12-a02d-6e9cafe0c365/328fbc92-8162-4e12-a02d-6e9cafe0c365.vmdk or device None with type sparse {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 811.199051] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-83dad1b7-ef7e-467a-a95c-b81fc78d1d42 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.207768] env[62070]: DEBUG oslo_vmware.api [None req-24dc43a0-9208-4ff7-9026-97bbf9aa150b tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Waiting for the task: (returnval){ [ 811.207768] env[62070]: value = "task-1121744" [ 811.207768] env[62070]: _type = "Task" [ 811.207768] env[62070]: } to complete. 
{{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 811.217570] env[62070]: DEBUG oslo_vmware.api [None req-24dc43a0-9208-4ff7-9026-97bbf9aa150b tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Task: {'id': task-1121744, 'name': Rename_Task} progress is 5%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 811.237898] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-cc6a54b3-9744-479e-b7db-0a87bbf229c3 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: c3c6e93c-80be-4e71-87fb-2ff8db8d30fe] Unregistered the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 811.238172] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-cc6a54b3-9744-479e-b7db-0a87bbf229c3 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: c3c6e93c-80be-4e71-87fb-2ff8db8d30fe] Deleting contents of the VM from datastore datastore2 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 811.238365] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-cc6a54b3-9744-479e-b7db-0a87bbf229c3 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Deleting the datastore file [datastore2] c3c6e93c-80be-4e71-87fb-2ff8db8d30fe {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 811.238823] env[62070]: DEBUG oslo_concurrency.lockutils [None req-7d89b3fa-4dcc-4992-8983-de4ad7c29798 tempest-ServerMetadataTestJSON-571714377 tempest-ServerMetadataTestJSON-571714377-project-member] Lock "efef4aac-5b74-4a41-9f74-3d4cb4f80cdb" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 121.831s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 811.239065] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-cc44b7aa-9487-4c30-9722-5f32c2b8977c {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.247731] env[62070]: DEBUG oslo_vmware.api [None req-cc6a54b3-9744-479e-b7db-0a87bbf229c3 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Waiting for the task: (returnval){ [ 811.247731] env[62070]: value = "task-1121745" [ 811.247731] env[62070]: _type = "Task" [ 811.247731] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 811.257731] env[62070]: DEBUG oslo_vmware.api [None req-cc6a54b3-9744-479e-b7db-0a87bbf229c3 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Task: {'id': task-1121745, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 811.289075] env[62070]: DEBUG oslo_vmware.api [None req-805e6d62-3421-4e67-b8e8-d940be524c26 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Task: {'id': task-1121727, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 811.365997] env[62070]: DEBUG oslo_vmware.api [None req-9eb5f919-38d3-41ef-8aa0-3ff1772842b2 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]5254a866-78bf-e311-d246-cd1f8ea9ba83, 'name': SearchDatastore_Task, 'duration_secs': 0.013317} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 811.366390] env[62070]: DEBUG oslo_concurrency.lockutils [None req-9eb5f919-38d3-41ef-8aa0-3ff1772842b2 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Releasing lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 811.366526] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-9eb5f919-38d3-41ef-8aa0-3ff1772842b2 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] [instance: 3d22f50a-e1b7-48f9-a044-df64d01dfeb4] Processing image 43ea607c-7ece-4601-9b11-75c6a16aa7dd {{(pid=62070) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 811.367293] env[62070]: DEBUG oslo_concurrency.lockutils [None req-9eb5f919-38d3-41ef-8aa0-3ff1772842b2 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 811.367293] env[62070]: DEBUG oslo_concurrency.lockutils [None req-9eb5f919-38d3-41ef-8aa0-3ff1772842b2 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Acquired lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 811.367293] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-9eb5f919-38d3-41ef-8aa0-3ff1772842b2 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 811.367293] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d924c408-11ed-4b40-9220-2cf0daad2cf5 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.378161] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-9eb5f919-38d3-41ef-8aa0-3ff1772842b2 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 811.378372] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-9eb5f919-38d3-41ef-8aa0-3ff1772842b2 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Folder [datastore1] devstack-image-cache_base 
created. {{(pid=62070) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 811.379211] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0cbc8114-9300-40ab-8669-e49257761571 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.385992] env[62070]: DEBUG oslo_vmware.api [None req-9eb5f919-38d3-41ef-8aa0-3ff1772842b2 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Waiting for the task: (returnval){ [ 811.385992] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]52fad7f6-e6b5-7f00-0102-2a3295959604" [ 811.385992] env[62070]: _type = "Task" [ 811.385992] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 811.394735] env[62070]: DEBUG oslo_vmware.api [None req-9eb5f919-38d3-41ef-8aa0-3ff1772842b2 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52fad7f6-e6b5-7f00-0102-2a3295959604, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 811.450258] env[62070]: DEBUG nova.network.neutron [None req-93ddb69e-2ee1-4bdc-9fb9-c0932c81d871 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] [instance: 5ec9074b-1237-4404-b13c-a7ca0dbe1d43] Instance cache missing network info. {{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 811.489773] env[62070]: DEBUG nova.scheduler.client.report [None req-a6b38dca-80cd-4c17-b569-dea27e709a06 tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 811.554112] env[62070]: DEBUG nova.network.neutron [-] [instance: dd5d90e8-964a-4e1c-a98a-bcba37a1d79e] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 811.592158] env[62070]: DEBUG nova.network.neutron [None req-93ddb69e-2ee1-4bdc-9fb9-c0932c81d871 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] [instance: 5ec9074b-1237-4404-b13c-a7ca0dbe1d43] Updating instance_info_cache with network_info: [{"id": "0b2740c1-7e91-45f2-b9e3-95b268c21eff", "address": "fa:16:3e:b7:ff:91", "network": {"id": "754f4ec8-0bc6-4726-8b88-1a4e1a326699", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-293486644-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], 
"version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1a94db233e3a43dc9aa7cf887c6cb1f6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d2742ba-c3af-4412-877d-c2811dfeba46", "external-id": "nsx-vlan-transportzone-390", "segmentation_id": 390, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0b2740c1-7e", "ovs_interfaceid": "0b2740c1-7e91-45f2-b9e3-95b268c21eff", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 811.718711] env[62070]: DEBUG oslo_vmware.api [None req-24dc43a0-9208-4ff7-9026-97bbf9aa150b tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Task: {'id': task-1121744, 'name': Rename_Task, 'duration_secs': 0.198248} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 811.718711] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-24dc43a0-9208-4ff7-9026-97bbf9aa150b tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 328fbc92-8162-4e12-a02d-6e9cafe0c365] Powering on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 811.718711] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-920246fa-5e1f-4482-8f6d-9e4e30cc80a4 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.727472] env[62070]: DEBUG oslo_vmware.api [None req-24dc43a0-9208-4ff7-9026-97bbf9aa150b tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Waiting for the task: (returnval){ [ 811.727472] env[62070]: value = "task-1121746" [ 811.727472] env[62070]: _type = "Task" [ 811.727472] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 811.738234] env[62070]: DEBUG oslo_vmware.api [None req-24dc43a0-9208-4ff7-9026-97bbf9aa150b tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Task: {'id': task-1121746, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 811.759199] env[62070]: DEBUG oslo_vmware.api [None req-cc6a54b3-9744-479e-b7db-0a87bbf229c3 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Task: {'id': task-1121745, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.456171} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 811.759710] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-cc6a54b3-9744-479e-b7db-0a87bbf229c3 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Deleted the datastore file {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 811.759966] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-cc6a54b3-9744-479e-b7db-0a87bbf229c3 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: c3c6e93c-80be-4e71-87fb-2ff8db8d30fe] Deleted contents of the VM from datastore datastore2 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 811.760330] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-cc6a54b3-9744-479e-b7db-0a87bbf229c3 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: c3c6e93c-80be-4e71-87fb-2ff8db8d30fe] Instance destroyed {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 811.770919] env[62070]: DEBUG nova.compute.manager [req-7aa29c2f-2782-4a75-a609-676ebab0a2f4 req-8ca8c29b-611a-4a97-8b49-9d9575b4e293 service nova] [instance: 5ec9074b-1237-4404-b13c-a7ca0dbe1d43] Received event network-vif-plugged-0b2740c1-7e91-45f2-b9e3-95b268c21eff {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 811.771407] env[62070]: DEBUG oslo_concurrency.lockutils [req-7aa29c2f-2782-4a75-a609-676ebab0a2f4 req-8ca8c29b-611a-4a97-8b49-9d9575b4e293 service nova] Acquiring lock "5ec9074b-1237-4404-b13c-a7ca0dbe1d43-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 811.771647] env[62070]: DEBUG oslo_concurrency.lockutils [req-7aa29c2f-2782-4a75-a609-676ebab0a2f4 req-8ca8c29b-611a-4a97-8b49-9d9575b4e293 service nova] Lock "5ec9074b-1237-4404-b13c-a7ca0dbe1d43-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 811.771845] env[62070]: DEBUG oslo_concurrency.lockutils [req-7aa29c2f-2782-4a75-a609-676ebab0a2f4 req-8ca8c29b-611a-4a97-8b49-9d9575b4e293 service nova] Lock "5ec9074b-1237-4404-b13c-a7ca0dbe1d43-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 811.772039] env[62070]: DEBUG nova.compute.manager [req-7aa29c2f-2782-4a75-a609-676ebab0a2f4 req-8ca8c29b-611a-4a97-8b49-9d9575b4e293 service nova] [instance: 5ec9074b-1237-4404-b13c-a7ca0dbe1d43] No waiting events found dispatching network-vif-plugged-0b2740c1-7e91-45f2-b9e3-95b268c21eff {{(pid=62070) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 811.772264] env[62070]: WARNING nova.compute.manager [req-7aa29c2f-2782-4a75-a609-676ebab0a2f4 req-8ca8c29b-611a-4a97-8b49-9d9575b4e293 service nova] [instance: 5ec9074b-1237-4404-b13c-a7ca0dbe1d43] Received unexpected event network-vif-plugged-0b2740c1-7e91-45f2-b9e3-95b268c21eff for instance with vm_state building and task_state spawning. 
[ 811.772720] env[62070]: DEBUG nova.compute.manager [req-7aa29c2f-2782-4a75-a609-676ebab0a2f4 req-8ca8c29b-611a-4a97-8b49-9d9575b4e293 service nova] [instance: 5ec9074b-1237-4404-b13c-a7ca0dbe1d43] Received event network-changed-0b2740c1-7e91-45f2-b9e3-95b268c21eff {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 811.772965] env[62070]: DEBUG nova.compute.manager [req-7aa29c2f-2782-4a75-a609-676ebab0a2f4 req-8ca8c29b-611a-4a97-8b49-9d9575b4e293 service nova] [instance: 5ec9074b-1237-4404-b13c-a7ca0dbe1d43] Refreshing instance network info cache due to event network-changed-0b2740c1-7e91-45f2-b9e3-95b268c21eff. {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 811.773151] env[62070]: DEBUG oslo_concurrency.lockutils [req-7aa29c2f-2782-4a75-a609-676ebab0a2f4 req-8ca8c29b-611a-4a97-8b49-9d9575b4e293 service nova] Acquiring lock "refresh_cache-5ec9074b-1237-4404-b13c-a7ca0dbe1d43" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 811.790851] env[62070]: DEBUG oslo_vmware.api [None req-805e6d62-3421-4e67-b8e8-d940be524c26 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Task: {'id': task-1121727, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 811.898417] env[62070]: DEBUG oslo_vmware.api [None req-9eb5f919-38d3-41ef-8aa0-3ff1772842b2 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52fad7f6-e6b5-7f00-0102-2a3295959604, 'name': SearchDatastore_Task, 'duration_secs': 0.011603} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 811.899317] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-22d29b95-ce8b-4b09-be66-af9e4b48d598 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.907120] env[62070]: DEBUG oslo_vmware.api [None req-9eb5f919-38d3-41ef-8aa0-3ff1772842b2 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Waiting for the task: (returnval){ [ 811.907120] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]52d882dc-cf14-f6fe-0ee3-e98b07ea0b23" [ 811.907120] env[62070]: _type = "Task" [ 811.907120] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 811.917869] env[62070]: DEBUG oslo_vmware.api [None req-9eb5f919-38d3-41ef-8aa0-3ff1772842b2 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52d882dc-cf14-f6fe-0ee3-e98b07ea0b23, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 811.995404] env[62070]: DEBUG oslo_concurrency.lockutils [None req-a6b38dca-80cd-4c17-b569-dea27e709a06 tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.097s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 811.997911] env[62070]: DEBUG oslo_concurrency.lockutils [None req-79fef9f0-fe80-4c90-acae-ea6e355c29d5 tempest-ServerMetadataNegativeTestJSON-1698615623 tempest-ServerMetadataNegativeTestJSON-1698615623-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 27.012s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 811.998995] env[62070]: DEBUG nova.objects.instance [None req-79fef9f0-fe80-4c90-acae-ea6e355c29d5 tempest-ServerMetadataNegativeTestJSON-1698615623 tempest-ServerMetadataNegativeTestJSON-1698615623-project-member] Lazy-loading 'resources' on Instance uuid 20e7a993-b1fb-4359-ab35-8b0f06ca0121 {{(pid=62070) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 812.024235] env[62070]: INFO nova.scheduler.client.report [None req-a6b38dca-80cd-4c17-b569-dea27e709a06 tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Deleted allocations for instance 5a146d8f-6921-4b3e-8696-d2804fb855ba [ 812.059485] env[62070]: INFO nova.compute.manager [-] [instance: dd5d90e8-964a-4e1c-a98a-bcba37a1d79e] Took 1.70 seconds to deallocate network for instance. 
[ 812.097684] env[62070]: DEBUG oslo_concurrency.lockutils [None req-93ddb69e-2ee1-4bdc-9fb9-c0932c81d871 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Releasing lock "refresh_cache-5ec9074b-1237-4404-b13c-a7ca0dbe1d43" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 812.098395] env[62070]: DEBUG nova.compute.manager [None req-93ddb69e-2ee1-4bdc-9fb9-c0932c81d871 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] [instance: 5ec9074b-1237-4404-b13c-a7ca0dbe1d43] Instance network_info: |[{"id": "0b2740c1-7e91-45f2-b9e3-95b268c21eff", "address": "fa:16:3e:b7:ff:91", "network": {"id": "754f4ec8-0bc6-4726-8b88-1a4e1a326699", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-293486644-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1a94db233e3a43dc9aa7cf887c6cb1f6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d2742ba-c3af-4412-877d-c2811dfeba46", "external-id": "nsx-vlan-transportzone-390", "segmentation_id": 390, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0b2740c1-7e", "ovs_interfaceid": "0b2740c1-7e91-45f2-b9e3-95b268c21eff", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 812.098395] env[62070]: DEBUG oslo_concurrency.lockutils [req-7aa29c2f-2782-4a75-a609-676ebab0a2f4 req-8ca8c29b-611a-4a97-8b49-9d9575b4e293 service nova] Acquired lock "refresh_cache-5ec9074b-1237-4404-b13c-a7ca0dbe1d43" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 812.098746] env[62070]: DEBUG nova.network.neutron [req-7aa29c2f-2782-4a75-a609-676ebab0a2f4 req-8ca8c29b-611a-4a97-8b49-9d9575b4e293 service nova] [instance: 5ec9074b-1237-4404-b13c-a7ca0dbe1d43] Refreshing network info cache for port 0b2740c1-7e91-45f2-b9e3-95b268c21eff {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 812.099846] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-93ddb69e-2ee1-4bdc-9fb9-c0932c81d871 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] [instance: 5ec9074b-1237-4404-b13c-a7ca0dbe1d43] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b7:ff:91', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2d2742ba-c3af-4412-877d-c2811dfeba46', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0b2740c1-7e91-45f2-b9e3-95b268c21eff', 'vif_model': 'vmxnet3'}] {{(pid=62070) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 812.108389] env[62070]: DEBUG oslo.service.loopingcall [None req-93ddb69e-2ee1-4bdc-9fb9-c0932c81d871 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Waiting for function 
nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 812.111697] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5ec9074b-1237-4404-b13c-a7ca0dbe1d43] Creating VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 812.111697] env[62070]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-928519dc-7b2b-46e9-97a5-8a4f0ef0db13 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.132588] env[62070]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 812.132588] env[62070]: value = "task-1121747" [ 812.132588] env[62070]: _type = "Task" [ 812.132588] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 812.141660] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1121747, 'name': CreateVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 812.241034] env[62070]: DEBUG oslo_vmware.api [None req-24dc43a0-9208-4ff7-9026-97bbf9aa150b tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Task: {'id': task-1121746, 'name': PowerOnVM_Task, 'duration_secs': 0.481128} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 812.241034] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-24dc43a0-9208-4ff7-9026-97bbf9aa150b tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 328fbc92-8162-4e12-a02d-6e9cafe0c365] Powered on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 812.241034] env[62070]: INFO nova.compute.manager [None req-24dc43a0-9208-4ff7-9026-97bbf9aa150b tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 328fbc92-8162-4e12-a02d-6e9cafe0c365] Took 8.07 seconds to spawn the instance on the hypervisor. [ 812.241034] env[62070]: DEBUG nova.compute.manager [None req-24dc43a0-9208-4ff7-9026-97bbf9aa150b tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 328fbc92-8162-4e12-a02d-6e9cafe0c365] Checking state {{(pid=62070) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 812.241034] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-404c1b5d-dc12-4c77-b941-445d48d1bb55 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.296299] env[62070]: DEBUG oslo_vmware.api [None req-805e6d62-3421-4e67-b8e8-d940be524c26 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Task: {'id': task-1121727, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 812.419672] env[62070]: DEBUG oslo_vmware.api [None req-9eb5f919-38d3-41ef-8aa0-3ff1772842b2 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52d882dc-cf14-f6fe-0ee3-e98b07ea0b23, 'name': SearchDatastore_Task, 'duration_secs': 0.012859} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 812.420188] env[62070]: DEBUG oslo_concurrency.lockutils [None req-9eb5f919-38d3-41ef-8aa0-3ff1772842b2 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Releasing lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 812.421327] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-9eb5f919-38d3-41ef-8aa0-3ff1772842b2 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore1] 3d22f50a-e1b7-48f9-a044-df64d01dfeb4/3d22f50a-e1b7-48f9-a044-df64d01dfeb4.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 812.422991] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-75f94793-35b9-4fa5-a764-54996700d46c {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.431825] env[62070]: DEBUG oslo_vmware.api [None req-9eb5f919-38d3-41ef-8aa0-3ff1772842b2 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Waiting for the task: (returnval){ [ 812.431825] env[62070]: value = "task-1121748" [ 812.431825] env[62070]: _type = "Task" [ 812.431825] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 812.440961] env[62070]: DEBUG oslo_vmware.api [None req-9eb5f919-38d3-41ef-8aa0-3ff1772842b2 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Task: {'id': task-1121748, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 812.532615] env[62070]: DEBUG oslo_concurrency.lockutils [None req-a6b38dca-80cd-4c17-b569-dea27e709a06 tempest-ListServersNegativeTestJSON-74757126 tempest-ListServersNegativeTestJSON-74757126-project-member] Lock "5a146d8f-6921-4b3e-8696-d2804fb855ba" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 31.605s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 812.618926] env[62070]: INFO nova.compute.manager [None req-cfb5fa31-e2c0-473b-afb7-d7e329440a5c tempest-ServersTestBootFromVolume-622206804 tempest-ServersTestBootFromVolume-622206804-project-member] [instance: dd5d90e8-964a-4e1c-a98a-bcba37a1d79e] Took 0.56 seconds to detach 1 volumes for instance. [ 812.624917] env[62070]: DEBUG nova.compute.manager [None req-cfb5fa31-e2c0-473b-afb7-d7e329440a5c tempest-ServersTestBootFromVolume-622206804 tempest-ServersTestBootFromVolume-622206804-project-member] [instance: dd5d90e8-964a-4e1c-a98a-bcba37a1d79e] Deleting volume: b5d7c242-16d7-4aff-81f3-626f9f878e94 {{(pid=62070) _cleanup_volumes /opt/stack/nova/nova/compute/manager.py:3247}} [ 812.649595] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1121747, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 812.762173] env[62070]: INFO nova.compute.manager [None req-24dc43a0-9208-4ff7-9026-97bbf9aa150b tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 328fbc92-8162-4e12-a02d-6e9cafe0c365] Took 41.71 seconds to build instance. [ 812.803142] env[62070]: DEBUG oslo_vmware.api [None req-805e6d62-3421-4e67-b8e8-d940be524c26 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Task: {'id': task-1121727, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 812.809469] env[62070]: DEBUG nova.compute.manager [None req-80e54eb4-2bdf-469f-a028-5ebd9367702b tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] [instance: 359ae9f2-a907-459e-99b9-3e043d5d015f] Checking state {{(pid=62070) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 812.812083] env[62070]: DEBUG nova.virt.hardware [None req-cc6a54b3-9744-479e-b7db-0a87bbf229c3 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T09:21:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T09:21:17Z,direct_url=,disk_format='vmdk',id=43ea607c-7ece-4601-9b11-75c6a16aa7dd,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9d42cb2bbadf40d6b35f237f71234611',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T09:21:18Z,virtual_size=,visibility=), allow threads: False {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 812.812570] env[62070]: DEBUG nova.virt.hardware [None req-cc6a54b3-9744-479e-b7db-0a87bbf229c3 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Flavor limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 812.812570] env[62070]: DEBUG nova.virt.hardware [None req-cc6a54b3-9744-479e-b7db-0a87bbf229c3 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Image limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 812.812720] env[62070]: DEBUG nova.virt.hardware [None req-cc6a54b3-9744-479e-b7db-0a87bbf229c3 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Flavor pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 812.812874] env[62070]: DEBUG nova.virt.hardware [None req-cc6a54b3-9744-479e-b7db-0a87bbf229c3 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Image pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 812.813038] env[62070]: DEBUG nova.virt.hardware [None req-cc6a54b3-9744-479e-b7db-0a87bbf229c3 tempest-ServerDiskConfigTestJSON-1824612993 
tempest-ServerDiskConfigTestJSON-1824612993-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 812.813280] env[62070]: DEBUG nova.virt.hardware [None req-cc6a54b3-9744-479e-b7db-0a87bbf229c3 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 812.813454] env[62070]: DEBUG nova.virt.hardware [None req-cc6a54b3-9744-479e-b7db-0a87bbf229c3 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 812.813629] env[62070]: DEBUG nova.virt.hardware [None req-cc6a54b3-9744-479e-b7db-0a87bbf229c3 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Got 1 possible topologies {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 812.813798] env[62070]: DEBUG nova.virt.hardware [None req-cc6a54b3-9744-479e-b7db-0a87bbf229c3 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 812.813979] env[62070]: DEBUG nova.virt.hardware [None req-cc6a54b3-9744-479e-b7db-0a87bbf229c3 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 812.815112] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6dc34073-ff3a-4ffa-b463-4452ca04271c {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.819905] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a6385f2-0b40-466a-9514-1fd6f64a52d4 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.836467] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8baa38b4-00b5-42e7-b6ca-558cc723b916 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.863904] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-cc6a54b3-9744-479e-b7db-0a87bbf229c3 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: c3c6e93c-80be-4e71-87fb-2ff8db8d30fe] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c7:ff:e4', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7e0240aa-a694-48fc-a0f9-6f2d3e71aa12', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8ed3d649-bc61-493f-b8e9-2e7f7fad49ed', 'vif_model': 'vmxnet3'}] {{(pid=62070) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 812.874806] env[62070]: DEBUG oslo.service.loopingcall 
[None req-cc6a54b3-9744-479e-b7db-0a87bbf229c3 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 812.884045] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c3c6e93c-80be-4e71-87fb-2ff8db8d30fe] Creating VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 812.884045] env[62070]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-144f2393-d0c1-4abf-af4f-2b5cdc4d8247 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.911614] env[62070]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 812.911614] env[62070]: value = "task-1121750" [ 812.911614] env[62070]: _type = "Task" [ 812.911614] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 812.926294] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1121750, 'name': CreateVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 812.956822] env[62070]: DEBUG oslo_vmware.api [None req-9eb5f919-38d3-41ef-8aa0-3ff1772842b2 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Task: {'id': task-1121748, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 813.017943] env[62070]: DEBUG nova.network.neutron [req-7aa29c2f-2782-4a75-a609-676ebab0a2f4 req-8ca8c29b-611a-4a97-8b49-9d9575b4e293 service nova] [instance: 5ec9074b-1237-4404-b13c-a7ca0dbe1d43] Updated VIF entry in instance network info cache for port 0b2740c1-7e91-45f2-b9e3-95b268c21eff. 
{{(pid=62070) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 813.018458] env[62070]: DEBUG nova.network.neutron [req-7aa29c2f-2782-4a75-a609-676ebab0a2f4 req-8ca8c29b-611a-4a97-8b49-9d9575b4e293 service nova] [instance: 5ec9074b-1237-4404-b13c-a7ca0dbe1d43] Updating instance_info_cache with network_info: [{"id": "0b2740c1-7e91-45f2-b9e3-95b268c21eff", "address": "fa:16:3e:b7:ff:91", "network": {"id": "754f4ec8-0bc6-4726-8b88-1a4e1a326699", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-293486644-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1a94db233e3a43dc9aa7cf887c6cb1f6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d2742ba-c3af-4412-877d-c2811dfeba46", "external-id": "nsx-vlan-transportzone-390", "segmentation_id": 390, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0b2740c1-7e", "ovs_interfaceid": "0b2740c1-7e91-45f2-b9e3-95b268c21eff", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 813.066687] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edefd68a-0ad3-46f6-8711-caa864ffb9f2 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.075705] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fca2d00e-cc82-488a-b9e2-f0d6426e963a {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.107674] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-971e3451-89ae-4441-82b1-0c8b3fd92f8a {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.117610] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ce705d2-efd4-48cf-bdf1-5f01d1fc76aa {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.137556] env[62070]: DEBUG nova.compute.provider_tree [None req-79fef9f0-fe80-4c90-acae-ea6e355c29d5 tempest-ServerMetadataNegativeTestJSON-1698615623 tempest-ServerMetadataNegativeTestJSON-1698615623-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 813.148181] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1121747, 'name': CreateVM_Task, 'duration_secs': 0.563442} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 813.149083] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5ec9074b-1237-4404-b13c-a7ca0dbe1d43] Created VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 813.149773] env[62070]: DEBUG oslo_concurrency.lockutils [None req-93ddb69e-2ee1-4bdc-9fb9-c0932c81d871 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 813.149949] env[62070]: DEBUG oslo_concurrency.lockutils [None req-93ddb69e-2ee1-4bdc-9fb9-c0932c81d871 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Acquired lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 813.150296] env[62070]: DEBUG oslo_concurrency.lockutils [None req-93ddb69e-2ee1-4bdc-9fb9-c0932c81d871 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 813.150799] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d9f69ddf-84bb-4b18-8c53-7c01b262d5eb {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.156028] env[62070]: DEBUG oslo_vmware.api [None req-93ddb69e-2ee1-4bdc-9fb9-c0932c81d871 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Waiting for the task: (returnval){ [ 813.156028] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]521cc9ca-5053-45ef-7152-faf32fed1fa8" [ 813.156028] env[62070]: _type = "Task" [ 813.156028] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 813.165620] env[62070]: DEBUG oslo_vmware.api [None req-93ddb69e-2ee1-4bdc-9fb9-c0932c81d871 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]521cc9ca-5053-45ef-7152-faf32fed1fa8, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 813.180239] env[62070]: DEBUG oslo_concurrency.lockutils [None req-cfb5fa31-e2c0-473b-afb7-d7e329440a5c tempest-ServersTestBootFromVolume-622206804 tempest-ServersTestBootFromVolume-622206804-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 813.270923] env[62070]: DEBUG oslo_concurrency.lockutils [None req-24dc43a0-9208-4ff7-9026-97bbf9aa150b tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Lock "328fbc92-8162-4e12-a02d-6e9cafe0c365" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 123.059s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 813.294620] env[62070]: DEBUG oslo_vmware.api [None req-805e6d62-3421-4e67-b8e8-d940be524c26 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Task: {'id': task-1121727, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 813.351137] env[62070]: INFO nova.compute.manager [None req-80e54eb4-2bdf-469f-a028-5ebd9367702b tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] [instance: 359ae9f2-a907-459e-99b9-3e043d5d015f] instance snapshotting [ 813.357641] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a58d54c6-fbf9-417f-80e1-9c880dfb3902 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.379951] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd5aff4a-2539-4ab5-bbeb-ebdc2ce4cc2c {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.423637] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1121750, 'name': CreateVM_Task} progress is 25%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 813.446750] env[62070]: DEBUG oslo_vmware.api [None req-9eb5f919-38d3-41ef-8aa0-3ff1772842b2 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Task: {'id': task-1121748, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.624028} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 813.447526] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-9eb5f919-38d3-41ef-8aa0-3ff1772842b2 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore1] 3d22f50a-e1b7-48f9-a044-df64d01dfeb4/3d22f50a-e1b7-48f9-a044-df64d01dfeb4.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 813.447846] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-9eb5f919-38d3-41ef-8aa0-3ff1772842b2 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] [instance: 3d22f50a-e1b7-48f9-a044-df64d01dfeb4] Extending root virtual disk to 1048576 {{(pid=62070) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 813.448696] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8c288fdb-7761-44b7-bfb9-d9e657e5878d {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.457322] env[62070]: DEBUG oslo_vmware.api [None req-9eb5f919-38d3-41ef-8aa0-3ff1772842b2 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Waiting for the task: (returnval){ [ 813.457322] env[62070]: value = "task-1121751" [ 813.457322] env[62070]: _type = "Task" [ 813.457322] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 813.470570] env[62070]: DEBUG oslo_vmware.api [None req-9eb5f919-38d3-41ef-8aa0-3ff1772842b2 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Task: {'id': task-1121751, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 813.493332] env[62070]: DEBUG oslo_concurrency.lockutils [None req-5dd9a036-2e35-4e0c-8126-040992989298 tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Acquiring lock "10672096-00ba-4481-8ab3-085a185076db" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 813.493616] env[62070]: DEBUG oslo_concurrency.lockutils [None req-5dd9a036-2e35-4e0c-8126-040992989298 tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Lock "10672096-00ba-4481-8ab3-085a185076db" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.001s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 813.493803] env[62070]: DEBUG nova.compute.manager [None req-5dd9a036-2e35-4e0c-8126-040992989298 tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] [instance: 10672096-00ba-4481-8ab3-085a185076db] Checking state {{(pid=62070) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 813.494797] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c585d0f4-2bab-4db5-be82-d4aab0962541 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.505372] env[62070]: DEBUG nova.compute.manager [None req-5dd9a036-2e35-4e0c-8126-040992989298 tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] [instance: 10672096-00ba-4481-8ab3-085a185076db] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62070) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3368}} [ 813.505995] env[62070]: DEBUG nova.objects.instance [None req-5dd9a036-2e35-4e0c-8126-040992989298 tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Lazy-loading 'flavor' on Instance uuid 10672096-00ba-4481-8ab3-085a185076db {{(pid=62070) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 813.525340] env[62070]: DEBUG oslo_concurrency.lockutils [req-7aa29c2f-2782-4a75-a609-676ebab0a2f4 req-8ca8c29b-611a-4a97-8b49-9d9575b4e293 service nova] Releasing lock "refresh_cache-5ec9074b-1237-4404-b13c-a7ca0dbe1d43" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 813.526088] env[62070]: DEBUG nova.compute.manager [req-7aa29c2f-2782-4a75-a609-676ebab0a2f4 req-8ca8c29b-611a-4a97-8b49-9d9575b4e293 service nova] [instance: dd5d90e8-964a-4e1c-a98a-bcba37a1d79e] Received event network-vif-deleted-f9ec7c6a-7ed9-4d9d-9e32-c182bc79b903 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 813.640315] env[62070]: DEBUG nova.scheduler.client.report [None req-79fef9f0-fe80-4c90-acae-ea6e355c29d5 tempest-ServerMetadataNegativeTestJSON-1698615623 tempest-ServerMetadataNegativeTestJSON-1698615623-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 
'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 813.670374] env[62070]: DEBUG oslo_vmware.api [None req-93ddb69e-2ee1-4bdc-9fb9-c0932c81d871 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]521cc9ca-5053-45ef-7152-faf32fed1fa8, 'name': SearchDatastore_Task, 'duration_secs': 0.012604} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 813.670689] env[62070]: DEBUG oslo_concurrency.lockutils [None req-93ddb69e-2ee1-4bdc-9fb9-c0932c81d871 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Releasing lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 813.670953] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-93ddb69e-2ee1-4bdc-9fb9-c0932c81d871 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] [instance: 5ec9074b-1237-4404-b13c-a7ca0dbe1d43] Processing image 43ea607c-7ece-4601-9b11-75c6a16aa7dd {{(pid=62070) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 813.671194] env[62070]: DEBUG oslo_concurrency.lockutils [None req-93ddb69e-2ee1-4bdc-9fb9-c0932c81d871 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 813.671336] env[62070]: DEBUG oslo_concurrency.lockutils [None req-93ddb69e-2ee1-4bdc-9fb9-c0932c81d871 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Acquired lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 813.671606] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-93ddb69e-2ee1-4bdc-9fb9-c0932c81d871 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 813.671997] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a067ba52-fcba-43fa-ab56-7fa6a6a7956d {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.691016] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-93ddb69e-2ee1-4bdc-9fb9-c0932c81d871 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 
813.691227] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-93ddb69e-2ee1-4bdc-9fb9-c0932c81d871 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62070) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 813.692021] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8485677a-8bda-4485-88ec-61828d28364c {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.698464] env[62070]: DEBUG oslo_vmware.api [None req-93ddb69e-2ee1-4bdc-9fb9-c0932c81d871 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Waiting for the task: (returnval){ [ 813.698464] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]5273c432-ce2a-de64-eae3-4f3200f93f23" [ 813.698464] env[62070]: _type = "Task" [ 813.698464] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 813.707449] env[62070]: DEBUG oslo_vmware.api [None req-93ddb69e-2ee1-4bdc-9fb9-c0932c81d871 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]5273c432-ce2a-de64-eae3-4f3200f93f23, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 813.797127] env[62070]: DEBUG oslo_vmware.api [None req-805e6d62-3421-4e67-b8e8-d940be524c26 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Task: {'id': task-1121727, 'name': ReconfigVM_Task} progress is 18%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 813.895888] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-80e54eb4-2bdf-469f-a028-5ebd9367702b tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] [instance: 359ae9f2-a907-459e-99b9-3e043d5d015f] Creating Snapshot of the VM instance {{(pid=62070) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 813.896339] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-3630902c-ab86-42fd-82f4-61eab5118543 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.906685] env[62070]: DEBUG oslo_vmware.api [None req-80e54eb4-2bdf-469f-a028-5ebd9367702b tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Waiting for the task: (returnval){ [ 813.906685] env[62070]: value = "task-1121752" [ 813.906685] env[62070]: _type = "Task" [ 813.906685] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 813.921534] env[62070]: DEBUG oslo_vmware.api [None req-80e54eb4-2bdf-469f-a028-5ebd9367702b tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Task: {'id': task-1121752, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 813.928308] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1121750, 'name': CreateVM_Task, 'duration_secs': 0.699498} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 813.928534] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c3c6e93c-80be-4e71-87fb-2ff8db8d30fe] Created VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 813.929252] env[62070]: DEBUG oslo_concurrency.lockutils [None req-cc6a54b3-9744-479e-b7db-0a87bbf229c3 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 813.929497] env[62070]: DEBUG oslo_concurrency.lockutils [None req-cc6a54b3-9744-479e-b7db-0a87bbf229c3 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Acquired lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 813.929864] env[62070]: DEBUG oslo_concurrency.lockutils [None req-cc6a54b3-9744-479e-b7db-0a87bbf229c3 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 813.930555] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-aabac15b-f958-4218-a8c2-269506c5cf5d {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.935602] env[62070]: DEBUG oslo_vmware.api [None req-cc6a54b3-9744-479e-b7db-0a87bbf229c3 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Waiting for the task: (returnval){ [ 813.935602] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]525d7e89-36db-db07-8601-07cc26e8788e" [ 813.935602] env[62070]: _type = "Task" [ 813.935602] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 813.945368] env[62070]: DEBUG oslo_vmware.api [None req-cc6a54b3-9744-479e-b7db-0a87bbf229c3 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]525d7e89-36db-db07-8601-07cc26e8788e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 813.968698] env[62070]: DEBUG oslo_vmware.api [None req-9eb5f919-38d3-41ef-8aa0-3ff1772842b2 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Task: {'id': task-1121751, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.083928} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 813.969066] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-9eb5f919-38d3-41ef-8aa0-3ff1772842b2 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] [instance: 3d22f50a-e1b7-48f9-a044-df64d01dfeb4] Extended root virtual disk {{(pid=62070) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 813.969773] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9036da77-da81-4b00-b423-4b02de949661 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.994838] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-9eb5f919-38d3-41ef-8aa0-3ff1772842b2 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] [instance: 3d22f50a-e1b7-48f9-a044-df64d01dfeb4] Reconfiguring VM instance instance-0000003d to attach disk [datastore1] 3d22f50a-e1b7-48f9-a044-df64d01dfeb4/3d22f50a-e1b7-48f9-a044-df64d01dfeb4.vmdk or device None with type sparse {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 813.995083] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5fcf8454-fff0-4c85-85ad-0a7fe7432d85 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.013188] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-5dd9a036-2e35-4e0c-8126-040992989298 tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] [instance: 10672096-00ba-4481-8ab3-085a185076db] Powering off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 814.013599] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8e148efb-403f-4726-8c54-bf8ccf0c1251 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.019697] env[62070]: DEBUG oslo_vmware.api [None req-9eb5f919-38d3-41ef-8aa0-3ff1772842b2 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Waiting for the task: (returnval){ [ 814.019697] env[62070]: value = "task-1121753" [ 814.019697] env[62070]: _type = "Task" [ 814.019697] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 814.024383] env[62070]: DEBUG oslo_vmware.api [None req-5dd9a036-2e35-4e0c-8126-040992989298 tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Waiting for the task: (returnval){ [ 814.024383] env[62070]: value = "task-1121754" [ 814.024383] env[62070]: _type = "Task" [ 814.024383] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 814.032875] env[62070]: DEBUG oslo_vmware.api [None req-9eb5f919-38d3-41ef-8aa0-3ff1772842b2 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Task: {'id': task-1121753, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 814.041722] env[62070]: DEBUG oslo_vmware.api [None req-5dd9a036-2e35-4e0c-8126-040992989298 tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Task: {'id': task-1121754, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 814.145180] env[62070]: DEBUG oslo_concurrency.lockutils [None req-79fef9f0-fe80-4c90-acae-ea6e355c29d5 tempest-ServerMetadataNegativeTestJSON-1698615623 tempest-ServerMetadataNegativeTestJSON-1698615623-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.147s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 814.148802] env[62070]: DEBUG oslo_concurrency.lockutils [None req-1f25b52c-bc5b-4328-ac52-d99c50d50e57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 28.303s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 814.149089] env[62070]: DEBUG nova.objects.instance [None req-1f25b52c-bc5b-4328-ac52-d99c50d50e57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Lazy-loading 'resources' on Instance uuid 13e3576e-4f4c-4541-a637-daa124cbf8dd {{(pid=62070) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 814.171749] env[62070]: INFO nova.scheduler.client.report [None req-79fef9f0-fe80-4c90-acae-ea6e355c29d5 tempest-ServerMetadataNegativeTestJSON-1698615623 tempest-ServerMetadataNegativeTestJSON-1698615623-project-member] Deleted allocations for instance 20e7a993-b1fb-4359-ab35-8b0f06ca0121 [ 814.214541] env[62070]: DEBUG oslo_vmware.api [None req-93ddb69e-2ee1-4bdc-9fb9-c0932c81d871 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]5273c432-ce2a-de64-eae3-4f3200f93f23, 'name': SearchDatastore_Task, 'duration_secs': 0.058272} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 814.214583] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8f623ff2-addf-4b0f-b0c0-2237bec73769 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.225095] env[62070]: DEBUG oslo_vmware.api [None req-93ddb69e-2ee1-4bdc-9fb9-c0932c81d871 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Waiting for the task: (returnval){ [ 814.225095] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]529da973-a00d-eb6c-324e-eaff36ce9551" [ 814.225095] env[62070]: _type = "Task" [ 814.225095] env[62070]: } to complete. 
{{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 814.235560] env[62070]: DEBUG oslo_vmware.api [None req-93ddb69e-2ee1-4bdc-9fb9-c0932c81d871 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]529da973-a00d-eb6c-324e-eaff36ce9551, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 814.294768] env[62070]: DEBUG oslo_vmware.api [None req-805e6d62-3421-4e67-b8e8-d940be524c26 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Task: {'id': task-1121727, 'name': ReconfigVM_Task, 'duration_secs': 5.88429} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 814.295997] env[62070]: DEBUG oslo_concurrency.lockutils [None req-805e6d62-3421-4e67-b8e8-d940be524c26 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Releasing lock "0ac963b1-120a-464b-8228-3393135dd182" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 814.295997] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-805e6d62-3421-4e67-b8e8-d940be524c26 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] [instance: 0ac963b1-120a-464b-8228-3393135dd182] Reconfigured VM to detach interface {{(pid=62070) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1984}} [ 814.421307] env[62070]: DEBUG oslo_vmware.api [None req-80e54eb4-2bdf-469f-a028-5ebd9367702b tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Task: {'id': task-1121752, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 814.446597] env[62070]: DEBUG oslo_vmware.api [None req-cc6a54b3-9744-479e-b7db-0a87bbf229c3 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]525d7e89-36db-db07-8601-07cc26e8788e, 'name': SearchDatastore_Task, 'duration_secs': 0.016061} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 814.446969] env[62070]: DEBUG oslo_concurrency.lockutils [None req-cc6a54b3-9744-479e-b7db-0a87bbf229c3 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Releasing lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 814.447258] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-cc6a54b3-9744-479e-b7db-0a87bbf229c3 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: c3c6e93c-80be-4e71-87fb-2ff8db8d30fe] Processing image 43ea607c-7ece-4601-9b11-75c6a16aa7dd {{(pid=62070) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 814.447932] env[62070]: DEBUG oslo_concurrency.lockutils [None req-cc6a54b3-9744-479e-b7db-0a87bbf229c3 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 814.536212] env[62070]: DEBUG oslo_vmware.api [None req-9eb5f919-38d3-41ef-8aa0-3ff1772842b2 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Task: {'id': task-1121753, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 814.539408] env[62070]: DEBUG oslo_vmware.api [None req-5dd9a036-2e35-4e0c-8126-040992989298 tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Task: {'id': task-1121754, 'name': PowerOffVM_Task, 'duration_secs': 0.372364} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 814.539668] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-5dd9a036-2e35-4e0c-8126-040992989298 tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] [instance: 10672096-00ba-4481-8ab3-085a185076db] Powered off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 814.539866] env[62070]: DEBUG nova.compute.manager [None req-5dd9a036-2e35-4e0c-8126-040992989298 tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] [instance: 10672096-00ba-4481-8ab3-085a185076db] Checking state {{(pid=62070) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 814.540641] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df6c8e22-5794-491a-9a6e-751fb98804a4 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.690466] env[62070]: DEBUG oslo_concurrency.lockutils [None req-79fef9f0-fe80-4c90-acae-ea6e355c29d5 tempest-ServerMetadataNegativeTestJSON-1698615623 tempest-ServerMetadataNegativeTestJSON-1698615623-project-member] Lock "20e7a993-b1fb-4359-ab35-8b0f06ca0121" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 32.715s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 814.739889] env[62070]: DEBUG oslo_vmware.api [None req-93ddb69e-2ee1-4bdc-9fb9-c0932c81d871 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]529da973-a00d-eb6c-324e-eaff36ce9551, 'name': SearchDatastore_Task, 'duration_secs': 0.021432} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 814.740602] env[62070]: DEBUG oslo_concurrency.lockutils [None req-93ddb69e-2ee1-4bdc-9fb9-c0932c81d871 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Releasing lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 814.740965] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-93ddb69e-2ee1-4bdc-9fb9-c0932c81d871 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore1] 5ec9074b-1237-4404-b13c-a7ca0dbe1d43/5ec9074b-1237-4404-b13c-a7ca0dbe1d43.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 814.741497] env[62070]: DEBUG oslo_concurrency.lockutils [None req-cc6a54b3-9744-479e-b7db-0a87bbf229c3 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Acquired lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 814.741780] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-cc6a54b3-9744-479e-b7db-0a87bbf229c3 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 814.742144] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-125300c8-a51c-4dfb-85b0-1d2b3b42d77a {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.746824] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0b88542e-07af-4e45-abb1-50f09652e2b1 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.760402] env[62070]: DEBUG nova.compute.manager [req-f6b9d101-6097-4be5-bda2-bc92d240f77a req-2d0ad51e-3303-4d3d-8fb8-f665da17fbf1 service nova] [instance: 0ac963b1-120a-464b-8228-3393135dd182] Received event network-vif-deleted-7df84135-5c3d-48c5-b2cf-176e77094879 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 814.760402] env[62070]: INFO nova.compute.manager [req-f6b9d101-6097-4be5-bda2-bc92d240f77a req-2d0ad51e-3303-4d3d-8fb8-f665da17fbf1 service nova] [instance: 0ac963b1-120a-464b-8228-3393135dd182] Neutron deleted interface 7df84135-5c3d-48c5-b2cf-176e77094879; detaching it from the instance and deleting it from the info cache [ 814.760402] env[62070]: DEBUG nova.network.neutron [req-f6b9d101-6097-4be5-bda2-bc92d240f77a req-2d0ad51e-3303-4d3d-8fb8-f665da17fbf1 service nova] [instance: 0ac963b1-120a-464b-8228-3393135dd182] Updating instance_info_cache with network_info: [{"id": "6326b098-3c76-4152-b623-8921285ec01b", "address": "fa:16:3e:cc:12:22", "network": {"id": "48dc51c7-cfa4-452e-9d72-2968d9a40dfa", "bridge": "br-int", "label": 
"tempest-AttachInterfacesTestJSON-274800531-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.179", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "925dff51764c4b56ae7ea05fbde2ecdd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2c7c1b46-cb81-45da-b5aa-7905d4da5854", "external-id": "nsx-vlan-transportzone-15", "segmentation_id": 15, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6326b098-3c", "ovs_interfaceid": "6326b098-3c76-4152-b623-8921285ec01b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "848ce3e0-8d08-460b-b770-75628ae28fd3", "address": "fa:16:3e:28:b0:1e", "network": {"id": "48dc51c7-cfa4-452e-9d72-2968d9a40dfa", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-274800531-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "925dff51764c4b56ae7ea05fbde2ecdd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2c7c1b46-cb81-45da-b5aa-7905d4da5854", "external-id": "nsx-vlan-transportzone-15", "segmentation_id": 15, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap848ce3e0-8d", "ovs_interfaceid": "848ce3e0-8d08-460b-b770-75628ae28fd3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 814.764831] env[62070]: DEBUG oslo_vmware.api [None req-93ddb69e-2ee1-4bdc-9fb9-c0932c81d871 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Waiting for the task: (returnval){ [ 814.764831] env[62070]: value = "task-1121755" [ 814.764831] env[62070]: _type = "Task" [ 814.764831] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 814.767508] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-cc6a54b3-9744-479e-b7db-0a87bbf229c3 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 814.767737] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-cc6a54b3-9744-479e-b7db-0a87bbf229c3 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62070) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 814.770993] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-81019d87-9466-444c-9fe1-d8a7796a10e9 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.777760] env[62070]: DEBUG oslo_vmware.api [None req-93ddb69e-2ee1-4bdc-9fb9-c0932c81d871 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Task: {'id': task-1121755, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 814.787148] env[62070]: DEBUG oslo_vmware.api [None req-cc6a54b3-9744-479e-b7db-0a87bbf229c3 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Waiting for the task: (returnval){ [ 814.787148] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]52e1968f-161b-1da8-e63c-81f0f27bfa19" [ 814.787148] env[62070]: _type = "Task" [ 814.787148] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 814.798335] env[62070]: DEBUG oslo_vmware.api [None req-cc6a54b3-9744-479e-b7db-0a87bbf229c3 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52e1968f-161b-1da8-e63c-81f0f27bfa19, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 814.887742] env[62070]: DEBUG oslo_concurrency.lockutils [None req-e216791d-b9a4-4463-83e2-194be4723308 tempest-ServerMetadataTestJSON-571714377 tempest-ServerMetadataTestJSON-571714377-project-member] Acquiring lock "efef4aac-5b74-4a41-9f74-3d4cb4f80cdb" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 814.888033] env[62070]: DEBUG oslo_concurrency.lockutils [None req-e216791d-b9a4-4463-83e2-194be4723308 tempest-ServerMetadataTestJSON-571714377 tempest-ServerMetadataTestJSON-571714377-project-member] Lock "efef4aac-5b74-4a41-9f74-3d4cb4f80cdb" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 814.888258] env[62070]: DEBUG oslo_concurrency.lockutils [None req-e216791d-b9a4-4463-83e2-194be4723308 tempest-ServerMetadataTestJSON-571714377 tempest-ServerMetadataTestJSON-571714377-project-member] Acquiring lock "efef4aac-5b74-4a41-9f74-3d4cb4f80cdb-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 814.888485] env[62070]: DEBUG oslo_concurrency.lockutils [None req-e216791d-b9a4-4463-83e2-194be4723308 tempest-ServerMetadataTestJSON-571714377 tempest-ServerMetadataTestJSON-571714377-project-member] Lock "efef4aac-5b74-4a41-9f74-3d4cb4f80cdb-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} 
[ 814.888673] env[62070]: DEBUG oslo_concurrency.lockutils [None req-e216791d-b9a4-4463-83e2-194be4723308 tempest-ServerMetadataTestJSON-571714377 tempest-ServerMetadataTestJSON-571714377-project-member] Lock "efef4aac-5b74-4a41-9f74-3d4cb4f80cdb-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 814.891021] env[62070]: INFO nova.compute.manager [None req-e216791d-b9a4-4463-83e2-194be4723308 tempest-ServerMetadataTestJSON-571714377 tempest-ServerMetadataTestJSON-571714377-project-member] [instance: efef4aac-5b74-4a41-9f74-3d4cb4f80cdb] Terminating instance [ 814.895946] env[62070]: DEBUG nova.compute.manager [None req-e216791d-b9a4-4463-83e2-194be4723308 tempest-ServerMetadataTestJSON-571714377 tempest-ServerMetadataTestJSON-571714377-project-member] [instance: efef4aac-5b74-4a41-9f74-3d4cb4f80cdb] Start destroying the instance on the hypervisor. {{(pid=62070) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 814.896851] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-e216791d-b9a4-4463-83e2-194be4723308 tempest-ServerMetadataTestJSON-571714377 tempest-ServerMetadataTestJSON-571714377-project-member] [instance: efef4aac-5b74-4a41-9f74-3d4cb4f80cdb] Destroying instance {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 814.896987] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b8a577c-00e9-4273-b763-4d84b5c70ffa {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.907064] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-e216791d-b9a4-4463-83e2-194be4723308 tempest-ServerMetadataTestJSON-571714377 tempest-ServerMetadataTestJSON-571714377-project-member] [instance: efef4aac-5b74-4a41-9f74-3d4cb4f80cdb] Powering off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 814.909774] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3352fb90-3ef4-4314-986b-bc765e27af86 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.922743] env[62070]: DEBUG oslo_vmware.api [None req-80e54eb4-2bdf-469f-a028-5ebd9367702b tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Task: {'id': task-1121752, 'name': CreateSnapshot_Task, 'duration_secs': 1.005164} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 814.924647] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-80e54eb4-2bdf-469f-a028-5ebd9367702b tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] [instance: 359ae9f2-a907-459e-99b9-3e043d5d015f] Created Snapshot of the VM instance {{(pid=62070) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 814.925110] env[62070]: DEBUG oslo_vmware.api [None req-e216791d-b9a4-4463-83e2-194be4723308 tempest-ServerMetadataTestJSON-571714377 tempest-ServerMetadataTestJSON-571714377-project-member] Waiting for the task: (returnval){ [ 814.925110] env[62070]: value = "task-1121756" [ 814.925110] env[62070]: _type = "Task" [ 814.925110] env[62070]: } to complete. 
{{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 814.929863] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e1f72b8-4f9e-4949-8cc6-df6e01832f14 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.952151] env[62070]: DEBUG oslo_vmware.api [None req-e216791d-b9a4-4463-83e2-194be4723308 tempest-ServerMetadataTestJSON-571714377 tempest-ServerMetadataTestJSON-571714377-project-member] Task: {'id': task-1121756, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 815.042359] env[62070]: DEBUG oslo_vmware.api [None req-9eb5f919-38d3-41ef-8aa0-3ff1772842b2 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Task: {'id': task-1121753, 'name': ReconfigVM_Task, 'duration_secs': 0.735886} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 815.042875] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-9eb5f919-38d3-41ef-8aa0-3ff1772842b2 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] [instance: 3d22f50a-e1b7-48f9-a044-df64d01dfeb4] Reconfigured VM instance instance-0000003d to attach disk [datastore1] 3d22f50a-e1b7-48f9-a044-df64d01dfeb4/3d22f50a-e1b7-48f9-a044-df64d01dfeb4.vmdk or device None with type sparse {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 815.043712] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9c1b13ca-551e-4af6-8106-90ea8df39d33 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.057249] env[62070]: DEBUG oslo_vmware.api [None req-9eb5f919-38d3-41ef-8aa0-3ff1772842b2 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Waiting for the task: (returnval){ [ 815.057249] env[62070]: value = "task-1121757" [ 815.057249] env[62070]: _type = "Task" [ 815.057249] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 815.058625] env[62070]: DEBUG oslo_concurrency.lockutils [None req-5dd9a036-2e35-4e0c-8126-040992989298 tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Lock "10672096-00ba-4481-8ab3-085a185076db" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.564s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 815.085229] env[62070]: DEBUG oslo_vmware.api [None req-9eb5f919-38d3-41ef-8aa0-3ff1772842b2 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Task: {'id': task-1121757, 'name': Rename_Task} progress is 10%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 815.199783] env[62070]: DEBUG nova.compute.manager [None req-f077bfde-40ac-4e3c-a213-ff8fe8aed011 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 328fbc92-8162-4e12-a02d-6e9cafe0c365] Checking state {{(pid=62070) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 815.201551] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9dafca60-e321-45bb-afdc-f4e813b096e5 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.239429] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0a49b98-f02d-4a49-880c-15f2f7d9bf35 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.255027] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee637cd0-6f0b-427f-b520-b2d9d4018336 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.294510] env[62070]: DEBUG oslo_concurrency.lockutils [req-f6b9d101-6097-4be5-bda2-bc92d240f77a req-2d0ad51e-3303-4d3d-8fb8-f665da17fbf1 service nova] Acquiring lock "0ac963b1-120a-464b-8228-3393135dd182" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 815.294751] env[62070]: DEBUG oslo_concurrency.lockutils [req-f6b9d101-6097-4be5-bda2-bc92d240f77a req-2d0ad51e-3303-4d3d-8fb8-f665da17fbf1 service nova] Acquired lock "0ac963b1-120a-464b-8228-3393135dd182" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 815.297010] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1e9d4e3-36c8-4e5e-81c9-594d0bc8edba {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.304182] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a9933b6-c3ea-4c2c-97d3-d8c9dfd343a2 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.312033] env[62070]: DEBUG oslo_vmware.api [None req-93ddb69e-2ee1-4bdc-9fb9-c0932c81d871 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Task: {'id': task-1121755, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 815.323021] env[62070]: DEBUG oslo_vmware.api [None req-cc6a54b3-9744-479e-b7db-0a87bbf229c3 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52e1968f-161b-1da8-e63c-81f0f27bfa19, 'name': SearchDatastore_Task, 'duration_secs': 0.012297} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 815.339371] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17726c69-7a61-4906-a72e-2d682ddfc538 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.345369] env[62070]: DEBUG oslo_concurrency.lockutils [req-f6b9d101-6097-4be5-bda2-bc92d240f77a req-2d0ad51e-3303-4d3d-8fb8-f665da17fbf1 service nova] Releasing lock "0ac963b1-120a-464b-8228-3393135dd182" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 815.345667] env[62070]: WARNING nova.compute.manager [req-f6b9d101-6097-4be5-bda2-bc92d240f77a req-2d0ad51e-3303-4d3d-8fb8-f665da17fbf1 service nova] [instance: 0ac963b1-120a-464b-8228-3393135dd182] Detach interface failed, port_id=7df84135-5c3d-48c5-b2cf-176e77094879, reason: No device with interface-id 7df84135-5c3d-48c5-b2cf-176e77094879 exists on VM: nova.exception.NotFound: No device with interface-id 7df84135-5c3d-48c5-b2cf-176e77094879 exists on VM [ 815.346125] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-14159b16-9073-4f0b-b2df-74dd3c9263b1 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.353164] env[62070]: DEBUG oslo_vmware.api [None req-cc6a54b3-9744-479e-b7db-0a87bbf229c3 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Waiting for the task: (returnval){ [ 815.353164] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]5232a46a-2341-f66b-9568-d0aed6614dad" [ 815.353164] env[62070]: _type = "Task" [ 815.353164] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 815.363330] env[62070]: DEBUG nova.compute.provider_tree [None req-1f25b52c-bc5b-4328-ac52-d99c50d50e57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 815.373977] env[62070]: DEBUG oslo_vmware.api [None req-cc6a54b3-9744-479e-b7db-0a87bbf229c3 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]5232a46a-2341-f66b-9568-d0aed6614dad, 'name': SearchDatastore_Task, 'duration_secs': 0.012501} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 815.374374] env[62070]: DEBUG oslo_concurrency.lockutils [None req-cc6a54b3-9744-479e-b7db-0a87bbf229c3 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Releasing lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 815.374575] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-cc6a54b3-9744-479e-b7db-0a87bbf229c3 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore1] c3c6e93c-80be-4e71-87fb-2ff8db8d30fe/c3c6e93c-80be-4e71-87fb-2ff8db8d30fe.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 815.374863] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e750104f-1d53-4afe-8889-b7c490ed0ce2 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.384292] env[62070]: DEBUG oslo_vmware.api [None req-cc6a54b3-9744-479e-b7db-0a87bbf229c3 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Waiting for the task: (returnval){ [ 815.384292] env[62070]: value = "task-1121758" [ 815.384292] env[62070]: _type = "Task" [ 815.384292] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 815.398402] env[62070]: DEBUG oslo_vmware.api [None req-cc6a54b3-9744-479e-b7db-0a87bbf229c3 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Task: {'id': task-1121758, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 815.447100] env[62070]: DEBUG oslo_vmware.api [None req-e216791d-b9a4-4463-83e2-194be4723308 tempest-ServerMetadataTestJSON-571714377 tempest-ServerMetadataTestJSON-571714377-project-member] Task: {'id': task-1121756, 'name': PowerOffVM_Task, 'duration_secs': 0.247656} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 815.447440] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-e216791d-b9a4-4463-83e2-194be4723308 tempest-ServerMetadataTestJSON-571714377 tempest-ServerMetadataTestJSON-571714377-project-member] [instance: efef4aac-5b74-4a41-9f74-3d4cb4f80cdb] Powered off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 815.447698] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-e216791d-b9a4-4463-83e2-194be4723308 tempest-ServerMetadataTestJSON-571714377 tempest-ServerMetadataTestJSON-571714377-project-member] [instance: efef4aac-5b74-4a41-9f74-3d4cb4f80cdb] Unregistering the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 815.448287] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8d0eebb8-df57-4b94-96bd-4b7382130e46 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.462486] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-80e54eb4-2bdf-469f-a028-5ebd9367702b tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] [instance: 359ae9f2-a907-459e-99b9-3e043d5d015f] Creating linked-clone VM from snapshot {{(pid=62070) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 815.463233] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-42a10500-20a4-4736-93ff-0d23f73d1f5d {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.473212] env[62070]: DEBUG oslo_vmware.api [None req-80e54eb4-2bdf-469f-a028-5ebd9367702b tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Waiting for the task: (returnval){ [ 815.473212] env[62070]: value = "task-1121760" [ 815.473212] env[62070]: _type = "Task" [ 815.473212] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 815.483367] env[62070]: DEBUG oslo_vmware.api [None req-80e54eb4-2bdf-469f-a028-5ebd9367702b tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Task: {'id': task-1121760, 'name': CloneVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 815.571263] env[62070]: DEBUG oslo_vmware.api [None req-9eb5f919-38d3-41ef-8aa0-3ff1772842b2 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Task: {'id': task-1121757, 'name': Rename_Task, 'duration_secs': 0.411393} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 815.571542] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-9eb5f919-38d3-41ef-8aa0-3ff1772842b2 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] [instance: 3d22f50a-e1b7-48f9-a044-df64d01dfeb4] Powering on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 815.572273] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fc66b676-7e5e-4b0e-b7f8-577b03fc2fe9 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.576802] env[62070]: DEBUG oslo_concurrency.lockutils [None req-805e6d62-3421-4e67-b8e8-d940be524c26 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Acquiring lock "refresh_cache-0ac963b1-120a-464b-8228-3393135dd182" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 815.577080] env[62070]: DEBUG oslo_concurrency.lockutils [None req-805e6d62-3421-4e67-b8e8-d940be524c26 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Acquired lock "refresh_cache-0ac963b1-120a-464b-8228-3393135dd182" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 815.577237] env[62070]: DEBUG nova.network.neutron [None req-805e6d62-3421-4e67-b8e8-d940be524c26 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] [instance: 0ac963b1-120a-464b-8228-3393135dd182] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 815.583071] env[62070]: DEBUG oslo_vmware.api [None req-9eb5f919-38d3-41ef-8aa0-3ff1772842b2 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Waiting for the task: (returnval){ [ 815.583071] env[62070]: value = "task-1121761" [ 815.583071] env[62070]: _type = "Task" [ 815.583071] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 815.589961] env[62070]: DEBUG oslo_vmware.api [None req-9eb5f919-38d3-41ef-8aa0-3ff1772842b2 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Task: {'id': task-1121761, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 815.720181] env[62070]: INFO nova.compute.manager [None req-f077bfde-40ac-4e3c-a213-ff8fe8aed011 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 328fbc92-8162-4e12-a02d-6e9cafe0c365] instance snapshotting [ 815.723506] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9991b32b-0655-4ce8-90ce-4abdd0ed84dc {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.746851] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66b29ec0-48b5-4872-841c-ceeb6ee1828d {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.774753] env[62070]: DEBUG oslo_vmware.api [None req-93ddb69e-2ee1-4bdc-9fb9-c0932c81d871 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Task: {'id': task-1121755, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.586935} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 815.775128] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-93ddb69e-2ee1-4bdc-9fb9-c0932c81d871 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore1] 5ec9074b-1237-4404-b13c-a7ca0dbe1d43/5ec9074b-1237-4404-b13c-a7ca0dbe1d43.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 815.775376] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-93ddb69e-2ee1-4bdc-9fb9-c0932c81d871 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] [instance: 5ec9074b-1237-4404-b13c-a7ca0dbe1d43] Extending root virtual disk to 1048576 {{(pid=62070) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 815.775962] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-40ad6590-77f6-413e-a15f-8c0c0864e2cd {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.785219] env[62070]: DEBUG oslo_vmware.api [None req-93ddb69e-2ee1-4bdc-9fb9-c0932c81d871 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Waiting for the task: (returnval){ [ 815.785219] env[62070]: value = "task-1121762" [ 815.785219] env[62070]: _type = "Task" [ 815.785219] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 815.796984] env[62070]: DEBUG oslo_vmware.api [None req-93ddb69e-2ee1-4bdc-9fb9-c0932c81d871 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Task: {'id': task-1121762, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 815.869358] env[62070]: DEBUG nova.scheduler.client.report [None req-1f25b52c-bc5b-4328-ac52-d99c50d50e57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 815.896966] env[62070]: DEBUG oslo_vmware.api [None req-cc6a54b3-9744-479e-b7db-0a87bbf229c3 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Task: {'id': task-1121758, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 815.899584] env[62070]: DEBUG oslo_concurrency.lockutils [None req-b541edfd-baeb-42f7-9f84-c3dcd80fd4e7 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Acquiring lock "0ac963b1-120a-464b-8228-3393135dd182" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 815.900079] env[62070]: DEBUG oslo_concurrency.lockutils [None req-b541edfd-baeb-42f7-9f84-c3dcd80fd4e7 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Lock "0ac963b1-120a-464b-8228-3393135dd182" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 815.901609] env[62070]: DEBUG oslo_concurrency.lockutils [None req-b541edfd-baeb-42f7-9f84-c3dcd80fd4e7 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Acquiring lock "0ac963b1-120a-464b-8228-3393135dd182-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 815.901609] env[62070]: DEBUG oslo_concurrency.lockutils [None req-b541edfd-baeb-42f7-9f84-c3dcd80fd4e7 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Lock "0ac963b1-120a-464b-8228-3393135dd182-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 815.901609] env[62070]: DEBUG oslo_concurrency.lockutils [None req-b541edfd-baeb-42f7-9f84-c3dcd80fd4e7 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Lock "0ac963b1-120a-464b-8228-3393135dd182-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 
815.905314] env[62070]: INFO nova.compute.manager [None req-b541edfd-baeb-42f7-9f84-c3dcd80fd4e7 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] [instance: 0ac963b1-120a-464b-8228-3393135dd182] Terminating instance [ 815.908741] env[62070]: DEBUG nova.compute.manager [None req-b541edfd-baeb-42f7-9f84-c3dcd80fd4e7 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] [instance: 0ac963b1-120a-464b-8228-3393135dd182] Start destroying the instance on the hypervisor. {{(pid=62070) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 815.909186] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-b541edfd-baeb-42f7-9f84-c3dcd80fd4e7 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] [instance: 0ac963b1-120a-464b-8228-3393135dd182] Destroying instance {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 815.911308] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ce7444e-84c0-41c3-8236-c1d94ba1abc2 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.924493] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-b541edfd-baeb-42f7-9f84-c3dcd80fd4e7 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] [instance: 0ac963b1-120a-464b-8228-3393135dd182] Powering off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 815.925343] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5669f586-abb2-4738-b211-4e79fd6c2d17 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.934890] env[62070]: DEBUG oslo_vmware.api [None req-b541edfd-baeb-42f7-9f84-c3dcd80fd4e7 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Waiting for the task: (returnval){ [ 815.934890] env[62070]: value = "task-1121763" [ 815.934890] env[62070]: _type = "Task" [ 815.934890] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 815.946965] env[62070]: DEBUG oslo_vmware.api [None req-b541edfd-baeb-42f7-9f84-c3dcd80fd4e7 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Task: {'id': task-1121763, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 815.985629] env[62070]: DEBUG oslo_vmware.api [None req-80e54eb4-2bdf-469f-a028-5ebd9367702b tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Task: {'id': task-1121760, 'name': CloneVM_Task} progress is 94%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 816.094408] env[62070]: DEBUG oslo_vmware.api [None req-9eb5f919-38d3-41ef-8aa0-3ff1772842b2 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Task: {'id': task-1121761, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 816.259585] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-f077bfde-40ac-4e3c-a213-ff8fe8aed011 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 328fbc92-8162-4e12-a02d-6e9cafe0c365] Creating Snapshot of the VM instance {{(pid=62070) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 816.260267] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-98a79cfc-37cb-4bc3-9cca-3f9fdf1f3d29 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.270699] env[62070]: DEBUG oslo_vmware.api [None req-f077bfde-40ac-4e3c-a213-ff8fe8aed011 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Waiting for the task: (returnval){ [ 816.270699] env[62070]: value = "task-1121764" [ 816.270699] env[62070]: _type = "Task" [ 816.270699] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 816.285024] env[62070]: DEBUG oslo_vmware.api [None req-f077bfde-40ac-4e3c-a213-ff8fe8aed011 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Task: {'id': task-1121764, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 816.296680] env[62070]: DEBUG oslo_vmware.api [None req-93ddb69e-2ee1-4bdc-9fb9-c0932c81d871 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Task: {'id': task-1121762, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.125359} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 816.296969] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-93ddb69e-2ee1-4bdc-9fb9-c0932c81d871 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] [instance: 5ec9074b-1237-4404-b13c-a7ca0dbe1d43] Extended root virtual disk {{(pid=62070) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 816.297831] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dab2df13-30b3-421a-8e2a-ca920d85d061 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.326621] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-93ddb69e-2ee1-4bdc-9fb9-c0932c81d871 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] [instance: 5ec9074b-1237-4404-b13c-a7ca0dbe1d43] Reconfiguring VM instance instance-0000003e to attach disk [datastore1] 5ec9074b-1237-4404-b13c-a7ca0dbe1d43/5ec9074b-1237-4404-b13c-a7ca0dbe1d43.vmdk or device None with type sparse {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 816.327786] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-24cac127-ba13-4209-9fad-5af92cbf2af0 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.347393] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-e216791d-b9a4-4463-83e2-194be4723308 tempest-ServerMetadataTestJSON-571714377 tempest-ServerMetadataTestJSON-571714377-project-member] [instance: efef4aac-5b74-4a41-9f74-3d4cb4f80cdb] Unregistered the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 816.347720] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-e216791d-b9a4-4463-83e2-194be4723308 tempest-ServerMetadataTestJSON-571714377 tempest-ServerMetadataTestJSON-571714377-project-member] [instance: efef4aac-5b74-4a41-9f74-3d4cb4f80cdb] Deleting contents of the VM from datastore datastore2 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 816.348032] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-e216791d-b9a4-4463-83e2-194be4723308 tempest-ServerMetadataTestJSON-571714377 tempest-ServerMetadataTestJSON-571714377-project-member] Deleting the datastore file [datastore2] efef4aac-5b74-4a41-9f74-3d4cb4f80cdb {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 816.348380] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-414aac43-317c-4698-afa0-ba51d1bcf880 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.354724] env[62070]: DEBUG oslo_vmware.api [None req-93ddb69e-2ee1-4bdc-9fb9-c0932c81d871 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Waiting for the task: (returnval){ [ 816.354724] env[62070]: value = "task-1121765" [ 816.354724] env[62070]: _type = "Task" [ 816.354724] env[62070]: } to complete. 
{{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 816.361200] env[62070]: DEBUG oslo_vmware.api [None req-e216791d-b9a4-4463-83e2-194be4723308 tempest-ServerMetadataTestJSON-571714377 tempest-ServerMetadataTestJSON-571714377-project-member] Waiting for the task: (returnval){ [ 816.361200] env[62070]: value = "task-1121766" [ 816.361200] env[62070]: _type = "Task" [ 816.361200] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 816.366049] env[62070]: DEBUG oslo_vmware.api [None req-93ddb69e-2ee1-4bdc-9fb9-c0932c81d871 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Task: {'id': task-1121765, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 816.375552] env[62070]: DEBUG oslo_concurrency.lockutils [None req-1f25b52c-bc5b-4328-ac52-d99c50d50e57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.227s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 816.377678] env[62070]: DEBUG oslo_vmware.api [None req-e216791d-b9a4-4463-83e2-194be4723308 tempest-ServerMetadataTestJSON-571714377 tempest-ServerMetadataTestJSON-571714377-project-member] Task: {'id': task-1121766, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 816.379300] env[62070]: DEBUG oslo_concurrency.lockutils [None req-afd9a476-b061-4376-a74f-1ae1bdb543db tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.921s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 816.381052] env[62070]: INFO nova.compute.claims [None req-afd9a476-b061-4376-a74f-1ae1bdb543db tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] [instance: 4a5f644a-1670-4c6b-a762-f87f1ee4cce5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 816.396571] env[62070]: DEBUG oslo_vmware.api [None req-cc6a54b3-9744-479e-b7db-0a87bbf229c3 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Task: {'id': task-1121758, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.80786} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 816.397098] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-cc6a54b3-9744-479e-b7db-0a87bbf229c3 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore1] c3c6e93c-80be-4e71-87fb-2ff8db8d30fe/c3c6e93c-80be-4e71-87fb-2ff8db8d30fe.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 816.397242] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-cc6a54b3-9744-479e-b7db-0a87bbf229c3 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: c3c6e93c-80be-4e71-87fb-2ff8db8d30fe] Extending root virtual disk to 1048576 {{(pid=62070) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 816.397491] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5d6145fd-dc64-4e59-9bd9-cfb36bcaa693 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.405943] env[62070]: DEBUG oslo_vmware.api [None req-cc6a54b3-9744-479e-b7db-0a87bbf229c3 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Waiting for the task: (returnval){ [ 816.405943] env[62070]: value = "task-1121767" [ 816.405943] env[62070]: _type = "Task" [ 816.405943] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 816.417984] env[62070]: DEBUG oslo_vmware.api [None req-cc6a54b3-9744-479e-b7db-0a87bbf229c3 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Task: {'id': task-1121767, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 816.420060] env[62070]: INFO nova.scheduler.client.report [None req-1f25b52c-bc5b-4328-ac52-d99c50d50e57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Deleted allocations for instance 13e3576e-4f4c-4541-a637-daa124cbf8dd [ 816.452139] env[62070]: DEBUG oslo_vmware.api [None req-b541edfd-baeb-42f7-9f84-c3dcd80fd4e7 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Task: {'id': task-1121763, 'name': PowerOffVM_Task, 'duration_secs': 0.478387} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 816.452492] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-b541edfd-baeb-42f7-9f84-c3dcd80fd4e7 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] [instance: 0ac963b1-120a-464b-8228-3393135dd182] Powered off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 816.452780] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-b541edfd-baeb-42f7-9f84-c3dcd80fd4e7 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] [instance: 0ac963b1-120a-464b-8228-3393135dd182] Unregistering the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 816.453850] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9cab0a6f-f880-47e4-bc10-ba533168103b {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.490133] env[62070]: DEBUG oslo_vmware.api [None req-80e54eb4-2bdf-469f-a028-5ebd9367702b tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Task: {'id': task-1121760, 'name': CloneVM_Task} progress is 94%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 816.594015] env[62070]: DEBUG oslo_vmware.api [None req-9eb5f919-38d3-41ef-8aa0-3ff1772842b2 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Task: {'id': task-1121761, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 816.689555] env[62070]: DEBUG nova.objects.instance [None req-c3a2fe66-5321-413d-a208-355f06146e02 tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Lazy-loading 'flavor' on Instance uuid 10672096-00ba-4481-8ab3-085a185076db {{(pid=62070) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 816.783681] env[62070]: DEBUG oslo_vmware.api [None req-f077bfde-40ac-4e3c-a213-ff8fe8aed011 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Task: {'id': task-1121764, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 816.793642] env[62070]: INFO nova.network.neutron [None req-805e6d62-3421-4e67-b8e8-d940be524c26 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] [instance: 0ac963b1-120a-464b-8228-3393135dd182] Port 848ce3e0-8d08-460b-b770-75628ae28fd3 from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. 
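[Editor's note] The pattern that dominates the records above — "Invoking <Something>_Task", "Waiting for the task: (returnval){ ... } to complete", then repeated "_poll_task ... progress is N%" lines — is oslo.vmware's submit-then-poll loop. A minimal sketch of that pattern follows, assuming an already reachable vCenter; the host, credentials, and vm_ref below are placeholders, not values taken from this log, and this is not Nova's actual code.

    # Minimal sketch of the invoke-then-poll pattern behind records such as the
    # PowerOffVM_Task above. Host/credentials/vm_ref are assumed placeholders.
    from oslo_vmware import api

    # A real deployment builds this from nova.conf [vmware] options.
    session = api.VMwareAPISession('vc.example.test', 'user', 'secret',
                                   api_retry_count=10, task_poll_interval=0.5)

    def power_off(vm_ref):
        # The SOAP call returns a Task managed-object reference immediately...
        task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
        # ...and wait_for_task polls it until completion, which is what produces
        # the "Task: {'id': task-..., 'name': PowerOffVM_Task} progress is N%"
        # and "completed successfully" records seen in this log.
        session.wait_for_task(task)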
[ 816.794030] env[62070]: DEBUG nova.network.neutron [None req-805e6d62-3421-4e67-b8e8-d940be524c26 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] [instance: 0ac963b1-120a-464b-8228-3393135dd182] Updating instance_info_cache with network_info: [{"id": "6326b098-3c76-4152-b623-8921285ec01b", "address": "fa:16:3e:cc:12:22", "network": {"id": "48dc51c7-cfa4-452e-9d72-2968d9a40dfa", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-274800531-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.179", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "925dff51764c4b56ae7ea05fbde2ecdd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2c7c1b46-cb81-45da-b5aa-7905d4da5854", "external-id": "nsx-vlan-transportzone-15", "segmentation_id": 15, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6326b098-3c", "ovs_interfaceid": "6326b098-3c76-4152-b623-8921285ec01b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 816.864985] env[62070]: DEBUG nova.compute.manager [req-eacbcc24-a234-4a26-b9d8-7a4fd0b476e7 req-2d704a6d-116d-4cea-aa3a-56276f3253d3 service nova] [instance: 0ac963b1-120a-464b-8228-3393135dd182] Received event network-vif-deleted-848ce3e0-8d08-460b-b770-75628ae28fd3 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 816.874228] env[62070]: DEBUG oslo_vmware.api [None req-93ddb69e-2ee1-4bdc-9fb9-c0932c81d871 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Task: {'id': task-1121765, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 816.881339] env[62070]: DEBUG oslo_vmware.api [None req-e216791d-b9a4-4463-83e2-194be4723308 tempest-ServerMetadataTestJSON-571714377 tempest-ServerMetadataTestJSON-571714377-project-member] Task: {'id': task-1121766, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.239271} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 816.881610] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-e216791d-b9a4-4463-83e2-194be4723308 tempest-ServerMetadataTestJSON-571714377 tempest-ServerMetadataTestJSON-571714377-project-member] Deleted the datastore file {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 816.881800] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-e216791d-b9a4-4463-83e2-194be4723308 tempest-ServerMetadataTestJSON-571714377 tempest-ServerMetadataTestJSON-571714377-project-member] [instance: efef4aac-5b74-4a41-9f74-3d4cb4f80cdb] Deleted contents of the VM from datastore datastore2 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 816.882090] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-e216791d-b9a4-4463-83e2-194be4723308 tempest-ServerMetadataTestJSON-571714377 tempest-ServerMetadataTestJSON-571714377-project-member] [instance: efef4aac-5b74-4a41-9f74-3d4cb4f80cdb] Instance destroyed {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 816.882320] env[62070]: INFO nova.compute.manager [None req-e216791d-b9a4-4463-83e2-194be4723308 tempest-ServerMetadataTestJSON-571714377 tempest-ServerMetadataTestJSON-571714377-project-member] [instance: efef4aac-5b74-4a41-9f74-3d4cb4f80cdb] Took 1.99 seconds to destroy the instance on the hypervisor. [ 816.882587] env[62070]: DEBUG oslo.service.loopingcall [None req-e216791d-b9a4-4463-83e2-194be4723308 tempest-ServerMetadataTestJSON-571714377 tempest-ServerMetadataTestJSON-571714377-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 816.882945] env[62070]: DEBUG nova.compute.manager [-] [instance: efef4aac-5b74-4a41-9f74-3d4cb4f80cdb] Deallocating network for instance {{(pid=62070) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 816.882945] env[62070]: DEBUG nova.network.neutron [-] [instance: efef4aac-5b74-4a41-9f74-3d4cb4f80cdb] deallocate_for_instance() {{(pid=62070) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 816.918323] env[62070]: DEBUG oslo_vmware.api [None req-cc6a54b3-9744-479e-b7db-0a87bbf229c3 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Task: {'id': task-1121767, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.081103} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 816.918323] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-cc6a54b3-9744-479e-b7db-0a87bbf229c3 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: c3c6e93c-80be-4e71-87fb-2ff8db8d30fe] Extended root virtual disk {{(pid=62070) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 816.918876] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b53881ff-a7a0-498d-a3ed-e2ac354d5fb4 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.948297] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-cc6a54b3-9744-479e-b7db-0a87bbf229c3 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: c3c6e93c-80be-4e71-87fb-2ff8db8d30fe] Reconfiguring VM instance instance-0000003a to attach disk [datastore1] c3c6e93c-80be-4e71-87fb-2ff8db8d30fe/c3c6e93c-80be-4e71-87fb-2ff8db8d30fe.vmdk or device None with type sparse {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 816.951273] env[62070]: DEBUG oslo_concurrency.lockutils [None req-1f25b52c-bc5b-4328-ac52-d99c50d50e57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Lock "13e3576e-4f4c-4541-a637-daa124cbf8dd" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 35.857s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 816.955769] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ba3bbc34-0b70-4736-8909-1c54977c800c {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.981798] env[62070]: DEBUG oslo_vmware.api [None req-cc6a54b3-9744-479e-b7db-0a87bbf229c3 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Waiting for the task: (returnval){ [ 816.981798] env[62070]: value = "task-1121769" [ 816.981798] env[62070]: _type = "Task" [ 816.981798] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 816.989786] env[62070]: DEBUG oslo_vmware.api [None req-80e54eb4-2bdf-469f-a028-5ebd9367702b tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Task: {'id': task-1121760, 'name': CloneVM_Task} progress is 94%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 816.997071] env[62070]: DEBUG oslo_vmware.api [None req-cc6a54b3-9744-479e-b7db-0a87bbf229c3 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Task: {'id': task-1121769, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 817.097763] env[62070]: DEBUG oslo_vmware.api [None req-9eb5f919-38d3-41ef-8aa0-3ff1772842b2 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Task: {'id': task-1121761, 'name': PowerOnVM_Task, 'duration_secs': 1.037019} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 817.098345] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-9eb5f919-38d3-41ef-8aa0-3ff1772842b2 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] [instance: 3d22f50a-e1b7-48f9-a044-df64d01dfeb4] Powered on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 817.098776] env[62070]: INFO nova.compute.manager [None req-9eb5f919-38d3-41ef-8aa0-3ff1772842b2 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] [instance: 3d22f50a-e1b7-48f9-a044-df64d01dfeb4] Took 10.18 seconds to spawn the instance on the hypervisor. [ 817.099726] env[62070]: DEBUG nova.compute.manager [None req-9eb5f919-38d3-41ef-8aa0-3ff1772842b2 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] [instance: 3d22f50a-e1b7-48f9-a044-df64d01dfeb4] Checking state {{(pid=62070) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 817.102346] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d52af20-81bb-4e1c-bd2c-f722a3e5c535 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.200325] env[62070]: DEBUG oslo_concurrency.lockutils [None req-c3a2fe66-5321-413d-a208-355f06146e02 tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Acquiring lock "refresh_cache-10672096-00ba-4481-8ab3-085a185076db" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 817.200325] env[62070]: DEBUG oslo_concurrency.lockutils [None req-c3a2fe66-5321-413d-a208-355f06146e02 tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Acquired lock "refresh_cache-10672096-00ba-4481-8ab3-085a185076db" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 817.200325] env[62070]: DEBUG nova.network.neutron [None req-c3a2fe66-5321-413d-a208-355f06146e02 tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] [instance: 10672096-00ba-4481-8ab3-085a185076db] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 817.200325] env[62070]: DEBUG nova.objects.instance [None req-c3a2fe66-5321-413d-a208-355f06146e02 tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Lazy-loading 'info_cache' on Instance uuid 10672096-00ba-4481-8ab3-085a185076db {{(pid=62070) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 817.286561] env[62070]: DEBUG oslo_vmware.api [None req-f077bfde-40ac-4e3c-a213-ff8fe8aed011 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Task: {'id': task-1121764, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 817.300785] env[62070]: DEBUG oslo_concurrency.lockutils [None req-805e6d62-3421-4e67-b8e8-d940be524c26 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Releasing lock "refresh_cache-0ac963b1-120a-464b-8228-3393135dd182" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 817.370674] env[62070]: DEBUG oslo_vmware.api [None req-93ddb69e-2ee1-4bdc-9fb9-c0932c81d871 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Task: {'id': task-1121765, 'name': ReconfigVM_Task, 'duration_secs': 0.854332} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 817.370974] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-93ddb69e-2ee1-4bdc-9fb9-c0932c81d871 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] [instance: 5ec9074b-1237-4404-b13c-a7ca0dbe1d43] Reconfigured VM instance instance-0000003e to attach disk [datastore1] 5ec9074b-1237-4404-b13c-a7ca0dbe1d43/5ec9074b-1237-4404-b13c-a7ca0dbe1d43.vmdk or device None with type sparse {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 817.371637] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5a6fc89a-a8f1-4659-8764-454f5eecb44b {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.379629] env[62070]: DEBUG oslo_vmware.api [None req-93ddb69e-2ee1-4bdc-9fb9-c0932c81d871 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Waiting for the task: (returnval){ [ 817.379629] env[62070]: value = "task-1121770" [ 817.379629] env[62070]: _type = "Task" [ 817.379629] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 817.401849] env[62070]: DEBUG oslo_vmware.api [None req-93ddb69e-2ee1-4bdc-9fb9-c0932c81d871 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Task: {'id': task-1121770, 'name': Rename_Task} progress is 5%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 817.505169] env[62070]: DEBUG oslo_vmware.api [None req-80e54eb4-2bdf-469f-a028-5ebd9367702b tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Task: {'id': task-1121760, 'name': CloneVM_Task, 'duration_secs': 1.973559} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 817.505169] env[62070]: DEBUG oslo_vmware.api [None req-cc6a54b3-9744-479e-b7db-0a87bbf229c3 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Task: {'id': task-1121769, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 817.506777] env[62070]: INFO nova.virt.vmwareapi.vmops [None req-80e54eb4-2bdf-469f-a028-5ebd9367702b tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] [instance: 359ae9f2-a907-459e-99b9-3e043d5d015f] Created linked-clone VM from snapshot [ 817.506777] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6cb02351-8932-4a09-8b47-d069a9b27b55 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.517083] env[62070]: DEBUG nova.virt.vmwareapi.images [None req-80e54eb4-2bdf-469f-a028-5ebd9367702b tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] [instance: 359ae9f2-a907-459e-99b9-3e043d5d015f] Uploading image 92cf8604-9c53-4de5-acd5-e4719f0927f0 {{(pid=62070) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:442}} [ 817.550245] env[62070]: DEBUG oslo_vmware.rw_handles [None req-80e54eb4-2bdf-469f-a028-5ebd9367702b tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 817.550245] env[62070]: value = "vm-245413" [ 817.550245] env[62070]: _type = "VirtualMachine" [ 817.550245] env[62070]: }. {{(pid=62070) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 817.550890] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-ebecf0b2-a663-47cf-b7a4-3e21b0a24799 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.562226] env[62070]: DEBUG oslo_vmware.rw_handles [None req-80e54eb4-2bdf-469f-a028-5ebd9367702b tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Lease: (returnval){ [ 817.562226] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]5269bcc5-278f-cf3f-106f-4c576c91d466" [ 817.562226] env[62070]: _type = "HttpNfcLease" [ 817.562226] env[62070]: } obtained for exporting VM: (result){ [ 817.562226] env[62070]: value = "vm-245413" [ 817.562226] env[62070]: _type = "VirtualMachine" [ 817.562226] env[62070]: }. {{(pid=62070) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 817.563078] env[62070]: DEBUG oslo_vmware.api [None req-80e54eb4-2bdf-469f-a028-5ebd9367702b tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Waiting for the lease: (returnval){ [ 817.563078] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]5269bcc5-278f-cf3f-106f-4c576c91d466" [ 817.563078] env[62070]: _type = "HttpNfcLease" [ 817.563078] env[62070]: } to be ready. {{(pid=62070) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 817.574989] env[62070]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 817.574989] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]5269bcc5-278f-cf3f-106f-4c576c91d466" [ 817.574989] env[62070]: _type = "HttpNfcLease" [ 817.574989] env[62070]: } is initializing. 
{{(pid=62070) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 817.625125] env[62070]: INFO nova.compute.manager [None req-9eb5f919-38d3-41ef-8aa0-3ff1772842b2 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] [instance: 3d22f50a-e1b7-48f9-a044-df64d01dfeb4] Took 43.58 seconds to build instance. [ 817.706029] env[62070]: DEBUG nova.objects.base [None req-c3a2fe66-5321-413d-a208-355f06146e02 tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Object Instance<10672096-00ba-4481-8ab3-085a185076db> lazy-loaded attributes: flavor,info_cache {{(pid=62070) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 817.789309] env[62070]: DEBUG oslo_vmware.api [None req-f077bfde-40ac-4e3c-a213-ff8fe8aed011 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Task: {'id': task-1121764, 'name': CreateSnapshot_Task, 'duration_secs': 1.434826} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 817.789724] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-f077bfde-40ac-4e3c-a213-ff8fe8aed011 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 328fbc92-8162-4e12-a02d-6e9cafe0c365] Created Snapshot of the VM instance {{(pid=62070) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 817.790629] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7404320c-84bb-4520-9bf2-919e7420bb19 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.807693] env[62070]: DEBUG oslo_concurrency.lockutils [None req-805e6d62-3421-4e67-b8e8-d940be524c26 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Lock "interface-0ac963b1-120a-464b-8228-3393135dd182-7df84135-5c3d-48c5-b2cf-176e77094879" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 10.129s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 817.809095] env[62070]: DEBUG nova.network.neutron [-] [instance: efef4aac-5b74-4a41-9f74-3d4cb4f80cdb] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 817.895189] env[62070]: DEBUG oslo_vmware.api [None req-93ddb69e-2ee1-4bdc-9fb9-c0932c81d871 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Task: {'id': task-1121770, 'name': Rename_Task, 'duration_secs': 0.308076} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 817.896765] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-93ddb69e-2ee1-4bdc-9fb9-c0932c81d871 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] [instance: 5ec9074b-1237-4404-b13c-a7ca0dbe1d43] Powering on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 817.896765] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7543e8d2-9ad3-4069-8538-14d83e93e3c9 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.906125] env[62070]: DEBUG oslo_vmware.api [None req-93ddb69e-2ee1-4bdc-9fb9-c0932c81d871 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Waiting for the task: (returnval){ [ 817.906125] env[62070]: value = "task-1121772" [ 817.906125] env[62070]: _type = "Task" [ 817.906125] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 817.918209] env[62070]: DEBUG oslo_vmware.api [None req-93ddb69e-2ee1-4bdc-9fb9-c0932c81d871 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Task: {'id': task-1121772, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 817.938813] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a86faaad-f85c-423f-a951-10cb4c3dda64 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.952795] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88051c20-558e-4a82-9a2f-1318d81f6faa {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.995856] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7a30322-13ea-4358-9be7-0b1728994d7e {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.009086] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-199ce904-5ad5-4f42-8b6c-ae448b017b48 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.014051] env[62070]: DEBUG oslo_vmware.api [None req-cc6a54b3-9744-479e-b7db-0a87bbf229c3 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Task: {'id': task-1121769, 'name': ReconfigVM_Task, 'duration_secs': 0.852841} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 818.014453] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-cc6a54b3-9744-479e-b7db-0a87bbf229c3 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: c3c6e93c-80be-4e71-87fb-2ff8db8d30fe] Reconfigured VM instance instance-0000003a to attach disk [datastore1] c3c6e93c-80be-4e71-87fb-2ff8db8d30fe/c3c6e93c-80be-4e71-87fb-2ff8db8d30fe.vmdk or device None with type sparse {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 818.015954] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-aea40a19-6de8-44df-a6ba-e29a7508319a {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.028078] env[62070]: DEBUG nova.compute.provider_tree [None req-afd9a476-b061-4376-a74f-1ae1bdb543db tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 818.035522] env[62070]: DEBUG oslo_vmware.api [None req-cc6a54b3-9744-479e-b7db-0a87bbf229c3 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Waiting for the task: (returnval){ [ 818.035522] env[62070]: value = "task-1121773" [ 818.035522] env[62070]: _type = "Task" [ 818.035522] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 818.049731] env[62070]: DEBUG oslo_vmware.api [None req-cc6a54b3-9744-479e-b7db-0a87bbf229c3 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Task: {'id': task-1121773, 'name': Rename_Task} progress is 5%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 818.080150] env[62070]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 818.080150] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]5269bcc5-278f-cf3f-106f-4c576c91d466" [ 818.080150] env[62070]: _type = "HttpNfcLease" [ 818.080150] env[62070]: } is ready. {{(pid=62070) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 818.080150] env[62070]: DEBUG oslo_vmware.rw_handles [None req-80e54eb4-2bdf-469f-a028-5ebd9367702b tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 818.080150] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]5269bcc5-278f-cf3f-106f-4c576c91d466" [ 818.080150] env[62070]: _type = "HttpNfcLease" [ 818.080150] env[62070]: }. 
{{(pid=62070) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 818.080150] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e18cd60-e39e-4ad6-a654-d9d3c8856b9d {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.091863] env[62070]: DEBUG oslo_vmware.rw_handles [None req-80e54eb4-2bdf-469f-a028-5ebd9367702b tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52d8a702-eb1e-b92b-34f7-40e05932ad2d/disk-0.vmdk from lease info. {{(pid=62070) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 818.092070] env[62070]: DEBUG oslo_vmware.rw_handles [None req-80e54eb4-2bdf-469f-a028-5ebd9367702b tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52d8a702-eb1e-b92b-34f7-40e05932ad2d/disk-0.vmdk for reading. {{(pid=62070) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 818.165451] env[62070]: DEBUG oslo_concurrency.lockutils [None req-9eb5f919-38d3-41ef-8aa0-3ff1772842b2 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Lock "3d22f50a-e1b7-48f9-a044-df64d01dfeb4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 125.274s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 818.217308] env[62070]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-ab0877ca-fed0-4d47-8168-d76a07af5423 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.323768] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-f077bfde-40ac-4e3c-a213-ff8fe8aed011 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 328fbc92-8162-4e12-a02d-6e9cafe0c365] Creating linked-clone VM from snapshot {{(pid=62070) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 818.324437] env[62070]: INFO nova.compute.manager [-] [instance: efef4aac-5b74-4a41-9f74-3d4cb4f80cdb] Took 1.44 seconds to deallocate network for instance. [ 818.324577] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-d2c3524a-b716-4979-b7c1-1fcdad073cba {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.342070] env[62070]: DEBUG oslo_vmware.api [None req-f077bfde-40ac-4e3c-a213-ff8fe8aed011 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Waiting for the task: (returnval){ [ 818.342070] env[62070]: value = "task-1121774" [ 818.342070] env[62070]: _type = "Task" [ 818.342070] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 818.354135] env[62070]: DEBUG oslo_vmware.api [None req-f077bfde-40ac-4e3c-a213-ff8fe8aed011 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Task: {'id': task-1121774, 'name': CloneVM_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 818.417547] env[62070]: DEBUG oslo_vmware.api [None req-93ddb69e-2ee1-4bdc-9fb9-c0932c81d871 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Task: {'id': task-1121772, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 818.532956] env[62070]: DEBUG nova.scheduler.client.report [None req-afd9a476-b061-4376-a74f-1ae1bdb543db tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 818.548700] env[62070]: DEBUG oslo_vmware.api [None req-cc6a54b3-9744-479e-b7db-0a87bbf229c3 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Task: {'id': task-1121773, 'name': Rename_Task, 'duration_secs': 0.180704} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 818.549178] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-cc6a54b3-9744-479e-b7db-0a87bbf229c3 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: c3c6e93c-80be-4e71-87fb-2ff8db8d30fe] Powering on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 818.550123] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7ed2d841-9cf1-40dd-9275-ef6724c48f9e {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.559865] env[62070]: DEBUG oslo_vmware.api [None req-cc6a54b3-9744-479e-b7db-0a87bbf229c3 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Waiting for the task: (returnval){ [ 818.559865] env[62070]: value = "task-1121775" [ 818.559865] env[62070]: _type = "Task" [ 818.559865] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 818.576113] env[62070]: DEBUG oslo_vmware.api [None req-cc6a54b3-9744-479e-b7db-0a87bbf229c3 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Task: {'id': task-1121775, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 818.591279] env[62070]: DEBUG nova.network.neutron [None req-c3a2fe66-5321-413d-a208-355f06146e02 tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] [instance: 10672096-00ba-4481-8ab3-085a185076db] Updating instance_info_cache with network_info: [{"id": "d7ba9e32-c151-4f86-90ea-ba3a9dc7ea10", "address": "fa:16:3e:0b:04:cf", "network": {"id": "25f38244-53c8-44e1-a7a9-a8a37ce83ffa", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1382757376-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7525c88cd803420094fb2af593ba5d65", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69e1a5c1-4ae7-409b-8de7-d401684ef60d", "external-id": "nsx-vlan-transportzone-740", "segmentation_id": 740, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd7ba9e32-c1", "ovs_interfaceid": "d7ba9e32-c151-4f86-90ea-ba3a9dc7ea10", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 818.844296] env[62070]: DEBUG oslo_concurrency.lockutils [None req-e216791d-b9a4-4463-83e2-194be4723308 tempest-ServerMetadataTestJSON-571714377 tempest-ServerMetadataTestJSON-571714377-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 818.854767] env[62070]: DEBUG oslo_vmware.api [None req-f077bfde-40ac-4e3c-a213-ff8fe8aed011 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Task: {'id': task-1121774, 'name': CloneVM_Task} progress is 94%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 818.917979] env[62070]: DEBUG oslo_vmware.api [None req-93ddb69e-2ee1-4bdc-9fb9-c0932c81d871 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Task: {'id': task-1121772, 'name': PowerOnVM_Task, 'duration_secs': 0.861551} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 818.918329] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-93ddb69e-2ee1-4bdc-9fb9-c0932c81d871 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] [instance: 5ec9074b-1237-4404-b13c-a7ca0dbe1d43] Powered on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 818.918551] env[62070]: INFO nova.compute.manager [None req-93ddb69e-2ee1-4bdc-9fb9-c0932c81d871 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] [instance: 5ec9074b-1237-4404-b13c-a7ca0dbe1d43] Took 9.37 seconds to spawn the instance on the hypervisor. 
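[Editor's note] The image-export records a little earlier (ExportVm, the HttpNfcLease going from "is initializing" to "is ready", then "Found VMDK URL ... from lease info" and "Opening URL ... for reading") follow oslo.vmware's NFC lease helpers. A rough sketch of that flow, assuming a ready session and a VM moref; the function name is illustrative, not taken from this log.

    # Sketch of the export flow: request an NFC lease for the VM, wait for it to
    # become ready, then read the disk's download URL from the lease info.
    from oslo_vmware import vim_util

    def get_export_vmdk_url(session, vm_ref):
        lease = session.invoke_api(session.vim, 'ExportVm', vm_ref)
        # Produces the "Waiting for the lease ... to be ready" / "is initializing"
        # / "is ready" records seen above.
        session.wait_for_lease_ready(lease)
        lease_info = session.invoke_api(vim_util, 'get_object_property',
                                        session.vim, lease, 'info')
        for device_url in lease_info.deviceUrl:
            if device_url.disk:        # skip non-disk device URLs
                return device_url.url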
[ 818.918778] env[62070]: DEBUG nova.compute.manager [None req-93ddb69e-2ee1-4bdc-9fb9-c0932c81d871 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] [instance: 5ec9074b-1237-4404-b13c-a7ca0dbe1d43] Checking state {{(pid=62070) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 818.920071] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-437a21bf-3e78-4ddd-91fb-e40bef7a54ed {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.039746] env[62070]: DEBUG oslo_concurrency.lockutils [None req-afd9a476-b061-4376-a74f-1ae1bdb543db tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.661s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 819.040679] env[62070]: DEBUG nova.compute.manager [None req-afd9a476-b061-4376-a74f-1ae1bdb543db tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] [instance: 4a5f644a-1670-4c6b-a762-f87f1ee4cce5] Start building networks asynchronously for instance. {{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 819.046180] env[62070]: DEBUG oslo_concurrency.lockutils [None req-41f5ad7c-2434-4c65-9485-20b83ee7fd19 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 29.549s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 819.046302] env[62070]: DEBUG oslo_concurrency.lockutils [None req-41f5ad7c-2434-4c65-9485-20b83ee7fd19 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.001s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 819.050355] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 28.361s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 819.078895] env[62070]: DEBUG oslo_vmware.api [None req-cc6a54b3-9744-479e-b7db-0a87bbf229c3 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Task: {'id': task-1121775, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 819.096498] env[62070]: INFO nova.scheduler.client.report [None req-41f5ad7c-2434-4c65-9485-20b83ee7fd19 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Deleted allocations for instance d148d561-3211-4f1f-965a-f2b14cd60b11 [ 819.097875] env[62070]: DEBUG oslo_concurrency.lockutils [None req-c3a2fe66-5321-413d-a208-355f06146e02 tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Releasing lock "refresh_cache-10672096-00ba-4481-8ab3-085a185076db" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 819.172457] env[62070]: DEBUG nova.compute.manager [req-c5f5944b-6854-4905-9930-53bb0025c3df req-fd874249-3a45-4c80-8a6b-3146f98d74b8 service nova] [instance: efef4aac-5b74-4a41-9f74-3d4cb4f80cdb] Received event network-vif-deleted-90d77a95-7927-420d-8c8d-2f64e885a145 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 819.356180] env[62070]: DEBUG oslo_vmware.api [None req-f077bfde-40ac-4e3c-a213-ff8fe8aed011 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Task: {'id': task-1121774, 'name': CloneVM_Task} progress is 94%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 819.371391] env[62070]: DEBUG oslo_concurrency.lockutils [None req-d1ae98c6-774b-4beb-8cc9-59c38f859b35 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Acquiring lock "62758a38-4819-4d5a-97ed-db6c9ceb97bf" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 819.371654] env[62070]: DEBUG oslo_concurrency.lockutils [None req-d1ae98c6-774b-4beb-8cc9-59c38f859b35 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Lock "62758a38-4819-4d5a-97ed-db6c9ceb97bf" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 819.447888] env[62070]: INFO nova.compute.manager [None req-93ddb69e-2ee1-4bdc-9fb9-c0932c81d871 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] [instance: 5ec9074b-1237-4404-b13c-a7ca0dbe1d43] Took 37.18 seconds to build instance. [ 819.555754] env[62070]: DEBUG nova.compute.utils [None req-afd9a476-b061-4376-a74f-1ae1bdb543db tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Using /dev/sd instead of None {{(pid=62070) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 819.568890] env[62070]: DEBUG nova.compute.manager [None req-afd9a476-b061-4376-a74f-1ae1bdb543db tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] [instance: 4a5f644a-1670-4c6b-a762-f87f1ee4cce5] Allocating IP information in the background. 
{{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 819.569531] env[62070]: DEBUG nova.network.neutron [None req-afd9a476-b061-4376-a74f-1ae1bdb543db tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] [instance: 4a5f644a-1670-4c6b-a762-f87f1ee4cce5] allocate_for_instance() {{(pid=62070) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 819.589028] env[62070]: DEBUG oslo_vmware.api [None req-cc6a54b3-9744-479e-b7db-0a87bbf229c3 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Task: {'id': task-1121775, 'name': PowerOnVM_Task, 'duration_secs': 0.74788} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 819.589028] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-cc6a54b3-9744-479e-b7db-0a87bbf229c3 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: c3c6e93c-80be-4e71-87fb-2ff8db8d30fe] Powered on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 819.589028] env[62070]: DEBUG nova.compute.manager [None req-cc6a54b3-9744-479e-b7db-0a87bbf229c3 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: c3c6e93c-80be-4e71-87fb-2ff8db8d30fe] Checking state {{(pid=62070) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 819.589028] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c32e405-f531-491d-a2cb-3572ec968d02 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.603650] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-c3a2fe66-5321-413d-a208-355f06146e02 tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] [instance: 10672096-00ba-4481-8ab3-085a185076db] Powering on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 819.607827] env[62070]: DEBUG oslo_concurrency.lockutils [None req-41f5ad7c-2434-4c65-9485-20b83ee7fd19 tempest-MigrationsAdminTest-1554847369 tempest-MigrationsAdminTest-1554847369-project-member] Lock "d148d561-3211-4f1f-965a-f2b14cd60b11" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 33.663s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 819.609029] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-adbedb5c-bd6b-4fd1-9853-d644d26c4dbb {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.621724] env[62070]: DEBUG oslo_vmware.api [None req-c3a2fe66-5321-413d-a208-355f06146e02 tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Waiting for the task: (returnval){ [ 819.621724] env[62070]: value = "task-1121776" [ 819.621724] env[62070]: _type = "Task" [ 819.621724] env[62070]: } to complete. 
{{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 819.635156] env[62070]: DEBUG oslo_vmware.api [None req-c3a2fe66-5321-413d-a208-355f06146e02 tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Task: {'id': task-1121776, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 819.648964] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-b541edfd-baeb-42f7-9f84-c3dcd80fd4e7 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] [instance: 0ac963b1-120a-464b-8228-3393135dd182] Unregistered the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 819.649566] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-b541edfd-baeb-42f7-9f84-c3dcd80fd4e7 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] [instance: 0ac963b1-120a-464b-8228-3393135dd182] Deleting contents of the VM from datastore datastore1 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 819.649980] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-b541edfd-baeb-42f7-9f84-c3dcd80fd4e7 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Deleting the datastore file [datastore1] 0ac963b1-120a-464b-8228-3393135dd182 {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 819.651266] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3d6e9fae-b4d0-4836-b969-db6e549c4230 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.663062] env[62070]: DEBUG oslo_vmware.api [None req-b541edfd-baeb-42f7-9f84-c3dcd80fd4e7 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Waiting for the task: (returnval){ [ 819.663062] env[62070]: value = "task-1121777" [ 819.663062] env[62070]: _type = "Task" [ 819.663062] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 819.672823] env[62070]: DEBUG oslo_vmware.api [None req-b541edfd-baeb-42f7-9f84-c3dcd80fd4e7 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Task: {'id': task-1121777, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 819.792861] env[62070]: DEBUG nova.policy [None req-afd9a476-b061-4376-a74f-1ae1bdb543db tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '764054cb62c34dfba758826f43a6fdaa', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e85c0cc8e0f544bfbb76970d3123fbb7', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62070) authorize /opt/stack/nova/nova/policy.py:201}} [ 819.858465] env[62070]: DEBUG oslo_vmware.api [None req-f077bfde-40ac-4e3c-a213-ff8fe8aed011 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Task: {'id': task-1121774, 'name': CloneVM_Task, 'duration_secs': 1.429765} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 819.858917] env[62070]: INFO nova.virt.vmwareapi.vmops [None req-f077bfde-40ac-4e3c-a213-ff8fe8aed011 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 328fbc92-8162-4e12-a02d-6e9cafe0c365] Created linked-clone VM from snapshot [ 819.860503] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63d8d585-435b-4ecb-9277-5dba92039034 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.872069] env[62070]: DEBUG nova.virt.vmwareapi.images [None req-f077bfde-40ac-4e3c-a213-ff8fe8aed011 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 328fbc92-8162-4e12-a02d-6e9cafe0c365] Uploading image f721b0bd-3ff9-47e3-950c-4df5b98cd172 {{(pid=62070) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:442}} [ 819.875428] env[62070]: DEBUG nova.compute.manager [None req-d1ae98c6-774b-4beb-8cc9-59c38f859b35 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 62758a38-4819-4d5a-97ed-db6c9ceb97bf] Starting instance... {{(pid=62070) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 819.919505] env[62070]: DEBUG oslo_vmware.rw_handles [None req-f077bfde-40ac-4e3c-a213-ff8fe8aed011 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 819.919505] env[62070]: value = "vm-245415" [ 819.919505] env[62070]: _type = "VirtualMachine" [ 819.919505] env[62070]: }. 
{{(pid=62070) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 819.923238] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-02069c12-c841-40de-9c1f-feb06f4e991d {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.936789] env[62070]: DEBUG oslo_vmware.rw_handles [None req-f077bfde-40ac-4e3c-a213-ff8fe8aed011 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Lease: (returnval){ [ 819.936789] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]5291306a-5abc-0984-763b-97c97e975ed0" [ 819.936789] env[62070]: _type = "HttpNfcLease" [ 819.936789] env[62070]: } obtained for exporting VM: (result){ [ 819.936789] env[62070]: value = "vm-245415" [ 819.936789] env[62070]: _type = "VirtualMachine" [ 819.936789] env[62070]: }. {{(pid=62070) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 819.936789] env[62070]: DEBUG oslo_vmware.api [None req-f077bfde-40ac-4e3c-a213-ff8fe8aed011 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Waiting for the lease: (returnval){ [ 819.936789] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]5291306a-5abc-0984-763b-97c97e975ed0" [ 819.936789] env[62070]: _type = "HttpNfcLease" [ 819.936789] env[62070]: } to be ready. {{(pid=62070) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 819.952567] env[62070]: DEBUG oslo_concurrency.lockutils [None req-93ddb69e-2ee1-4bdc-9fb9-c0932c81d871 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Lock "5ec9074b-1237-4404-b13c-a7ca0dbe1d43" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 126.585s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 819.953110] env[62070]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 819.953110] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]5291306a-5abc-0984-763b-97c97e975ed0" [ 819.953110] env[62070]: _type = "HttpNfcLease" [ 819.953110] env[62070]: } is ready. {{(pid=62070) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 819.953617] env[62070]: DEBUG oslo_vmware.rw_handles [None req-f077bfde-40ac-4e3c-a213-ff8fe8aed011 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 819.953617] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]5291306a-5abc-0984-763b-97c97e975ed0" [ 819.953617] env[62070]: _type = "HttpNfcLease" [ 819.953617] env[62070]: }. 
{{(pid=62070) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 819.954817] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afafc36d-c903-4f99-bd31-2be2303bd8c8 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.968390] env[62070]: DEBUG oslo_vmware.rw_handles [None req-f077bfde-40ac-4e3c-a213-ff8fe8aed011 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52cddf42-8fb8-8c35-7949-a3a618580b7a/disk-0.vmdk from lease info. {{(pid=62070) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 819.968700] env[62070]: DEBUG oslo_vmware.rw_handles [None req-f077bfde-40ac-4e3c-a213-ff8fe8aed011 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52cddf42-8fb8-8c35-7949-a3a618580b7a/disk-0.vmdk for reading. {{(pid=62070) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 820.078297] env[62070]: DEBUG nova.compute.manager [None req-afd9a476-b061-4376-a74f-1ae1bdb543db tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] [instance: 4a5f644a-1670-4c6b-a762-f87f1ee4cce5] Start building block device mappings for instance. {{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 820.106377] env[62070]: DEBUG nova.compute.resource_tracker [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Instance 0ac963b1-120a-464b-8228-3393135dd182 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62070) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 820.106616] env[62070]: DEBUG nova.compute.resource_tracker [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Instance 71aead12-a182-40a7-b5a9-91c01271b800 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62070) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 820.106802] env[62070]: DEBUG nova.compute.resource_tracker [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Instance d0914f90-200c-4715-aaab-54beacf339b9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62070) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 820.107013] env[62070]: DEBUG nova.compute.resource_tracker [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Instance 67e99ada-a8e6-4034-b19b-5b2cb883b735 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62070) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 820.107168] env[62070]: DEBUG nova.compute.resource_tracker [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Instance 963feecc-ff58-4cbb-8d6f-3f9035337087 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62070) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 820.107414] env[62070]: DEBUG nova.compute.resource_tracker [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Instance a3c42653-9a4b-42d3-bc38-8d46d95c8f64 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62070) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 820.108387] env[62070]: WARNING nova.compute.resource_tracker [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Instance dd5d90e8-964a-4e1c-a98a-bcba37a1d79e is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 820.108579] env[62070]: WARNING nova.compute.resource_tracker [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Instance 748c94c7-1233-44f4-a71a-176b26518399 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 820.108716] env[62070]: DEBUG nova.compute.resource_tracker [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Instance 359ae9f2-a907-459e-99b9-3e043d5d015f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62070) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 820.109032] env[62070]: DEBUG nova.compute.resource_tracker [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Instance 242cf24f-f9e0-49ca-9b3e-a2b88b3cdcdc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62070) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 820.109691] env[62070]: WARNING nova.compute.resource_tracker [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Instance 1ce155c8-9a10-4eff-b428-31889aa8f638 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 820.109877] env[62070]: WARNING nova.compute.resource_tracker [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Instance b7fdf23e-1e39-4745-ae84-38b7fa89aa5d is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 820.110071] env[62070]: DEBUG nova.compute.resource_tracker [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Instance 10672096-00ba-4481-8ab3-085a185076db actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62070) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 820.110263] env[62070]: DEBUG nova.compute.resource_tracker [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Instance 2368b649-f931-454c-92cc-971df4155d90 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62070) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 820.110434] env[62070]: DEBUG nova.compute.resource_tracker [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Instance f75ed36e-16c8-4a6b-bd39-eb4057ef0691 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=62070) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 820.114037] env[62070]: DEBUG nova.compute.resource_tracker [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Instance c3c6e93c-80be-4e71-87fb-2ff8db8d30fe actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62070) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 820.114404] env[62070]: WARNING nova.compute.resource_tracker [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Instance efef4aac-5b74-4a41-9f74-3d4cb4f80cdb is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 820.114574] env[62070]: DEBUG nova.compute.resource_tracker [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Instance 328fbc92-8162-4e12-a02d-6e9cafe0c365 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62070) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 820.114826] env[62070]: DEBUG nova.compute.resource_tracker [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Instance 3d22f50a-e1b7-48f9-a044-df64d01dfeb4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62070) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 820.115038] env[62070]: DEBUG nova.compute.resource_tracker [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Instance 5ec9074b-1237-4404-b13c-a7ca0dbe1d43 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62070) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 820.115247] env[62070]: DEBUG nova.compute.resource_tracker [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Instance 4a5f644a-1670-4c6b-a762-f87f1ee4cce5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62070) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 820.121445] env[62070]: DEBUG oslo_concurrency.lockutils [None req-cc6a54b3-9744-479e-b7db-0a87bbf229c3 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 820.124084] env[62070]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-6d6fe375-6c16-4e88-bee5-9d80f15db0af {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.143825] env[62070]: DEBUG oslo_vmware.api [None req-c3a2fe66-5321-413d-a208-355f06146e02 tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Task: {'id': task-1121776, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 820.175664] env[62070]: DEBUG oslo_vmware.api [None req-b541edfd-baeb-42f7-9f84-c3dcd80fd4e7 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Task: {'id': task-1121777, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.37982} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 820.176689] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-b541edfd-baeb-42f7-9f84-c3dcd80fd4e7 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Deleted the datastore file {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 820.176689] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-b541edfd-baeb-42f7-9f84-c3dcd80fd4e7 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] [instance: 0ac963b1-120a-464b-8228-3393135dd182] Deleted contents of the VM from datastore datastore1 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 820.176689] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-b541edfd-baeb-42f7-9f84-c3dcd80fd4e7 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] [instance: 0ac963b1-120a-464b-8228-3393135dd182] Instance destroyed {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 820.176689] env[62070]: INFO nova.compute.manager [None req-b541edfd-baeb-42f7-9f84-c3dcd80fd4e7 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] [instance: 0ac963b1-120a-464b-8228-3393135dd182] Took 4.27 seconds to destroy the instance on the hypervisor. [ 820.177364] env[62070]: DEBUG oslo.service.loopingcall [None req-b541edfd-baeb-42f7-9f84-c3dcd80fd4e7 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 820.177364] env[62070]: DEBUG nova.compute.manager [-] [instance: 0ac963b1-120a-464b-8228-3393135dd182] Deallocating network for instance {{(pid=62070) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 820.177364] env[62070]: DEBUG nova.network.neutron [-] [instance: 0ac963b1-120a-464b-8228-3393135dd182] deallocate_for_instance() {{(pid=62070) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 820.308221] env[62070]: DEBUG neutronclient.v2_0.client [-] Error message: {"NeutronError": {"type": "PortNotFound", "message": "Port 848ce3e0-8d08-460b-b770-75628ae28fd3 could not be found.", "detail": ""}} {{(pid=62070) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 820.308484] env[62070]: DEBUG nova.network.neutron [-] Unable to show port 848ce3e0-8d08-460b-b770-75628ae28fd3 as it no longer exists. {{(pid=62070) _unbind_ports /opt/stack/nova/nova/network/neutron.py:666}} [ 820.399996] env[62070]: DEBUG oslo_concurrency.lockutils [None req-d1ae98c6-774b-4beb-8cc9-59c38f859b35 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 820.618354] env[62070]: DEBUG nova.compute.resource_tracker [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Instance e74fd58c-cfa8-45c4-8f02-96234b4a9192 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62070) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 820.643412] env[62070]: DEBUG oslo_vmware.api [None req-c3a2fe66-5321-413d-a208-355f06146e02 tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Task: {'id': task-1121776, 'name': PowerOnVM_Task, 'duration_secs': 0.550869} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 820.645398] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-c3a2fe66-5321-413d-a208-355f06146e02 tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] [instance: 10672096-00ba-4481-8ab3-085a185076db] Powered on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 820.647846] env[62070]: DEBUG nova.compute.manager [None req-c3a2fe66-5321-413d-a208-355f06146e02 tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] [instance: 10672096-00ba-4481-8ab3-085a185076db] Checking state {{(pid=62070) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 820.649345] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc2bfc6f-4d63-4b76-a8ff-0a6dc55215f6 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.743689] env[62070]: DEBUG nova.network.neutron [None req-afd9a476-b061-4376-a74f-1ae1bdb543db tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] [instance: 4a5f644a-1670-4c6b-a762-f87f1ee4cce5] Successfully created port: 3a8213ef-a979-487a-8756-7bfecdf4ba10 {{(pid=62070) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 821.023180] env[62070]: INFO nova.compute.manager [None req-ebb5909b-d360-4ca0-af63-0f75ec534104 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] [instance: 5ec9074b-1237-4404-b13c-a7ca0dbe1d43] Rescuing [ 821.023614] env[62070]: DEBUG oslo_concurrency.lockutils [None req-ebb5909b-d360-4ca0-af63-0f75ec534104 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Acquiring lock "refresh_cache-5ec9074b-1237-4404-b13c-a7ca0dbe1d43" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 821.023917] env[62070]: DEBUG oslo_concurrency.lockutils [None req-ebb5909b-d360-4ca0-af63-0f75ec534104 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Acquired lock "refresh_cache-5ec9074b-1237-4404-b13c-a7ca0dbe1d43" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 821.024068] env[62070]: DEBUG nova.network.neutron [None req-ebb5909b-d360-4ca0-af63-0f75ec534104 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] [instance: 5ec9074b-1237-4404-b13c-a7ca0dbe1d43] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 821.094276] env[62070]: DEBUG nova.compute.manager [None req-afd9a476-b061-4376-a74f-1ae1bdb543db tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] [instance: 4a5f644a-1670-4c6b-a762-f87f1ee4cce5] Start spawning the instance on the hypervisor. 
{{(pid=62070) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 821.121542] env[62070]: DEBUG nova.compute.resource_tracker [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Instance 65fe3720-95cb-4620-b1c7-eae9e3bc3943 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62070) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 821.625264] env[62070]: DEBUG nova.compute.resource_tracker [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Instance 27987ff6-77c9-4876-8b39-dcc20ce4158a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62070) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 821.639309] env[62070]: DEBUG nova.compute.manager [req-f0fbf9b2-b944-46af-980f-b58e6a6f0a3f req-9be41166-a704-409e-9413-2c98cf143e1e service nova] [instance: 0ac963b1-120a-464b-8228-3393135dd182] Received event network-vif-deleted-6326b098-3c76-4152-b623-8921285ec01b {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 821.639724] env[62070]: INFO nova.compute.manager [req-f0fbf9b2-b944-46af-980f-b58e6a6f0a3f req-9be41166-a704-409e-9413-2c98cf143e1e service nova] [instance: 0ac963b1-120a-464b-8228-3393135dd182] Neutron deleted interface 6326b098-3c76-4152-b623-8921285ec01b; detaching it from the instance and deleting it from the info cache [ 821.640075] env[62070]: DEBUG nova.network.neutron [req-f0fbf9b2-b944-46af-980f-b58e6a6f0a3f req-9be41166-a704-409e-9413-2c98cf143e1e service nova] [instance: 0ac963b1-120a-464b-8228-3393135dd182] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 821.694026] env[62070]: DEBUG nova.network.neutron [-] [instance: 0ac963b1-120a-464b-8228-3393135dd182] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 822.135264] env[62070]: DEBUG nova.compute.resource_tracker [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Instance e5deccf6-f967-4e3c-bee0-2e1ad0bb4560 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62070) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 822.145219] env[62070]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-319b6463-e2d5-4616-9148-15455946944d {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.159243] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17faddc6-50f8-4574-bca7-dc830fa3ae42 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.210284] env[62070]: INFO nova.compute.manager [-] [instance: 0ac963b1-120a-464b-8228-3393135dd182] Took 2.03 seconds to deallocate network for instance. [ 822.210484] env[62070]: DEBUG nova.compute.manager [req-f0fbf9b2-b944-46af-980f-b58e6a6f0a3f req-9be41166-a704-409e-9413-2c98cf143e1e service nova] [instance: 0ac963b1-120a-464b-8228-3393135dd182] Detach interface failed, port_id=6326b098-3c76-4152-b623-8921285ec01b, reason: Instance 0ac963b1-120a-464b-8228-3393135dd182 could not be found. {{(pid=62070) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 822.215179] env[62070]: DEBUG nova.network.neutron [None req-ebb5909b-d360-4ca0-af63-0f75ec534104 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] [instance: 5ec9074b-1237-4404-b13c-a7ca0dbe1d43] Updating instance_info_cache with network_info: [{"id": "0b2740c1-7e91-45f2-b9e3-95b268c21eff", "address": "fa:16:3e:b7:ff:91", "network": {"id": "754f4ec8-0bc6-4726-8b88-1a4e1a326699", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-293486644-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1a94db233e3a43dc9aa7cf887c6cb1f6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d2742ba-c3af-4412-877d-c2811dfeba46", "external-id": "nsx-vlan-transportzone-390", "segmentation_id": 390, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0b2740c1-7e", "ovs_interfaceid": "0b2740c1-7e91-45f2-b9e3-95b268c21eff", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 822.640096] env[62070]: DEBUG nova.compute.resource_tracker [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Instance c16d175c-0b23-4f72-bdb0-844c6f80fd32 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62070) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 822.722683] env[62070]: DEBUG oslo_concurrency.lockutils [None req-ebb5909b-d360-4ca0-af63-0f75ec534104 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Releasing lock "refresh_cache-5ec9074b-1237-4404-b13c-a7ca0dbe1d43" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 822.726297] env[62070]: DEBUG oslo_concurrency.lockutils [None req-b541edfd-baeb-42f7-9f84-c3dcd80fd4e7 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 823.103960] env[62070]: DEBUG nova.network.neutron [None req-afd9a476-b061-4376-a74f-1ae1bdb543db tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] [instance: 4a5f644a-1670-4c6b-a762-f87f1ee4cce5] Successfully updated port: 3a8213ef-a979-487a-8756-7bfecdf4ba10 {{(pid=62070) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 823.143204] env[62070]: DEBUG nova.compute.resource_tracker [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Instance 62758a38-4819-4d5a-97ed-db6c9ceb97bf has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62070) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 823.143671] env[62070]: DEBUG nova.compute.resource_tracker [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Total usable vcpus: 48, total allocated vcpus: 16 {{(pid=62070) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 823.143930] env[62070]: DEBUG nova.compute.resource_tracker [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=3648MB phys_disk=200GB used_disk=16GB total_vcpus=48 used_vcpus=16 pci_stats=[] {{(pid=62070) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 823.523210] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c420633f-b550-4ce0-9b59-a2ab178f2945 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.531417] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-739ce2e3-8257-46b8-a3ee-b42fc624af3e {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.564650] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66de87fc-dcce-4297-8aa0-ee562f732959 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.573463] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-733da1c7-ae86-46a7-80a5-0ab4491ab1b2 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
823.589110] env[62070]: DEBUG nova.compute.provider_tree [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 823.609879] env[62070]: DEBUG oslo_concurrency.lockutils [None req-afd9a476-b061-4376-a74f-1ae1bdb543db tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Acquiring lock "refresh_cache-4a5f644a-1670-4c6b-a762-f87f1ee4cce5" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 823.609879] env[62070]: DEBUG oslo_concurrency.lockutils [None req-afd9a476-b061-4376-a74f-1ae1bdb543db tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Acquired lock "refresh_cache-4a5f644a-1670-4c6b-a762-f87f1ee4cce5" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 823.609879] env[62070]: DEBUG nova.network.neutron [None req-afd9a476-b061-4376-a74f-1ae1bdb543db tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] [instance: 4a5f644a-1670-4c6b-a762-f87f1ee4cce5] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 824.091251] env[62070]: DEBUG nova.scheduler.client.report [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 824.156036] env[62070]: DEBUG nova.network.neutron [None req-afd9a476-b061-4376-a74f-1ae1bdb543db tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] [instance: 4a5f644a-1670-4c6b-a762-f87f1ee4cce5] Instance cache missing network info. 
{{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 824.315726] env[62070]: DEBUG nova.network.neutron [None req-afd9a476-b061-4376-a74f-1ae1bdb543db tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] [instance: 4a5f644a-1670-4c6b-a762-f87f1ee4cce5] Updating instance_info_cache with network_info: [{"id": "3a8213ef-a979-487a-8756-7bfecdf4ba10", "address": "fa:16:3e:25:2e:9f", "network": {"id": "443d2d62-bcef-44b2-814a-3e5dc50abc04", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-772061432-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e85c0cc8e0f544bfbb76970d3123fbb7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5e1049e8-c06b-4c93-a9e1-2cbb530f3f95", "external-id": "nsx-vlan-transportzone-966", "segmentation_id": 966, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3a8213ef-a9", "ovs_interfaceid": "3a8213ef-a979-487a-8756-7bfecdf4ba10", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 824.600209] env[62070]: DEBUG nova.compute.resource_tracker [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62070) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 824.600560] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 5.551s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 824.600963] env[62070]: DEBUG oslo_concurrency.lockutils [None req-0d5d1c6b-8304-4910-b6d3-9f66b7a67ef9 tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 32.537s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 824.601246] env[62070]: DEBUG oslo_concurrency.lockutils [None req-0d5d1c6b-8304-4910-b6d3-9f66b7a67ef9 tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 824.604960] env[62070]: DEBUG oslo_concurrency.lockutils [None req-cbed1b11-9034-4566-ae5c-78171a6c24e6 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.932s {{(pid=62070) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 824.607069] env[62070]: INFO nova.compute.claims [None req-cbed1b11-9034-4566-ae5c-78171a6c24e6 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] [instance: e74fd58c-cfa8-45c4-8f02-96234b4a9192] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 824.610801] env[62070]: DEBUG oslo_service.periodic_task [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=62070) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 824.610995] env[62070]: DEBUG nova.compute.manager [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Cleaning up deleted instances {{(pid=62070) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11228}} [ 824.637807] env[62070]: INFO nova.scheduler.client.report [None req-0d5d1c6b-8304-4910-b6d3-9f66b7a67ef9 tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] Deleted allocations for instance 1ce155c8-9a10-4eff-b428-31889aa8f638 [ 824.818523] env[62070]: DEBUG oslo_concurrency.lockutils [None req-afd9a476-b061-4376-a74f-1ae1bdb543db tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Releasing lock "refresh_cache-4a5f644a-1670-4c6b-a762-f87f1ee4cce5" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 824.818897] env[62070]: DEBUG nova.compute.manager [None req-afd9a476-b061-4376-a74f-1ae1bdb543db tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] [instance: 4a5f644a-1670-4c6b-a762-f87f1ee4cce5] Instance network_info: |[{"id": "3a8213ef-a979-487a-8756-7bfecdf4ba10", "address": "fa:16:3e:25:2e:9f", "network": {"id": "443d2d62-bcef-44b2-814a-3e5dc50abc04", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-772061432-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e85c0cc8e0f544bfbb76970d3123fbb7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5e1049e8-c06b-4c93-a9e1-2cbb530f3f95", "external-id": "nsx-vlan-transportzone-966", "segmentation_id": 966, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3a8213ef-a9", "ovs_interfaceid": "3a8213ef-a979-487a-8756-7bfecdf4ba10", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 825.119126] env[62070]: DEBUG nova.compute.manager [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] There are 12 instances to clean {{(pid=62070) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11237}} [ 825.119429] env[62070]: DEBUG nova.compute.manager [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] [instance: 20e7a993-b1fb-4359-ab35-8b0f06ca0121] Instance has had 0 of 5 cleanup attempts {{(pid=62070) 
_run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 825.146030] env[62070]: DEBUG oslo_concurrency.lockutils [None req-0d5d1c6b-8304-4910-b6d3-9f66b7a67ef9 tempest-ServersTestMultiNic-29920691 tempest-ServersTestMultiNic-29920691-project-member] Lock "1ce155c8-9a10-4eff-b428-31889aa8f638" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 36.348s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 825.626028] env[62070]: DEBUG nova.compute.manager [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] [instance: 13e3576e-4f4c-4541-a637-daa124cbf8dd] Instance has had 0 of 5 cleanup attempts {{(pid=62070) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 825.963222] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2282756f-b6b2-468c-8d7b-53ced8e59126 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.973598] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8684446-a83a-410e-b72f-8f507eea9c93 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.004537] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe7afb02-e4cd-411b-892e-c96554f52510 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.013362] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-273c4006-0640-4667-9323-21ca91481286 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.028239] env[62070]: DEBUG nova.compute.provider_tree [None req-cbed1b11-9034-4566-ae5c-78171a6c24e6 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 826.130124] env[62070]: DEBUG nova.compute.manager [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] [instance: a3fcb849-b015-43aa-8f95-0d4a87e2cecc] Instance has had 0 of 5 cleanup attempts {{(pid=62070) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 826.531719] env[62070]: DEBUG nova.scheduler.client.report [None req-cbed1b11-9034-4566-ae5c-78171a6c24e6 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 826.634393] env[62070]: DEBUG nova.compute.manager [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] [instance: 42a5c5d8-5c3a-4568-b212-d87f2951a334] Instance has had 0 of 5 cleanup attempts {{(pid=62070) _run_pending_deletes 
/opt/stack/nova/nova/compute/manager.py:11241}} [ 827.037623] env[62070]: DEBUG oslo_concurrency.lockutils [None req-cbed1b11-9034-4566-ae5c-78171a6c24e6 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.433s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 827.038194] env[62070]: DEBUG nova.compute.manager [None req-cbed1b11-9034-4566-ae5c-78171a6c24e6 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] [instance: e74fd58c-cfa8-45c4-8f02-96234b4a9192] Start building networks asynchronously for instance. {{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 827.041450] env[62070]: DEBUG oslo_concurrency.lockutils [None req-b5d7932b-5fe7-4bae-8652-828c067cb041 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 29.972s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 827.042947] env[62070]: INFO nova.compute.claims [None req-b5d7932b-5fe7-4bae-8652-828c067cb041 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: 65fe3720-95cb-4620-b1c7-eae9e3bc3943] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 827.139164] env[62070]: DEBUG nova.compute.manager [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] [instance: 5a146d8f-6921-4b3e-8696-d2804fb855ba] Instance has had 0 of 5 cleanup attempts {{(pid=62070) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 827.543570] env[62070]: DEBUG nova.compute.utils [None req-cbed1b11-9034-4566-ae5c-78171a6c24e6 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Using /dev/sd instead of None {{(pid=62070) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 827.546752] env[62070]: DEBUG nova.compute.manager [None req-cbed1b11-9034-4566-ae5c-78171a6c24e6 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] [instance: e74fd58c-cfa8-45c4-8f02-96234b4a9192] Allocating IP information in the background. 
{{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 827.546965] env[62070]: DEBUG nova.network.neutron [None req-cbed1b11-9034-4566-ae5c-78171a6c24e6 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] [instance: e74fd58c-cfa8-45c4-8f02-96234b4a9192] allocate_for_instance() {{(pid=62070) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 827.601076] env[62070]: DEBUG nova.policy [None req-cbed1b11-9034-4566-ae5c-78171a6c24e6 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b47f99a84e374b8497de25413344f035', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2ecbd5f22c024de8a6b1c45096cb79a7', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62070) authorize /opt/stack/nova/nova/policy.py:201}} [ 827.644435] env[62070]: DEBUG nova.compute.manager [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] [instance: bcafa04d-904b-4eab-aba1-35180c2d4b22] Instance has had 0 of 5 cleanup attempts {{(pid=62070) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 827.885915] env[62070]: DEBUG nova.network.neutron [None req-cbed1b11-9034-4566-ae5c-78171a6c24e6 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] [instance: e74fd58c-cfa8-45c4-8f02-96234b4a9192] Successfully created port: 6444a30f-9c50-4eaf-b562-178b627dc0f1 {{(pid=62070) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 828.052591] env[62070]: DEBUG nova.compute.manager [None req-cbed1b11-9034-4566-ae5c-78171a6c24e6 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] [instance: e74fd58c-cfa8-45c4-8f02-96234b4a9192] Start building block device mappings for instance. 
{{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 828.148136] env[62070]: DEBUG nova.compute.manager [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] [instance: fe378560-40b8-42c9-840d-b7d60de87c4d] Instance has had 0 of 5 cleanup attempts {{(pid=62070) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 828.412747] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3bce920-27c7-4adf-811b-bbe4890dd6c8 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.421560] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d8a2ecf-edcd-4c0f-8ef8-f23f5aa53bc2 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.454419] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-742af07c-5f20-4d30-8684-1c6b353fbd58 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.463068] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a8474e8-1ef5-4cd0-a5c8-426b46858657 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.478209] env[62070]: DEBUG nova.compute.provider_tree [None req-b5d7932b-5fe7-4bae-8652-828c067cb041 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 828.651988] env[62070]: DEBUG nova.compute.manager [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] [instance: 076aed5b-4b08-4f3b-a940-d9cd95c32e57] Instance has had 0 of 5 cleanup attempts {{(pid=62070) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 828.981492] env[62070]: DEBUG nova.scheduler.client.report [None req-b5d7932b-5fe7-4bae-8652-828c067cb041 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 829.040338] env[62070]: DEBUG nova.virt.hardware [None req-afd9a476-b061-4376-a74f-1ae1bdb543db tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T09:21:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta 
ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T09:21:17Z,direct_url=,disk_format='vmdk',id=43ea607c-7ece-4601-9b11-75c6a16aa7dd,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9d42cb2bbadf40d6b35f237f71234611',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T09:21:18Z,virtual_size=,visibility=), allow threads: False {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 829.040694] env[62070]: DEBUG nova.virt.hardware [None req-afd9a476-b061-4376-a74f-1ae1bdb543db tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Flavor limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 829.040929] env[62070]: DEBUG nova.virt.hardware [None req-afd9a476-b061-4376-a74f-1ae1bdb543db tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Image limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 829.041142] env[62070]: DEBUG nova.virt.hardware [None req-afd9a476-b061-4376-a74f-1ae1bdb543db tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Flavor pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 829.041362] env[62070]: DEBUG nova.virt.hardware [None req-afd9a476-b061-4376-a74f-1ae1bdb543db tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Image pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 829.041595] env[62070]: DEBUG nova.virt.hardware [None req-afd9a476-b061-4376-a74f-1ae1bdb543db tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 829.041853] env[62070]: DEBUG nova.virt.hardware [None req-afd9a476-b061-4376-a74f-1ae1bdb543db tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 829.042043] env[62070]: DEBUG nova.virt.hardware [None req-afd9a476-b061-4376-a74f-1ae1bdb543db tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 829.042321] env[62070]: DEBUG nova.virt.hardware [None req-afd9a476-b061-4376-a74f-1ae1bdb543db tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Got 1 possible topologies {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 829.042558] env[62070]: DEBUG nova.virt.hardware [None req-afd9a476-b061-4376-a74f-1ae1bdb543db tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 829.042774] env[62070]: DEBUG nova.virt.hardware [None 
req-afd9a476-b061-4376-a74f-1ae1bdb543db tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 829.045728] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d31ec134-d34d-47a2-b2ad-ca5c3e15c611 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.059626] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16e1ca62-120f-41e4-b1c6-f9ad99b0cc24 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.066933] env[62070]: DEBUG nova.compute.manager [None req-cbed1b11-9034-4566-ae5c-78171a6c24e6 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] [instance: e74fd58c-cfa8-45c4-8f02-96234b4a9192] Start spawning the instance on the hypervisor. {{(pid=62070) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 829.072903] env[62070]: DEBUG oslo_vmware.rw_handles [None req-80e54eb4-2bdf-469f-a028-5ebd9367702b tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52d8a702-eb1e-b92b-34f7-40e05932ad2d/disk-0.vmdk. {{(pid=62070) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 829.074243] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1878b36-df03-451a-becc-18b177eb9935 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.086147] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-afd9a476-b061-4376-a74f-1ae1bdb543db tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] [instance: 4a5f644a-1670-4c6b-a762-f87f1ee4cce5] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:25:2e:9f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '5e1049e8-c06b-4c93-a9e1-2cbb530f3f95', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3a8213ef-a979-487a-8756-7bfecdf4ba10', 'vif_model': 'vmxnet3'}] {{(pid=62070) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 829.093364] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-afd9a476-b061-4376-a74f-1ae1bdb543db tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Creating folder: Project (e85c0cc8e0f544bfbb76970d3123fbb7). Parent ref: group-v245319. {{(pid=62070) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 829.096296] env[62070]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e20c97ed-19f2-4c7c-9455-512e396db572 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.100114] env[62070]: DEBUG oslo_vmware.rw_handles [None req-80e54eb4-2bdf-469f-a028-5ebd9367702b tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52d8a702-eb1e-b92b-34f7-40e05932ad2d/disk-0.vmdk is in state: ready. 
{{(pid=62070) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 829.100305] env[62070]: ERROR oslo_vmware.rw_handles [None req-80e54eb4-2bdf-469f-a028-5ebd9367702b tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52d8a702-eb1e-b92b-34f7-40e05932ad2d/disk-0.vmdk due to incomplete transfer. [ 829.100551] env[62070]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-678e5ac0-a4a3-4867-b814-6c223b10c568 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.106493] env[62070]: DEBUG nova.virt.hardware [None req-cbed1b11-9034-4566-ae5c-78171a6c24e6 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T09:21:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T09:21:17Z,direct_url=,disk_format='vmdk',id=43ea607c-7ece-4601-9b11-75c6a16aa7dd,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9d42cb2bbadf40d6b35f237f71234611',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T09:21:18Z,virtual_size=,visibility=), allow threads: False {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 829.106804] env[62070]: DEBUG nova.virt.hardware [None req-cbed1b11-9034-4566-ae5c-78171a6c24e6 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Flavor limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 829.107046] env[62070]: DEBUG nova.virt.hardware [None req-cbed1b11-9034-4566-ae5c-78171a6c24e6 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Image limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 829.107270] env[62070]: DEBUG nova.virt.hardware [None req-cbed1b11-9034-4566-ae5c-78171a6c24e6 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Flavor pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 829.107593] env[62070]: DEBUG nova.virt.hardware [None req-cbed1b11-9034-4566-ae5c-78171a6c24e6 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Image pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 829.107709] env[62070]: DEBUG nova.virt.hardware [None req-cbed1b11-9034-4566-ae5c-78171a6c24e6 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 829.107941] env[62070]: DEBUG nova.virt.hardware [None req-cbed1b11-9034-4566-ae5c-78171a6c24e6 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] 
Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 829.108123] env[62070]: DEBUG nova.virt.hardware [None req-cbed1b11-9034-4566-ae5c-78171a6c24e6 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 829.108322] env[62070]: DEBUG nova.virt.hardware [None req-cbed1b11-9034-4566-ae5c-78171a6c24e6 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Got 1 possible topologies {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 829.108508] env[62070]: DEBUG nova.virt.hardware [None req-cbed1b11-9034-4566-ae5c-78171a6c24e6 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 829.108690] env[62070]: DEBUG nova.virt.hardware [None req-cbed1b11-9034-4566-ae5c-78171a6c24e6 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 829.109910] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8acc42b-7a09-48e0-98e4-24c211010343 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.114279] env[62070]: INFO nova.virt.vmwareapi.vm_util [None req-afd9a476-b061-4376-a74f-1ae1bdb543db tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Created folder: Project (e85c0cc8e0f544bfbb76970d3123fbb7) in parent group-v245319. [ 829.114491] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-afd9a476-b061-4376-a74f-1ae1bdb543db tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Creating folder: Instances. Parent ref: group-v245416. {{(pid=62070) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 829.115063] env[62070]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3b091a95-184f-432d-8acf-341f64dbc343 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.123019] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54d73ba8-516e-44d0-910e-5eddf53e2df2 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.127255] env[62070]: DEBUG oslo_vmware.rw_handles [None req-80e54eb4-2bdf-469f-a028-5ebd9367702b tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52d8a702-eb1e-b92b-34f7-40e05932ad2d/disk-0.vmdk. 
{{(pid=62070) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 829.127506] env[62070]: DEBUG nova.virt.vmwareapi.images [None req-80e54eb4-2bdf-469f-a028-5ebd9367702b tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] [instance: 359ae9f2-a907-459e-99b9-3e043d5d015f] Uploaded image 92cf8604-9c53-4de5-acd5-e4719f0927f0 to the Glance image server {{(pid=62070) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:473}} [ 829.130083] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-80e54eb4-2bdf-469f-a028-5ebd9367702b tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] [instance: 359ae9f2-a907-459e-99b9-3e043d5d015f] Destroying the VM {{(pid=62070) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1358}} [ 829.135024] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-88ebc7d8-9ef8-4f8b-a6a3-9fc16601dc25 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.136990] env[62070]: INFO nova.virt.vmwareapi.vm_util [None req-afd9a476-b061-4376-a74f-1ae1bdb543db tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Created folder: Instances in parent group-v245416. [ 829.137364] env[62070]: DEBUG oslo.service.loopingcall [None req-afd9a476-b061-4376-a74f-1ae1bdb543db tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 829.138202] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4a5f644a-1670-4c6b-a762-f87f1ee4cce5] Creating VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 829.138851] env[62070]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ddf7ded4-2f34-4519-8355-594df9bc9089 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.167434] env[62070]: DEBUG nova.compute.manager [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] [instance: 1c1730e5-88af-4c7f-8bcc-d494db2cd723] Instance has had 0 of 5 cleanup attempts {{(pid=62070) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 829.170877] env[62070]: DEBUG oslo_vmware.api [None req-80e54eb4-2bdf-469f-a028-5ebd9367702b tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Waiting for the task: (returnval){ [ 829.170877] env[62070]: value = "task-1121781" [ 829.170877] env[62070]: _type = "Task" [ 829.170877] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 829.183079] env[62070]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 829.183079] env[62070]: value = "task-1121782" [ 829.183079] env[62070]: _type = "Task" [ 829.183079] env[62070]: } to complete. 
{{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 829.183563] env[62070]: DEBUG oslo_vmware.api [None req-80e54eb4-2bdf-469f-a028-5ebd9367702b tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Task: {'id': task-1121781, 'name': Destroy_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 829.199723] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1121782, 'name': CreateVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 829.490021] env[62070]: DEBUG oslo_concurrency.lockutils [None req-b5d7932b-5fe7-4bae-8652-828c067cb041 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.445s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 829.490021] env[62070]: DEBUG nova.compute.manager [None req-b5d7932b-5fe7-4bae-8652-828c067cb041 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: 65fe3720-95cb-4620-b1c7-eae9e3bc3943] Start building networks asynchronously for instance. {{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 829.491138] env[62070]: DEBUG oslo_concurrency.lockutils [None req-269cfec7-5851-424c-8fb7-c6840fcea8f3 tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 30.546s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 829.492142] env[62070]: DEBUG oslo_concurrency.lockutils [None req-269cfec7-5851-424c-8fb7-c6840fcea8f3 tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.001s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 829.494660] env[62070]: DEBUG oslo_concurrency.lockutils [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 24.362s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 829.496278] env[62070]: INFO nova.compute.claims [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] [instance: 27987ff6-77c9-4876-8b39-dcc20ce4158a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 829.539336] env[62070]: INFO nova.scheduler.client.report [None req-269cfec7-5851-424c-8fb7-c6840fcea8f3 tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] Deleted allocations for instance b7fdf23e-1e39-4745-ae84-38b7fa89aa5d [ 829.572032] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-ebb5909b-d360-4ca0-af63-0f75ec534104 tempest-ServerRescueNegativeTestJSON-883366746 
tempest-ServerRescueNegativeTestJSON-883366746-project-member] [instance: 5ec9074b-1237-4404-b13c-a7ca0dbe1d43] Powering off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 829.572032] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-18c7e515-d4ed-45ed-9551-f45132bc334b {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.578830] env[62070]: DEBUG oslo_vmware.api [None req-ebb5909b-d360-4ca0-af63-0f75ec534104 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Waiting for the task: (returnval){ [ 829.578830] env[62070]: value = "task-1121783" [ 829.578830] env[62070]: _type = "Task" [ 829.578830] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 829.588393] env[62070]: DEBUG oslo_vmware.api [None req-ebb5909b-d360-4ca0-af63-0f75ec534104 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Task: {'id': task-1121783, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 829.620151] env[62070]: DEBUG nova.network.neutron [None req-cbed1b11-9034-4566-ae5c-78171a6c24e6 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] [instance: e74fd58c-cfa8-45c4-8f02-96234b4a9192] Successfully updated port: 6444a30f-9c50-4eaf-b562-178b627dc0f1 {{(pid=62070) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 829.672207] env[62070]: DEBUG nova.compute.manager [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] [instance: 30d782e4-30c7-41f6-b30d-95a9a59cf83c] Instance has had 0 of 5 cleanup attempts {{(pid=62070) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 829.684657] env[62070]: DEBUG oslo_vmware.api [None req-80e54eb4-2bdf-469f-a028-5ebd9367702b tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Task: {'id': task-1121781, 'name': Destroy_Task, 'duration_secs': 0.440038} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 829.684903] env[62070]: INFO nova.virt.vmwareapi.vm_util [None req-80e54eb4-2bdf-469f-a028-5ebd9367702b tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] [instance: 359ae9f2-a907-459e-99b9-3e043d5d015f] Destroyed the VM [ 829.688027] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-80e54eb4-2bdf-469f-a028-5ebd9367702b tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] [instance: 359ae9f2-a907-459e-99b9-3e043d5d015f] Deleting Snapshot of the VM instance {{(pid=62070) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 829.688027] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-04d4f77a-748f-4467-bb53-a7fc82384c6d {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.691367] env[62070]: DEBUG oslo_concurrency.lockutils [None req-732b2ab1-8333-4faa-b641-cbbd113d03d2 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Acquiring lock "c3c6e93c-80be-4e71-87fb-2ff8db8d30fe" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 829.691615] env[62070]: DEBUG oslo_concurrency.lockutils [None req-732b2ab1-8333-4faa-b641-cbbd113d03d2 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Lock "c3c6e93c-80be-4e71-87fb-2ff8db8d30fe" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 829.691876] env[62070]: DEBUG oslo_concurrency.lockutils [None req-732b2ab1-8333-4faa-b641-cbbd113d03d2 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Acquiring lock "c3c6e93c-80be-4e71-87fb-2ff8db8d30fe-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 829.692575] env[62070]: DEBUG oslo_concurrency.lockutils [None req-732b2ab1-8333-4faa-b641-cbbd113d03d2 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Lock "c3c6e93c-80be-4e71-87fb-2ff8db8d30fe-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 829.693846] env[62070]: DEBUG oslo_concurrency.lockutils [None req-732b2ab1-8333-4faa-b641-cbbd113d03d2 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Lock "c3c6e93c-80be-4e71-87fb-2ff8db8d30fe-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.001s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 829.696737] env[62070]: INFO nova.compute.manager [None req-732b2ab1-8333-4faa-b641-cbbd113d03d2 tempest-ServerDiskConfigTestJSON-1824612993 
tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: c3c6e93c-80be-4e71-87fb-2ff8db8d30fe] Terminating instance [ 829.707202] env[62070]: DEBUG oslo_vmware.api [None req-80e54eb4-2bdf-469f-a028-5ebd9367702b tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Waiting for the task: (returnval){ [ 829.707202] env[62070]: value = "task-1121784" [ 829.707202] env[62070]: _type = "Task" [ 829.707202] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 829.707412] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1121782, 'name': CreateVM_Task} progress is 99%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 829.708631] env[62070]: DEBUG nova.compute.manager [None req-732b2ab1-8333-4faa-b641-cbbd113d03d2 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: c3c6e93c-80be-4e71-87fb-2ff8db8d30fe] Start destroying the instance on the hypervisor. {{(pid=62070) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 829.708842] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-732b2ab1-8333-4faa-b641-cbbd113d03d2 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: c3c6e93c-80be-4e71-87fb-2ff8db8d30fe] Destroying instance {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 829.710022] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-292f7303-f4d7-4b47-9caf-cbe46f8a315d {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.723478] env[62070]: DEBUG oslo_vmware.api [None req-80e54eb4-2bdf-469f-a028-5ebd9367702b tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Task: {'id': task-1121784, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 829.725728] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-732b2ab1-8333-4faa-b641-cbbd113d03d2 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: c3c6e93c-80be-4e71-87fb-2ff8db8d30fe] Powering off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 829.726601] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c99bc801-75d0-4197-93d9-5b9683fd57c5 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.737266] env[62070]: DEBUG oslo_vmware.api [None req-732b2ab1-8333-4faa-b641-cbbd113d03d2 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Waiting for the task: (returnval){ [ 829.737266] env[62070]: value = "task-1121785" [ 829.737266] env[62070]: _type = "Task" [ 829.737266] env[62070]: } to complete. 
{{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 829.747168] env[62070]: DEBUG oslo_vmware.api [None req-732b2ab1-8333-4faa-b641-cbbd113d03d2 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Task: {'id': task-1121785, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 830.004827] env[62070]: DEBUG nova.compute.utils [None req-b5d7932b-5fe7-4bae-8652-828c067cb041 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Using /dev/sd instead of None {{(pid=62070) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 830.009858] env[62070]: DEBUG nova.compute.manager [None req-b5d7932b-5fe7-4bae-8652-828c067cb041 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: 65fe3720-95cb-4620-b1c7-eae9e3bc3943] Allocating IP information in the background. {{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 830.009858] env[62070]: DEBUG nova.network.neutron [None req-b5d7932b-5fe7-4bae-8652-828c067cb041 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: 65fe3720-95cb-4620-b1c7-eae9e3bc3943] allocate_for_instance() {{(pid=62070) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 830.055651] env[62070]: DEBUG oslo_concurrency.lockutils [None req-269cfec7-5851-424c-8fb7-c6840fcea8f3 tempest-VolumesAdminNegativeTest-166407224 tempest-VolumesAdminNegativeTest-166407224-project-member] Lock "b7fdf23e-1e39-4745-ae84-38b7fa89aa5d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 34.079s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 830.077518] env[62070]: DEBUG nova.compute.manager [req-b6b89f89-4091-4b29-ab14-493590f26ffe req-d46cb8b5-87c3-403a-a5e6-207616d23239 service nova] [instance: 4a5f644a-1670-4c6b-a762-f87f1ee4cce5] Received event network-vif-plugged-3a8213ef-a979-487a-8756-7bfecdf4ba10 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 830.077756] env[62070]: DEBUG oslo_concurrency.lockutils [req-b6b89f89-4091-4b29-ab14-493590f26ffe req-d46cb8b5-87c3-403a-a5e6-207616d23239 service nova] Acquiring lock "4a5f644a-1670-4c6b-a762-f87f1ee4cce5-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 830.077981] env[62070]: DEBUG oslo_concurrency.lockutils [req-b6b89f89-4091-4b29-ab14-493590f26ffe req-d46cb8b5-87c3-403a-a5e6-207616d23239 service nova] Lock "4a5f644a-1670-4c6b-a762-f87f1ee4cce5-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 830.078579] env[62070]: DEBUG oslo_concurrency.lockutils [req-b6b89f89-4091-4b29-ab14-493590f26ffe req-d46cb8b5-87c3-403a-a5e6-207616d23239 service nova] Lock "4a5f644a-1670-4c6b-a762-f87f1ee4cce5-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 830.078862] env[62070]: DEBUG 
nova.compute.manager [req-b6b89f89-4091-4b29-ab14-493590f26ffe req-d46cb8b5-87c3-403a-a5e6-207616d23239 service nova] [instance: 4a5f644a-1670-4c6b-a762-f87f1ee4cce5] No waiting events found dispatching network-vif-plugged-3a8213ef-a979-487a-8756-7bfecdf4ba10 {{(pid=62070) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 830.078862] env[62070]: WARNING nova.compute.manager [req-b6b89f89-4091-4b29-ab14-493590f26ffe req-d46cb8b5-87c3-403a-a5e6-207616d23239 service nova] [instance: 4a5f644a-1670-4c6b-a762-f87f1ee4cce5] Received unexpected event network-vif-plugged-3a8213ef-a979-487a-8756-7bfecdf4ba10 for instance with vm_state building and task_state spawning. [ 830.091727] env[62070]: DEBUG oslo_vmware.api [None req-ebb5909b-d360-4ca0-af63-0f75ec534104 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Task: {'id': task-1121783, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 830.118028] env[62070]: DEBUG nova.policy [None req-b5d7932b-5fe7-4bae-8652-828c067cb041 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'db9baf29d0b5489da2657286bfd695c0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '91e246e32f29422e90fae974cfee9d8f', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62070) authorize /opt/stack/nova/nova/policy.py:201}} [ 830.125200] env[62070]: DEBUG oslo_concurrency.lockutils [None req-cbed1b11-9034-4566-ae5c-78171a6c24e6 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Acquiring lock "refresh_cache-e74fd58c-cfa8-45c4-8f02-96234b4a9192" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 830.125503] env[62070]: DEBUG oslo_concurrency.lockutils [None req-cbed1b11-9034-4566-ae5c-78171a6c24e6 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Acquired lock "refresh_cache-e74fd58c-cfa8-45c4-8f02-96234b4a9192" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 830.125772] env[62070]: DEBUG nova.network.neutron [None req-cbed1b11-9034-4566-ae5c-78171a6c24e6 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] [instance: e74fd58c-cfa8-45c4-8f02-96234b4a9192] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 830.179644] env[62070]: DEBUG nova.compute.manager [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] [instance: d148d561-3211-4f1f-965a-f2b14cd60b11] Instance has had 0 of 5 cleanup attempts {{(pid=62070) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 830.198117] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1121782, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 830.224983] env[62070]: DEBUG oslo_vmware.api [None req-80e54eb4-2bdf-469f-a028-5ebd9367702b tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Task: {'id': task-1121784, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 830.249268] env[62070]: DEBUG oslo_vmware.api [None req-732b2ab1-8333-4faa-b641-cbbd113d03d2 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Task: {'id': task-1121785, 'name': PowerOffVM_Task, 'duration_secs': 0.245099} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 830.252848] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-732b2ab1-8333-4faa-b641-cbbd113d03d2 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: c3c6e93c-80be-4e71-87fb-2ff8db8d30fe] Powered off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 830.252848] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-732b2ab1-8333-4faa-b641-cbbd113d03d2 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: c3c6e93c-80be-4e71-87fb-2ff8db8d30fe] Unregistering the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 830.252848] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f8b8531b-4a85-451b-9426-57beb7a65e70 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.323129] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-732b2ab1-8333-4faa-b641-cbbd113d03d2 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: c3c6e93c-80be-4e71-87fb-2ff8db8d30fe] Unregistered the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 830.323516] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-732b2ab1-8333-4faa-b641-cbbd113d03d2 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: c3c6e93c-80be-4e71-87fb-2ff8db8d30fe] Deleting contents of the VM from datastore datastore1 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 830.323785] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-732b2ab1-8333-4faa-b641-cbbd113d03d2 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Deleting the datastore file [datastore1] c3c6e93c-80be-4e71-87fb-2ff8db8d30fe {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 830.324182] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5f22692f-b716-48ec-8a8b-5797c8fae4d2 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.333302] env[62070]: DEBUG oslo_vmware.api [None req-732b2ab1-8333-4faa-b641-cbbd113d03d2 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Waiting for the task: (returnval){ [ 830.333302] env[62070]: value = "task-1121787" [ 830.333302] 
env[62070]: _type = "Task" [ 830.333302] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 830.351507] env[62070]: DEBUG oslo_vmware.api [None req-732b2ab1-8333-4faa-b641-cbbd113d03d2 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Task: {'id': task-1121787, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 830.508083] env[62070]: DEBUG nova.compute.manager [None req-b5d7932b-5fe7-4bae-8652-828c067cb041 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: 65fe3720-95cb-4620-b1c7-eae9e3bc3943] Start building block device mappings for instance. {{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 830.592028] env[62070]: DEBUG nova.network.neutron [None req-b5d7932b-5fe7-4bae-8652-828c067cb041 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: 65fe3720-95cb-4620-b1c7-eae9e3bc3943] Successfully created port: 0f1aed27-115c-407c-b787-21d92045c9fd {{(pid=62070) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 830.603213] env[62070]: DEBUG oslo_vmware.api [None req-ebb5909b-d360-4ca0-af63-0f75ec534104 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Task: {'id': task-1121783, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 830.685750] env[62070]: DEBUG nova.compute.manager [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] [instance: 283e7488-1240-475f-a74d-809251950774] Instance has had 0 of 5 cleanup attempts {{(pid=62070) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 830.689842] env[62070]: DEBUG nova.network.neutron [None req-cbed1b11-9034-4566-ae5c-78171a6c24e6 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] [instance: e74fd58c-cfa8-45c4-8f02-96234b4a9192] Instance cache missing network info. {{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 830.712411] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1121782, 'name': CreateVM_Task, 'duration_secs': 1.488355} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 830.712411] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4a5f644a-1670-4c6b-a762-f87f1ee4cce5] Created VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 830.714426] env[62070]: DEBUG oslo_concurrency.lockutils [None req-afd9a476-b061-4376-a74f-1ae1bdb543db tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 830.714588] env[62070]: DEBUG oslo_concurrency.lockutils [None req-afd9a476-b061-4376-a74f-1ae1bdb543db tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Acquired lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 830.714923] env[62070]: DEBUG oslo_concurrency.lockutils [None req-afd9a476-b061-4376-a74f-1ae1bdb543db tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 830.716485] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5cf45886-8cf1-49a4-af1a-09222e874814 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.723759] env[62070]: DEBUG oslo_concurrency.lockutils [None req-ce7278d1-f76f-4623-9cfe-51d678cfa5d5 tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] Acquiring lock "61ab347d-1342-4f59-8955-10d575993b77" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 830.724126] env[62070]: DEBUG oslo_concurrency.lockutils [None req-ce7278d1-f76f-4623-9cfe-51d678cfa5d5 tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] Lock "61ab347d-1342-4f59-8955-10d575993b77" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 830.738081] env[62070]: DEBUG oslo_vmware.api [None req-80e54eb4-2bdf-469f-a028-5ebd9367702b tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Task: {'id': task-1121784, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 830.738550] env[62070]: DEBUG oslo_vmware.api [None req-afd9a476-b061-4376-a74f-1ae1bdb543db tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Waiting for the task: (returnval){ [ 830.738550] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]5292c386-7ec8-b995-4123-a63c931253c9" [ 830.738550] env[62070]: _type = "Task" [ 830.738550] env[62070]: } to complete. 
{{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 830.754109] env[62070]: DEBUG oslo_vmware.rw_handles [None req-f077bfde-40ac-4e3c-a213-ff8fe8aed011 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52cddf42-8fb8-8c35-7949-a3a618580b7a/disk-0.vmdk. {{(pid=62070) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 830.755140] env[62070]: DEBUG oslo_vmware.api [None req-afd9a476-b061-4376-a74f-1ae1bdb543db tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]5292c386-7ec8-b995-4123-a63c931253c9, 'name': SearchDatastore_Task, 'duration_secs': 0.015825} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 830.756095] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1ef4892-8baa-4f74-9cc4-c3273add300a {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.759249] env[62070]: DEBUG oslo_concurrency.lockutils [None req-afd9a476-b061-4376-a74f-1ae1bdb543db tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Releasing lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 830.759249] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-afd9a476-b061-4376-a74f-1ae1bdb543db tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] [instance: 4a5f644a-1670-4c6b-a762-f87f1ee4cce5] Processing image 43ea607c-7ece-4601-9b11-75c6a16aa7dd {{(pid=62070) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 830.759449] env[62070]: DEBUG oslo_concurrency.lockutils [None req-afd9a476-b061-4376-a74f-1ae1bdb543db tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 830.759618] env[62070]: DEBUG oslo_concurrency.lockutils [None req-afd9a476-b061-4376-a74f-1ae1bdb543db tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Acquired lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 830.759807] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-afd9a476-b061-4376-a74f-1ae1bdb543db tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 830.760412] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9b21aa64-9012-45c7-bceb-66351ada4c67 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
830.769305] env[62070]: DEBUG oslo_vmware.rw_handles [None req-f077bfde-40ac-4e3c-a213-ff8fe8aed011 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52cddf42-8fb8-8c35-7949-a3a618580b7a/disk-0.vmdk is in state: ready. {{(pid=62070) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 830.769485] env[62070]: ERROR oslo_vmware.rw_handles [None req-f077bfde-40ac-4e3c-a213-ff8fe8aed011 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52cddf42-8fb8-8c35-7949-a3a618580b7a/disk-0.vmdk due to incomplete transfer. [ 830.769724] env[62070]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-088e697f-be5c-4ede-9895-5d978600315a {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.772297] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-afd9a476-b061-4376-a74f-1ae1bdb543db tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 830.772486] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-afd9a476-b061-4376-a74f-1ae1bdb543db tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62070) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 830.773567] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-613b782d-43ff-472a-887c-33ad3c5b6523 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.779755] env[62070]: DEBUG oslo_vmware.rw_handles [None req-f077bfde-40ac-4e3c-a213-ff8fe8aed011 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52cddf42-8fb8-8c35-7949-a3a618580b7a/disk-0.vmdk. 
{{(pid=62070) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 830.780031] env[62070]: DEBUG nova.virt.vmwareapi.images [None req-f077bfde-40ac-4e3c-a213-ff8fe8aed011 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 328fbc92-8162-4e12-a02d-6e9cafe0c365] Uploaded image f721b0bd-3ff9-47e3-950c-4df5b98cd172 to the Glance image server {{(pid=62070) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:473}} [ 830.781634] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-f077bfde-40ac-4e3c-a213-ff8fe8aed011 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 328fbc92-8162-4e12-a02d-6e9cafe0c365] Destroying the VM {{(pid=62070) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1358}} [ 830.783102] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-9bb26e90-70af-4893-9e90-e9692aa66d5f {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.785167] env[62070]: DEBUG oslo_vmware.api [None req-afd9a476-b061-4376-a74f-1ae1bdb543db tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Waiting for the task: (returnval){ [ 830.785167] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]52e22c1b-4573-2d8d-0cf1-c4938c802322" [ 830.785167] env[62070]: _type = "Task" [ 830.785167] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 830.799733] env[62070]: DEBUG oslo_vmware.api [None req-f077bfde-40ac-4e3c-a213-ff8fe8aed011 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Waiting for the task: (returnval){ [ 830.799733] env[62070]: value = "task-1121788" [ 830.799733] env[62070]: _type = "Task" [ 830.799733] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 830.802184] env[62070]: DEBUG oslo_vmware.api [None req-afd9a476-b061-4376-a74f-1ae1bdb543db tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52e22c1b-4573-2d8d-0cf1-c4938c802322, 'name': SearchDatastore_Task, 'duration_secs': 0.018152} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 830.809102] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1f7695d0-c61c-4c4e-ad1f-cb852a75cdb1 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.816232] env[62070]: DEBUG oslo_vmware.api [None req-afd9a476-b061-4376-a74f-1ae1bdb543db tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Waiting for the task: (returnval){ [ 830.816232] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]5255c554-f897-e9e8-0aa9-5d094785d956" [ 830.816232] env[62070]: _type = "Task" [ 830.816232] env[62070]: } to complete. 
{{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 830.820170] env[62070]: DEBUG oslo_vmware.api [None req-f077bfde-40ac-4e3c-a213-ff8fe8aed011 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Task: {'id': task-1121788, 'name': Destroy_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 830.835554] env[62070]: DEBUG oslo_vmware.api [None req-afd9a476-b061-4376-a74f-1ae1bdb543db tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]5255c554-f897-e9e8-0aa9-5d094785d956, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 830.852902] env[62070]: DEBUG oslo_vmware.api [None req-732b2ab1-8333-4faa-b641-cbbd113d03d2 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Task: {'id': task-1121787, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.237106} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 830.853748] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-732b2ab1-8333-4faa-b641-cbbd113d03d2 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Deleted the datastore file {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 830.854224] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-732b2ab1-8333-4faa-b641-cbbd113d03d2 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: c3c6e93c-80be-4e71-87fb-2ff8db8d30fe] Deleted contents of the VM from datastore datastore1 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 830.854443] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-732b2ab1-8333-4faa-b641-cbbd113d03d2 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: c3c6e93c-80be-4e71-87fb-2ff8db8d30fe] Instance destroyed {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 830.854667] env[62070]: INFO nova.compute.manager [None req-732b2ab1-8333-4faa-b641-cbbd113d03d2 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: c3c6e93c-80be-4e71-87fb-2ff8db8d30fe] Took 1.15 seconds to destroy the instance on the hypervisor. [ 830.855077] env[62070]: DEBUG oslo.service.loopingcall [None req-732b2ab1-8333-4faa-b641-cbbd113d03d2 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 830.855490] env[62070]: DEBUG nova.compute.manager [-] [instance: c3c6e93c-80be-4e71-87fb-2ff8db8d30fe] Deallocating network for instance {{(pid=62070) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 830.855661] env[62070]: DEBUG nova.network.neutron [-] [instance: c3c6e93c-80be-4e71-87fb-2ff8db8d30fe] deallocate_for_instance() {{(pid=62070) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 830.932609] env[62070]: DEBUG oslo_concurrency.lockutils [None req-e0c1f7c9-4467-40ac-bc84-3c816df5b227 tempest-ServersNegativeTestMultiTenantJSON-2128819833 tempest-ServersNegativeTestMultiTenantJSON-2128819833-project-member] Acquiring lock "4bba7448-69f7-4764-9ae6-eb6585f71515" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 830.932932] env[62070]: DEBUG oslo_concurrency.lockutils [None req-e0c1f7c9-4467-40ac-bc84-3c816df5b227 tempest-ServersNegativeTestMultiTenantJSON-2128819833 tempest-ServersNegativeTestMultiTenantJSON-2128819833-project-member] Lock "4bba7448-69f7-4764-9ae6-eb6585f71515" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 830.998030] env[62070]: DEBUG nova.network.neutron [None req-cbed1b11-9034-4566-ae5c-78171a6c24e6 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] [instance: e74fd58c-cfa8-45c4-8f02-96234b4a9192] Updating instance_info_cache with network_info: [{"id": "6444a30f-9c50-4eaf-b562-178b627dc0f1", "address": "fa:16:3e:ec:99:7c", "network": {"id": "08004b49-dbc2-4186-9e28-4268e947e8ee", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-2022236674-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "2ecbd5f22c024de8a6b1c45096cb79a7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69e41c97-4d75-4041-ae71-321e7e9d480b", "external-id": "nsx-vlan-transportzone-483", "segmentation_id": 483, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6444a30f-9c", "ovs_interfaceid": "6444a30f-9c50-4eaf-b562-178b627dc0f1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 831.037043] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a35bbcc-7477-4097-b3ff-cf8354ef2319 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.047046] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-386b0637-eefb-474d-b700-6b5fa4933ec4 {{(pid=62070) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.079825] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f7fad72-3286-4378-b83b-bf6b46c87fd3 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.097052] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc48722a-7383-4aa4-8bf1-8f44a4f235e3 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.101157] env[62070]: DEBUG oslo_vmware.api [None req-ebb5909b-d360-4ca0-af63-0f75ec534104 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Task: {'id': task-1121783, 'name': PowerOffVM_Task, 'duration_secs': 1.332656} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 831.102092] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-ebb5909b-d360-4ca0-af63-0f75ec534104 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] [instance: 5ec9074b-1237-4404-b13c-a7ca0dbe1d43] Powered off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 831.103285] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d91ba4b-df8a-40b1-84d1-2c3daee2cfbf {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.113640] env[62070]: DEBUG nova.compute.provider_tree [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Updating inventory in ProviderTree for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 831.133941] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e719e93-c846-4cd1-9d40-1fe6d4b5ccbe {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.173063] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-ebb5909b-d360-4ca0-af63-0f75ec534104 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] [instance: 5ec9074b-1237-4404-b13c-a7ca0dbe1d43] Powering off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 831.173379] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4b8c7da1-7fdc-41c8-ac72-45b06304c602 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.183075] env[62070]: DEBUG oslo_vmware.api [None req-ebb5909b-d360-4ca0-af63-0f75ec534104 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Waiting for the task: 
(returnval){ [ 831.183075] env[62070]: value = "task-1121789" [ 831.183075] env[62070]: _type = "Task" [ 831.183075] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 831.192978] env[62070]: DEBUG oslo_vmware.api [None req-ebb5909b-d360-4ca0-af63-0f75ec534104 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Task: {'id': task-1121789, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 831.192978] env[62070]: DEBUG oslo_service.periodic_task [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=62070) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 831.193125] env[62070]: DEBUG nova.compute.manager [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Cleaning up deleted instances with incomplete migration {{(pid=62070) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11266}} [ 831.222848] env[62070]: DEBUG oslo_vmware.api [None req-80e54eb4-2bdf-469f-a028-5ebd9367702b tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Task: {'id': task-1121784, 'name': RemoveSnapshot_Task} progress is 50%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 831.236177] env[62070]: DEBUG nova.compute.manager [None req-ce7278d1-f76f-4623-9cfe-51d678cfa5d5 tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] [instance: 61ab347d-1342-4f59-8955-10d575993b77] Starting instance... {{(pid=62070) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 831.318856] env[62070]: DEBUG oslo_vmware.api [None req-f077bfde-40ac-4e3c-a213-ff8fe8aed011 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Task: {'id': task-1121788, 'name': Destroy_Task} progress is 100%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 831.331990] env[62070]: DEBUG oslo_vmware.api [None req-afd9a476-b061-4376-a74f-1ae1bdb543db tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]5255c554-f897-e9e8-0aa9-5d094785d956, 'name': SearchDatastore_Task, 'duration_secs': 0.030288} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 831.332290] env[62070]: DEBUG oslo_concurrency.lockutils [None req-afd9a476-b061-4376-a74f-1ae1bdb543db tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Releasing lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 831.332557] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-afd9a476-b061-4376-a74f-1ae1bdb543db tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore1] 4a5f644a-1670-4c6b-a762-f87f1ee4cce5/4a5f644a-1670-4c6b-a762-f87f1ee4cce5.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 831.332832] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-921b33b5-9b70-4458-a86e-cb100fc21891 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.341872] env[62070]: DEBUG oslo_vmware.api [None req-afd9a476-b061-4376-a74f-1ae1bdb543db tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Waiting for the task: (returnval){ [ 831.341872] env[62070]: value = "task-1121790" [ 831.341872] env[62070]: _type = "Task" [ 831.341872] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 831.351174] env[62070]: DEBUG oslo_vmware.api [None req-afd9a476-b061-4376-a74f-1ae1bdb543db tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Task: {'id': task-1121790, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 831.362740] env[62070]: DEBUG oslo_concurrency.lockutils [None req-58349f23-6bf8-4092-ac97-bf388655a094 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Acquiring lock "fb054a32-c1aa-4884-a087-da5ad34cf3c4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 831.362987] env[62070]: DEBUG oslo_concurrency.lockutils [None req-58349f23-6bf8-4092-ac97-bf388655a094 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Lock "fb054a32-c1aa-4884-a087-da5ad34cf3c4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 831.435580] env[62070]: DEBUG nova.compute.manager [None req-e0c1f7c9-4467-40ac-bc84-3c816df5b227 tempest-ServersNegativeTestMultiTenantJSON-2128819833 tempest-ServersNegativeTestMultiTenantJSON-2128819833-project-member] [instance: 4bba7448-69f7-4764-9ae6-eb6585f71515] Starting instance... 
{{(pid=62070) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 831.504358] env[62070]: DEBUG oslo_concurrency.lockutils [None req-cbed1b11-9034-4566-ae5c-78171a6c24e6 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Releasing lock "refresh_cache-e74fd58c-cfa8-45c4-8f02-96234b4a9192" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 831.504737] env[62070]: DEBUG nova.compute.manager [None req-cbed1b11-9034-4566-ae5c-78171a6c24e6 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] [instance: e74fd58c-cfa8-45c4-8f02-96234b4a9192] Instance network_info: |[{"id": "6444a30f-9c50-4eaf-b562-178b627dc0f1", "address": "fa:16:3e:ec:99:7c", "network": {"id": "08004b49-dbc2-4186-9e28-4268e947e8ee", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-2022236674-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "2ecbd5f22c024de8a6b1c45096cb79a7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69e41c97-4d75-4041-ae71-321e7e9d480b", "external-id": "nsx-vlan-transportzone-483", "segmentation_id": 483, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6444a30f-9c", "ovs_interfaceid": "6444a30f-9c50-4eaf-b562-178b627dc0f1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 831.505201] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-cbed1b11-9034-4566-ae5c-78171a6c24e6 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] [instance: e74fd58c-cfa8-45c4-8f02-96234b4a9192] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ec:99:7c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '69e41c97-4d75-4041-ae71-321e7e9d480b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6444a30f-9c50-4eaf-b562-178b627dc0f1', 'vif_model': 'vmxnet3'}] {{(pid=62070) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 831.513445] env[62070]: DEBUG oslo.service.loopingcall [None req-cbed1b11-9034-4566-ae5c-78171a6c24e6 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 831.513679] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e74fd58c-cfa8-45c4-8f02-96234b4a9192] Creating VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 831.513907] env[62070]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a018bf14-ba20-4576-bbc8-ced9db394ed2 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.531989] env[62070]: DEBUG nova.compute.manager [None req-b5d7932b-5fe7-4bae-8652-828c067cb041 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: 65fe3720-95cb-4620-b1c7-eae9e3bc3943] Start spawning the instance on the hypervisor. {{(pid=62070) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 831.540817] env[62070]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 831.540817] env[62070]: value = "task-1121791" [ 831.540817] env[62070]: _type = "Task" [ 831.540817] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 831.550340] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1121791, 'name': CreateVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 831.562158] env[62070]: DEBUG nova.virt.hardware [None req-b5d7932b-5fe7-4bae-8652-828c067cb041 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T09:21:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T09:21:17Z,direct_url=,disk_format='vmdk',id=43ea607c-7ece-4601-9b11-75c6a16aa7dd,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9d42cb2bbadf40d6b35f237f71234611',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T09:21:18Z,virtual_size=,visibility=), allow threads: False {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 831.562158] env[62070]: DEBUG nova.virt.hardware [None req-b5d7932b-5fe7-4bae-8652-828c067cb041 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Flavor limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 831.562158] env[62070]: DEBUG nova.virt.hardware [None req-b5d7932b-5fe7-4bae-8652-828c067cb041 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Image limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 831.562158] env[62070]: DEBUG nova.virt.hardware [None req-b5d7932b-5fe7-4bae-8652-828c067cb041 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Flavor pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 831.562158] env[62070]: DEBUG nova.virt.hardware [None req-b5d7932b-5fe7-4bae-8652-828c067cb041 
tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Image pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 831.562884] env[62070]: DEBUG nova.virt.hardware [None req-b5d7932b-5fe7-4bae-8652-828c067cb041 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 831.563261] env[62070]: DEBUG nova.virt.hardware [None req-b5d7932b-5fe7-4bae-8652-828c067cb041 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 831.563561] env[62070]: DEBUG nova.virt.hardware [None req-b5d7932b-5fe7-4bae-8652-828c067cb041 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 831.564000] env[62070]: DEBUG nova.virt.hardware [None req-b5d7932b-5fe7-4bae-8652-828c067cb041 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Got 1 possible topologies {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 831.564343] env[62070]: DEBUG nova.virt.hardware [None req-b5d7932b-5fe7-4bae-8652-828c067cb041 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 831.564851] env[62070]: DEBUG nova.virt.hardware [None req-b5d7932b-5fe7-4bae-8652-828c067cb041 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 831.566364] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3dc1d7f-0132-402c-bb31-a117629b6bf1 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.579231] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae4290e5-cc56-432b-bd3a-59956e331de4 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.661400] env[62070]: ERROR nova.scheduler.client.report [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] [req-4584d72f-5525-4b83-9734-122f79df1fc6] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 21c7c111-1b69-4468-b2c4-5dd96014fbd6. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-4584d72f-5525-4b83-9734-122f79df1fc6"}]} [ 831.691862] env[62070]: DEBUG nova.scheduler.client.report [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Refreshing inventories for resource provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 831.695364] env[62070]: DEBUG oslo_service.periodic_task [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=62070) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 831.701099] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-ebb5909b-d360-4ca0-af63-0f75ec534104 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] [instance: 5ec9074b-1237-4404-b13c-a7ca0dbe1d43] VM already powered off {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1509}} [ 831.701336] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-ebb5909b-d360-4ca0-af63-0f75ec534104 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] [instance: 5ec9074b-1237-4404-b13c-a7ca0dbe1d43] Processing image 43ea607c-7ece-4601-9b11-75c6a16aa7dd {{(pid=62070) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 831.701587] env[62070]: DEBUG oslo_concurrency.lockutils [None req-ebb5909b-d360-4ca0-af63-0f75ec534104 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 831.701739] env[62070]: DEBUG oslo_concurrency.lockutils [None req-ebb5909b-d360-4ca0-af63-0f75ec534104 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Acquired lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 831.701919] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-ebb5909b-d360-4ca0-af63-0f75ec534104 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 831.702274] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-bb2030b4-81c1-4bed-b81d-8dc66685c5f8 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.717944] env[62070]: DEBUG nova.scheduler.client.report [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Updating ProviderTree inventory for provider 
21c7c111-1b69-4468-b2c4-5dd96014fbd6 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 831.718210] env[62070]: DEBUG nova.compute.provider_tree [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Updating inventory in ProviderTree for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 831.723281] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-ebb5909b-d360-4ca0-af63-0f75ec534104 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 831.723497] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-ebb5909b-d360-4ca0-af63-0f75ec534104 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62070) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 831.729221] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-681dc1a9-8599-466f-8e12-e0314e866f87 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.741700] env[62070]: DEBUG oslo_vmware.api [None req-80e54eb4-2bdf-469f-a028-5ebd9367702b tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Task: {'id': task-1121784, 'name': RemoveSnapshot_Task, 'duration_secs': 1.648067} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 831.742768] env[62070]: DEBUG nova.scheduler.client.report [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Refreshing aggregate associations for resource provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6, aggregates: None {{(pid=62070) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 831.747655] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-80e54eb4-2bdf-469f-a028-5ebd9367702b tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] [instance: 359ae9f2-a907-459e-99b9-3e043d5d015f] Deleted Snapshot of the VM instance {{(pid=62070) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 831.747835] env[62070]: INFO nova.compute.manager [None req-80e54eb4-2bdf-469f-a028-5ebd9367702b tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] [instance: 359ae9f2-a907-459e-99b9-3e043d5d015f] Took 18.39 seconds to snapshot the instance on the hypervisor. [ 831.753884] env[62070]: DEBUG oslo_vmware.api [None req-ebb5909b-d360-4ca0-af63-0f75ec534104 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Waiting for the task: (returnval){ [ 831.753884] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]52ac4cc7-0d08-f7b2-d9f0-c17a06c7fb02" [ 831.753884] env[62070]: _type = "Task" [ 831.753884] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 831.765536] env[62070]: DEBUG oslo_vmware.api [None req-ebb5909b-d360-4ca0-af63-0f75ec534104 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52ac4cc7-0d08-f7b2-d9f0-c17a06c7fb02, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 831.779758] env[62070]: DEBUG oslo_concurrency.lockutils [None req-ce7278d1-f76f-4623-9cfe-51d678cfa5d5 tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 831.781224] env[62070]: DEBUG nova.scheduler.client.report [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Refreshing trait associations for resource provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6, traits: COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO {{(pid=62070) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 831.807184] env[62070]: DEBUG nova.network.neutron [-] [instance: c3c6e93c-80be-4e71-87fb-2ff8db8d30fe] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 831.821368] env[62070]: DEBUG oslo_vmware.api [None req-f077bfde-40ac-4e3c-a213-ff8fe8aed011 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Task: {'id': task-1121788, 'name': Destroy_Task, 'duration_secs': 0.544872} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 831.822123] env[62070]: INFO nova.virt.vmwareapi.vm_util [None req-f077bfde-40ac-4e3c-a213-ff8fe8aed011 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 328fbc92-8162-4e12-a02d-6e9cafe0c365] Destroyed the VM [ 831.822880] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-f077bfde-40ac-4e3c-a213-ff8fe8aed011 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 328fbc92-8162-4e12-a02d-6e9cafe0c365] Deleting Snapshot of the VM instance {{(pid=62070) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 831.823848] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-5d5ba441-a968-46fb-bd5f-1ebf886dea2d {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.836017] env[62070]: DEBUG oslo_vmware.api [None req-f077bfde-40ac-4e3c-a213-ff8fe8aed011 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Waiting for the task: (returnval){ [ 831.836017] env[62070]: value = "task-1121792" [ 831.836017] env[62070]: _type = "Task" [ 831.836017] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 831.851873] env[62070]: DEBUG oslo_vmware.api [None req-f077bfde-40ac-4e3c-a213-ff8fe8aed011 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Task: {'id': task-1121792, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 831.859111] env[62070]: DEBUG oslo_vmware.api [None req-afd9a476-b061-4376-a74f-1ae1bdb543db tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Task: {'id': task-1121790, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 831.868750] env[62070]: DEBUG nova.compute.manager [None req-58349f23-6bf8-4092-ac97-bf388655a094 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: fb054a32-c1aa-4884-a087-da5ad34cf3c4] Starting instance... {{(pid=62070) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 831.966589] env[62070]: DEBUG oslo_concurrency.lockutils [None req-e0c1f7c9-4467-40ac-bc84-3c816df5b227 tempest-ServersNegativeTestMultiTenantJSON-2128819833 tempest-ServersNegativeTestMultiTenantJSON-2128819833-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 832.054653] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1121791, 'name': CreateVM_Task} progress is 25%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 832.243967] env[62070]: DEBUG oslo_concurrency.lockutils [None req-045c7ab7-65c9-449f-b5dd-885011718b78 tempest-ServersTestManualDisk-1129474164 tempest-ServersTestManualDisk-1129474164-project-member] Acquiring lock "d0914f90-200c-4715-aaab-54beacf339b9" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 832.244293] env[62070]: DEBUG oslo_concurrency.lockutils [None req-045c7ab7-65c9-449f-b5dd-885011718b78 tempest-ServersTestManualDisk-1129474164 tempest-ServersTestManualDisk-1129474164-project-member] Lock "d0914f90-200c-4715-aaab-54beacf339b9" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 832.244643] env[62070]: DEBUG oslo_concurrency.lockutils [None req-045c7ab7-65c9-449f-b5dd-885011718b78 tempest-ServersTestManualDisk-1129474164 tempest-ServersTestManualDisk-1129474164-project-member] Acquiring lock "d0914f90-200c-4715-aaab-54beacf339b9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 832.244785] env[62070]: DEBUG oslo_concurrency.lockutils [None req-045c7ab7-65c9-449f-b5dd-885011718b78 tempest-ServersTestManualDisk-1129474164 tempest-ServersTestManualDisk-1129474164-project-member] Lock "d0914f90-200c-4715-aaab-54beacf339b9-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 832.244955] env[62070]: DEBUG oslo_concurrency.lockutils [None req-045c7ab7-65c9-449f-b5dd-885011718b78 tempest-ServersTestManualDisk-1129474164 tempest-ServersTestManualDisk-1129474164-project-member] Lock 
"d0914f90-200c-4715-aaab-54beacf339b9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 832.247606] env[62070]: INFO nova.compute.manager [None req-045c7ab7-65c9-449f-b5dd-885011718b78 tempest-ServersTestManualDisk-1129474164 tempest-ServersTestManualDisk-1129474164-project-member] [instance: d0914f90-200c-4715-aaab-54beacf339b9] Terminating instance [ 832.250165] env[62070]: DEBUG nova.compute.manager [None req-045c7ab7-65c9-449f-b5dd-885011718b78 tempest-ServersTestManualDisk-1129474164 tempest-ServersTestManualDisk-1129474164-project-member] [instance: d0914f90-200c-4715-aaab-54beacf339b9] Start destroying the instance on the hypervisor. {{(pid=62070) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 832.250386] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-045c7ab7-65c9-449f-b5dd-885011718b78 tempest-ServersTestManualDisk-1129474164 tempest-ServersTestManualDisk-1129474164-project-member] [instance: d0914f90-200c-4715-aaab-54beacf339b9] Destroying instance {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 832.251377] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04103dc4-ecc8-4e89-afef-2cbc7e08aa22 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.268766] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-045c7ab7-65c9-449f-b5dd-885011718b78 tempest-ServersTestManualDisk-1129474164 tempest-ServersTestManualDisk-1129474164-project-member] [instance: d0914f90-200c-4715-aaab-54beacf339b9] Powering off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 832.269455] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a64340ef-d9f5-4f6a-8a27-e74a53a20210 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.275980] env[62070]: DEBUG oslo_vmware.api [None req-ebb5909b-d360-4ca0-af63-0f75ec534104 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52ac4cc7-0d08-f7b2-d9f0-c17a06c7fb02, 'name': SearchDatastore_Task, 'duration_secs': 0.034865} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 832.276915] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b4076049-0f40-4718-8a30-71778f2a93f1 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.281555] env[62070]: DEBUG oslo_vmware.api [None req-045c7ab7-65c9-449f-b5dd-885011718b78 tempest-ServersTestManualDisk-1129474164 tempest-ServersTestManualDisk-1129474164-project-member] Waiting for the task: (returnval){ [ 832.281555] env[62070]: value = "task-1121793" [ 832.281555] env[62070]: _type = "Task" [ 832.281555] env[62070]: } to complete. 
{{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 832.295883] env[62070]: DEBUG oslo_vmware.api [None req-ebb5909b-d360-4ca0-af63-0f75ec534104 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Waiting for the task: (returnval){ [ 832.295883] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]5292bda5-7277-76ac-e27e-920902de09bf" [ 832.295883] env[62070]: _type = "Task" [ 832.295883] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 832.307828] env[62070]: DEBUG oslo_vmware.api [None req-045c7ab7-65c9-449f-b5dd-885011718b78 tempest-ServersTestManualDisk-1129474164 tempest-ServersTestManualDisk-1129474164-project-member] Task: {'id': task-1121793, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 832.314986] env[62070]: INFO nova.compute.manager [-] [instance: c3c6e93c-80be-4e71-87fb-2ff8db8d30fe] Took 1.46 seconds to deallocate network for instance. [ 832.315371] env[62070]: DEBUG oslo_vmware.api [None req-ebb5909b-d360-4ca0-af63-0f75ec534104 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]5292bda5-7277-76ac-e27e-920902de09bf, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 832.316158] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc16660b-dd10-4ead-ad6d-28dc27fd84f1 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.330678] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae5fa4d7-3a95-41a2-913e-6e885122bbde {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.344707] env[62070]: DEBUG oslo_vmware.api [None req-f077bfde-40ac-4e3c-a213-ff8fe8aed011 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Task: {'id': task-1121792, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 832.378297] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74886d2d-4eb5-43cb-8298-59ef42bb4196 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.392219] env[62070]: DEBUG oslo_vmware.api [None req-afd9a476-b061-4376-a74f-1ae1bdb543db tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Task: {'id': task-1121790, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 832.395768] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d12a0ff0-69e4-4bee-8761-ed7341e0df3e {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.401030] env[62070]: DEBUG oslo_concurrency.lockutils [None req-58349f23-6bf8-4092-ac97-bf388655a094 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 832.413573] env[62070]: DEBUG nova.compute.provider_tree [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Updating inventory in ProviderTree for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 832.553596] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1121791, 'name': CreateVM_Task} progress is 25%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 832.559506] env[62070]: DEBUG nova.network.neutron [None req-b5d7932b-5fe7-4bae-8652-828c067cb041 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: 65fe3720-95cb-4620-b1c7-eae9e3bc3943] Successfully updated port: 0f1aed27-115c-407c-b787-21d92045c9fd {{(pid=62070) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 832.607566] env[62070]: DEBUG nova.compute.manager [req-e31ee6cc-b663-4014-b684-57f9794b4afd req-6f5b2328-190a-4bf1-89b3-77219919098b service nova] [instance: 4a5f644a-1670-4c6b-a762-f87f1ee4cce5] Received event network-changed-3a8213ef-a979-487a-8756-7bfecdf4ba10 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 832.608071] env[62070]: DEBUG nova.compute.manager [req-e31ee6cc-b663-4014-b684-57f9794b4afd req-6f5b2328-190a-4bf1-89b3-77219919098b service nova] [instance: 4a5f644a-1670-4c6b-a762-f87f1ee4cce5] Refreshing instance network info cache due to event network-changed-3a8213ef-a979-487a-8756-7bfecdf4ba10. 
{{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 832.608391] env[62070]: DEBUG oslo_concurrency.lockutils [req-e31ee6cc-b663-4014-b684-57f9794b4afd req-6f5b2328-190a-4bf1-89b3-77219919098b service nova] Acquiring lock "refresh_cache-4a5f644a-1670-4c6b-a762-f87f1ee4cce5" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 832.609474] env[62070]: DEBUG oslo_concurrency.lockutils [req-e31ee6cc-b663-4014-b684-57f9794b4afd req-6f5b2328-190a-4bf1-89b3-77219919098b service nova] Acquired lock "refresh_cache-4a5f644a-1670-4c6b-a762-f87f1ee4cce5" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 832.612714] env[62070]: DEBUG nova.network.neutron [req-e31ee6cc-b663-4014-b684-57f9794b4afd req-6f5b2328-190a-4bf1-89b3-77219919098b service nova] [instance: 4a5f644a-1670-4c6b-a762-f87f1ee4cce5] Refreshing network info cache for port 3a8213ef-a979-487a-8756-7bfecdf4ba10 {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 832.798864] env[62070]: DEBUG oslo_vmware.api [None req-045c7ab7-65c9-449f-b5dd-885011718b78 tempest-ServersTestManualDisk-1129474164 tempest-ServersTestManualDisk-1129474164-project-member] Task: {'id': task-1121793, 'name': PowerOffVM_Task, 'duration_secs': 0.249642} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 832.802754] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-045c7ab7-65c9-449f-b5dd-885011718b78 tempest-ServersTestManualDisk-1129474164 tempest-ServersTestManualDisk-1129474164-project-member] [instance: d0914f90-200c-4715-aaab-54beacf339b9] Powered off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 832.803038] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-045c7ab7-65c9-449f-b5dd-885011718b78 tempest-ServersTestManualDisk-1129474164 tempest-ServersTestManualDisk-1129474164-project-member] [instance: d0914f90-200c-4715-aaab-54beacf339b9] Unregistering the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 832.803594] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0e0168f6-7e39-4c94-8a09-cb8fad6177eb {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.815451] env[62070]: DEBUG oslo_vmware.api [None req-ebb5909b-d360-4ca0-af63-0f75ec534104 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]5292bda5-7277-76ac-e27e-920902de09bf, 'name': SearchDatastore_Task, 'duration_secs': 0.057271} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 832.815852] env[62070]: DEBUG oslo_concurrency.lockutils [None req-ebb5909b-d360-4ca0-af63-0f75ec534104 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Releasing lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 832.816211] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-ebb5909b-d360-4ca0-af63-0f75ec534104 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore1] 5ec9074b-1237-4404-b13c-a7ca0dbe1d43/43ea607c-7ece-4601-9b11-75c6a16aa7dd-rescue.vmdk. {{(pid=62070) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 832.816577] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d7f47280-9325-4089-8b4f-ef14b717ff9f {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.826196] env[62070]: DEBUG oslo_concurrency.lockutils [None req-732b2ab1-8333-4faa-b641-cbbd113d03d2 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 832.828439] env[62070]: DEBUG oslo_vmware.api [None req-ebb5909b-d360-4ca0-af63-0f75ec534104 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Waiting for the task: (returnval){ [ 832.828439] env[62070]: value = "task-1121795" [ 832.828439] env[62070]: _type = "Task" [ 832.828439] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 832.838175] env[62070]: DEBUG oslo_vmware.api [None req-ebb5909b-d360-4ca0-af63-0f75ec534104 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Task: {'id': task-1121795, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 832.847200] env[62070]: DEBUG oslo_vmware.api [None req-f077bfde-40ac-4e3c-a213-ff8fe8aed011 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Task: {'id': task-1121792, 'name': RemoveSnapshot_Task} progress is 80%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 832.859031] env[62070]: DEBUG oslo_vmware.api [None req-afd9a476-b061-4376-a74f-1ae1bdb543db tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Task: {'id': task-1121790, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.078632} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 832.859307] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-afd9a476-b061-4376-a74f-1ae1bdb543db tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore1] 4a5f644a-1670-4c6b-a762-f87f1ee4cce5/4a5f644a-1670-4c6b-a762-f87f1ee4cce5.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 832.859407] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-afd9a476-b061-4376-a74f-1ae1bdb543db tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] [instance: 4a5f644a-1670-4c6b-a762-f87f1ee4cce5] Extending root virtual disk to 1048576 {{(pid=62070) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 832.859656] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e4e4c629-9f03-499c-8e5c-a2796e81fcf0 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.868580] env[62070]: DEBUG oslo_vmware.api [None req-afd9a476-b061-4376-a74f-1ae1bdb543db tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Waiting for the task: (returnval){ [ 832.868580] env[62070]: value = "task-1121796" [ 832.868580] env[62070]: _type = "Task" [ 832.868580] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 832.881135] env[62070]: DEBUG oslo_vmware.api [None req-afd9a476-b061-4376-a74f-1ae1bdb543db tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Task: {'id': task-1121796, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 832.899380] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-045c7ab7-65c9-449f-b5dd-885011718b78 tempest-ServersTestManualDisk-1129474164 tempest-ServersTestManualDisk-1129474164-project-member] [instance: d0914f90-200c-4715-aaab-54beacf339b9] Unregistered the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 832.899380] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-045c7ab7-65c9-449f-b5dd-885011718b78 tempest-ServersTestManualDisk-1129474164 tempest-ServersTestManualDisk-1129474164-project-member] [instance: d0914f90-200c-4715-aaab-54beacf339b9] Deleting contents of the VM from datastore datastore2 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 832.899380] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-045c7ab7-65c9-449f-b5dd-885011718b78 tempest-ServersTestManualDisk-1129474164 tempest-ServersTestManualDisk-1129474164-project-member] Deleting the datastore file [datastore2] d0914f90-200c-4715-aaab-54beacf339b9 {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 832.899380] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-87321216-8c51-429d-85e6-e1d18aa4139a {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.908069] env[62070]: DEBUG oslo_vmware.api [None req-045c7ab7-65c9-449f-b5dd-885011718b78 tempest-ServersTestManualDisk-1129474164 tempest-ServersTestManualDisk-1129474164-project-member] Waiting for the task: (returnval){ [ 832.908069] env[62070]: value = "task-1121797" [ 832.908069] env[62070]: _type = "Task" [ 832.908069] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 832.924250] env[62070]: DEBUG oslo_vmware.api [None req-045c7ab7-65c9-449f-b5dd-885011718b78 tempest-ServersTestManualDisk-1129474164 tempest-ServersTestManualDisk-1129474164-project-member] Task: {'id': task-1121797, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 832.937808] env[62070]: ERROR nova.scheduler.client.report [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] [req-c9f1bea3-56c7-448c-975f-3a9af41dc6ad] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 21c7c111-1b69-4468-b2c4-5dd96014fbd6. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-c9f1bea3-56c7-448c-975f-3a9af41dc6ad"}]} [ 832.973124] env[62070]: DEBUG nova.scheduler.client.report [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Refreshing inventories for resource provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 832.991463] env[62070]: DEBUG nova.scheduler.client.report [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Updating ProviderTree inventory for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 832.991721] env[62070]: DEBUG nova.compute.provider_tree [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Updating inventory in ProviderTree for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 833.012323] env[62070]: DEBUG nova.scheduler.client.report [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Refreshing aggregate associations for resource provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6, aggregates: None {{(pid=62070) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 833.040170] env[62070]: DEBUG nova.scheduler.client.report [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Refreshing trait associations for resource provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6, traits: COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO {{(pid=62070) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 833.057871] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1121791, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 833.062722] env[62070]: DEBUG oslo_concurrency.lockutils [None req-b5d7932b-5fe7-4bae-8652-828c067cb041 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Acquiring lock "refresh_cache-65fe3720-95cb-4620-b1c7-eae9e3bc3943" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 833.062917] env[62070]: DEBUG oslo_concurrency.lockutils [None req-b5d7932b-5fe7-4bae-8652-828c067cb041 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Acquired lock "refresh_cache-65fe3720-95cb-4620-b1c7-eae9e3bc3943" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 833.063272] env[62070]: DEBUG nova.network.neutron [None req-b5d7932b-5fe7-4bae-8652-828c067cb041 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: 65fe3720-95cb-4620-b1c7-eae9e3bc3943] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 833.364942] env[62070]: DEBUG oslo_vmware.api [None req-ebb5909b-d360-4ca0-af63-0f75ec534104 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Task: {'id': task-1121795, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 833.372918] env[62070]: DEBUG oslo_vmware.api [None req-f077bfde-40ac-4e3c-a213-ff8fe8aed011 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Task: {'id': task-1121792, 'name': RemoveSnapshot_Task, 'duration_secs': 1.262559} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 833.378596] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-f077bfde-40ac-4e3c-a213-ff8fe8aed011 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 328fbc92-8162-4e12-a02d-6e9cafe0c365] Deleted Snapshot of the VM instance {{(pid=62070) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 833.378596] env[62070]: INFO nova.compute.manager [None req-f077bfde-40ac-4e3c-a213-ff8fe8aed011 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 328fbc92-8162-4e12-a02d-6e9cafe0c365] Took 17.66 seconds to snapshot the instance on the hypervisor. [ 833.393267] env[62070]: DEBUG oslo_vmware.api [None req-afd9a476-b061-4376-a74f-1ae1bdb543db tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Task: {'id': task-1121796, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.07429} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 833.396583] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-afd9a476-b061-4376-a74f-1ae1bdb543db tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] [instance: 4a5f644a-1670-4c6b-a762-f87f1ee4cce5] Extended root virtual disk {{(pid=62070) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 833.398544] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-820a2d56-6519-4859-84aa-483fb73be8c6 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.433347] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-afd9a476-b061-4376-a74f-1ae1bdb543db tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] [instance: 4a5f644a-1670-4c6b-a762-f87f1ee4cce5] Reconfiguring VM instance instance-0000003f to attach disk [datastore1] 4a5f644a-1670-4c6b-a762-f87f1ee4cce5/4a5f644a-1670-4c6b-a762-f87f1ee4cce5.vmdk or device None with type sparse {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 833.439881] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5fb86e7f-933c-4597-a9d6-ab50f5dba304 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.469221] env[62070]: DEBUG oslo_vmware.api [None req-045c7ab7-65c9-449f-b5dd-885011718b78 tempest-ServersTestManualDisk-1129474164 tempest-ServersTestManualDisk-1129474164-project-member] Task: {'id': task-1121797, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.262246} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 833.469818] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-045c7ab7-65c9-449f-b5dd-885011718b78 tempest-ServersTestManualDisk-1129474164 tempest-ServersTestManualDisk-1129474164-project-member] Deleted the datastore file {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 833.470049] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-045c7ab7-65c9-449f-b5dd-885011718b78 tempest-ServersTestManualDisk-1129474164 tempest-ServersTestManualDisk-1129474164-project-member] [instance: d0914f90-200c-4715-aaab-54beacf339b9] Deleted contents of the VM from datastore datastore2 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 833.470271] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-045c7ab7-65c9-449f-b5dd-885011718b78 tempest-ServersTestManualDisk-1129474164 tempest-ServersTestManualDisk-1129474164-project-member] [instance: d0914f90-200c-4715-aaab-54beacf339b9] Instance destroyed {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 833.470462] env[62070]: INFO nova.compute.manager [None req-045c7ab7-65c9-449f-b5dd-885011718b78 tempest-ServersTestManualDisk-1129474164 tempest-ServersTestManualDisk-1129474164-project-member] [instance: d0914f90-200c-4715-aaab-54beacf339b9] Took 1.22 seconds to destroy the instance on the hypervisor. 
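Reader's note (not part of the captured log): the PowerOffVM_Task / UnregisterVM / DeleteDatastoreFile_Task / ReconfigVM_Task entries above all follow the same shape — a vSphere task object is created, then polled until it finishes, which is what produces the repeated "progress is N%." lines followed by "completed successfully." Below is a minimal, self-contained Python sketch of that poll-until-done loop. TaskInfo, fake_get_task_info and this wait_for_task are hypothetical stand-ins written for illustration only; they are not the real oslo.vmware or pyVmomi API.

# Minimal sketch of the task-polling pattern seen in the log (hypothetical names).
import itertools
import time
from dataclasses import dataclass

@dataclass
class TaskInfo:
    state: str                # "running", "success" or "error"
    progress: int             # 0-100
    error: str | None = None

_progress = itertools.count(25, 25)   # fake vCenter: 25%, 50%, 75%, then done

def fake_get_task_info(task_id: str) -> TaskInfo:
    """Hypothetical stand-in for asking vCenter about a task's current state."""
    p = next(_progress)
    if p >= 100:
        return TaskInfo(state="success", progress=100)
    return TaskInfo(state="running", progress=p)

def wait_for_task(task_id: str, poll_interval: float = 0.5) -> TaskInfo:
    """Poll until the task leaves 'running', reporting progress like the entries above."""
    while True:
        info = fake_get_task_info(task_id)
        if info.state == "running":
            print(f"Task: {{'id': {task_id!r}}} progress is {info.progress}%.")
            time.sleep(poll_interval)
            continue
        if info.state == "error":
            raise RuntimeError(f"task {task_id} failed: {info.error}")
        print(f"Task: {{'id': {task_id!r}}} completed successfully.")
        return info

if __name__ == "__main__":
    wait_for_task("task-1121797")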
[ 833.471255] env[62070]: DEBUG oslo.service.loopingcall [None req-045c7ab7-65c9-449f-b5dd-885011718b78 tempest-ServersTestManualDisk-1129474164 tempest-ServersTestManualDisk-1129474164-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 833.471350] env[62070]: DEBUG oslo_vmware.api [None req-afd9a476-b061-4376-a74f-1ae1bdb543db tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Waiting for the task: (returnval){ [ 833.471350] env[62070]: value = "task-1121798" [ 833.471350] env[62070]: _type = "Task" [ 833.471350] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 833.471579] env[62070]: DEBUG nova.compute.manager [-] [instance: d0914f90-200c-4715-aaab-54beacf339b9] Deallocating network for instance {{(pid=62070) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 833.471619] env[62070]: DEBUG nova.network.neutron [-] [instance: d0914f90-200c-4715-aaab-54beacf339b9] deallocate_for_instance() {{(pid=62070) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 833.481677] env[62070]: DEBUG oslo_concurrency.lockutils [None req-cdabb9ea-ead2-4381-8d7a-de546f5dce91 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Acquiring lock "58146b84-7589-4f21-bdab-605cee535e55" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 833.481921] env[62070]: DEBUG oslo_concurrency.lockutils [None req-cdabb9ea-ead2-4381-8d7a-de546f5dce91 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Lock "58146b84-7589-4f21-bdab-605cee535e55" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 833.490147] env[62070]: DEBUG oslo_vmware.api [None req-afd9a476-b061-4376-a74f-1ae1bdb543db tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Task: {'id': task-1121798, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 833.558358] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1121791, 'name': CreateVM_Task, 'duration_secs': 1.669454} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 833.558358] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e74fd58c-cfa8-45c4-8f02-96234b4a9192] Created VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 833.559654] env[62070]: DEBUG oslo_concurrency.lockutils [None req-cbed1b11-9034-4566-ae5c-78171a6c24e6 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 833.559654] env[62070]: DEBUG oslo_concurrency.lockutils [None req-cbed1b11-9034-4566-ae5c-78171a6c24e6 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Acquired lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 833.559654] env[62070]: DEBUG oslo_concurrency.lockutils [None req-cbed1b11-9034-4566-ae5c-78171a6c24e6 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 833.559969] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-11c08986-af79-4062-b385-c6bddc1d5276 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.567669] env[62070]: DEBUG oslo_vmware.api [None req-cbed1b11-9034-4566-ae5c-78171a6c24e6 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Waiting for the task: (returnval){ [ 833.567669] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]52a56721-0470-eadb-0458-5815be98b7a6" [ 833.567669] env[62070]: _type = "Task" [ 833.567669] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 833.580324] env[62070]: DEBUG oslo_vmware.api [None req-cbed1b11-9034-4566-ae5c-78171a6c24e6 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52a56721-0470-eadb-0458-5815be98b7a6, 'name': SearchDatastore_Task, 'duration_secs': 0.011887} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 833.580635] env[62070]: DEBUG oslo_concurrency.lockutils [None req-cbed1b11-9034-4566-ae5c-78171a6c24e6 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Releasing lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 833.580880] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-cbed1b11-9034-4566-ae5c-78171a6c24e6 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] [instance: e74fd58c-cfa8-45c4-8f02-96234b4a9192] Processing image 43ea607c-7ece-4601-9b11-75c6a16aa7dd {{(pid=62070) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 833.581127] env[62070]: DEBUG oslo_concurrency.lockutils [None req-cbed1b11-9034-4566-ae5c-78171a6c24e6 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 833.581269] env[62070]: DEBUG oslo_concurrency.lockutils [None req-cbed1b11-9034-4566-ae5c-78171a6c24e6 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Acquired lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 833.581454] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-cbed1b11-9034-4566-ae5c-78171a6c24e6 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 833.584045] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f2656b1d-b87f-4e30-a5ab-96fc630bbd75 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.594539] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-cbed1b11-9034-4566-ae5c-78171a6c24e6 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 833.594769] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-cbed1b11-9034-4566-ae5c-78171a6c24e6 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62070) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 833.599020] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9eba46bb-6e1a-466c-ba10-35a0028d9897 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.605898] env[62070]: DEBUG oslo_vmware.api [None req-cbed1b11-9034-4566-ae5c-78171a6c24e6 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Waiting for the task: (returnval){ [ 833.605898] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]52738a6e-5525-7cc3-d3b4-44679997326e" [ 833.605898] env[62070]: _type = "Task" [ 833.605898] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 833.616498] env[62070]: DEBUG oslo_vmware.api [None req-cbed1b11-9034-4566-ae5c-78171a6c24e6 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52738a6e-5525-7cc3-d3b4-44679997326e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 833.635840] env[62070]: DEBUG nova.network.neutron [req-e31ee6cc-b663-4014-b684-57f9794b4afd req-6f5b2328-190a-4bf1-89b3-77219919098b service nova] [instance: 4a5f644a-1670-4c6b-a762-f87f1ee4cce5] Updated VIF entry in instance network info cache for port 3a8213ef-a979-487a-8756-7bfecdf4ba10. {{(pid=62070) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 833.636580] env[62070]: DEBUG nova.network.neutron [req-e31ee6cc-b663-4014-b684-57f9794b4afd req-6f5b2328-190a-4bf1-89b3-77219919098b service nova] [instance: 4a5f644a-1670-4c6b-a762-f87f1ee4cce5] Updating instance_info_cache with network_info: [{"id": "3a8213ef-a979-487a-8756-7bfecdf4ba10", "address": "fa:16:3e:25:2e:9f", "network": {"id": "443d2d62-bcef-44b2-814a-3e5dc50abc04", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-772061432-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e85c0cc8e0f544bfbb76970d3123fbb7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5e1049e8-c06b-4c93-a9e1-2cbb530f3f95", "external-id": "nsx-vlan-transportzone-966", "segmentation_id": 966, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3a8213ef-a9", "ovs_interfaceid": "3a8213ef-a979-487a-8756-7bfecdf4ba10", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 833.676254] env[62070]: DEBUG nova.network.neutron [None req-b5d7932b-5fe7-4bae-8652-828c067cb041 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: 65fe3720-95cb-4620-b1c7-eae9e3bc3943] Instance cache missing network info. 
{{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 833.713390] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4140c7f-4323-47b4-87fd-7292d07973ea {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.723732] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f81dfa2e-7b51-44b6-b1da-bbf325ba7fa2 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.764784] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32dd8ae8-c623-4cf3-8883-6c2e6318e0fe {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.773955] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-623fd166-0526-4978-bccc-d05e3eda58b5 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.790127] env[62070]: DEBUG nova.compute.provider_tree [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Updating inventory in ProviderTree for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 833.839870] env[62070]: DEBUG oslo_vmware.api [None req-ebb5909b-d360-4ca0-af63-0f75ec534104 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Task: {'id': task-1121795, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.608282} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 833.840166] env[62070]: INFO nova.virt.vmwareapi.ds_util [None req-ebb5909b-d360-4ca0-af63-0f75ec534104 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore1] 5ec9074b-1237-4404-b13c-a7ca0dbe1d43/43ea607c-7ece-4601-9b11-75c6a16aa7dd-rescue.vmdk. 
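Reader's note (not part of the captured log): the 409 "placement.concurrent_update" error above, followed by the "Refreshing inventories / aggregates / traits" entries and the later "generation from 89 to 90" update, shows Placement's optimistic-concurrency scheme: every inventory write carries the resource provider generation, a stale generation is rejected with a conflict, and the client re-reads the provider state and retries. The sketch below reproduces only that retry loop; FakePlacement and its methods are invented stand-ins for illustration, not the real Placement REST API or nova's scheduler report client.

# Minimal sketch of retrying an inventory update after a generation conflict (hypothetical names).
class ConflictError(Exception):
    """Raised when the provider generation sent with the update is stale."""

class FakePlacement:
    def __init__(self):
        self.generation = 89
        self.inventories = {}

    def get_generation(self, provider_uuid: str) -> int:
        return self.generation

    def put_inventories(self, provider_uuid: str, generation: int, inv: dict) -> int:
        if generation != self.generation:
            raise ConflictError("resource provider generation conflict")
        self.inventories = inv
        self.generation += 1          # every successful write bumps the generation
        return self.generation

def set_inventory_with_retry(placement, provider_uuid, inv, generation, retries=3):
    """Try the update; on a conflict, re-read the generation and try again."""
    for _ in range(retries):
        try:
            return placement.put_inventories(provider_uuid, generation, inv)
        except ConflictError:
            generation = placement.get_generation(provider_uuid)
    raise RuntimeError("gave up after repeated generation conflicts")

if __name__ == "__main__":
    pl = FakePlacement()
    rp = "21c7c111-1b69-4468-b2c4-5dd96014fbd6"
    inv = {"VCPU": {"total": 48, "allocation_ratio": 4.0},
           "MEMORY_MB": {"total": 196590, "reserved": 512},
           "DISK_GB": {"total": 400, "max_unit": 169}}
    stale = 88                        # simulate the stale generation that triggered the 409
    new_gen = set_inventory_with_retry(pl, rp, inv, stale)
    print(f"inventory set; provider generation is now {new_gen}")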
[ 833.840845] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2853d8cf-0c4a-4664-b915-7ccd5718891b {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.868749] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-ebb5909b-d360-4ca0-af63-0f75ec534104 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] [instance: 5ec9074b-1237-4404-b13c-a7ca0dbe1d43] Reconfiguring VM instance instance-0000003e to attach disk [datastore1] 5ec9074b-1237-4404-b13c-a7ca0dbe1d43/43ea607c-7ece-4601-9b11-75c6a16aa7dd-rescue.vmdk or device None with type thin {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 833.869104] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f7fefa8a-2c82-4d2d-bd4c-33ffeebf8a07 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.894132] env[62070]: DEBUG oslo_vmware.api [None req-ebb5909b-d360-4ca0-af63-0f75ec534104 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Waiting for the task: (returnval){ [ 833.894132] env[62070]: value = "task-1121799" [ 833.894132] env[62070]: _type = "Task" [ 833.894132] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 833.903769] env[62070]: DEBUG oslo_vmware.api [None req-ebb5909b-d360-4ca0-af63-0f75ec534104 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Task: {'id': task-1121799, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 833.984645] env[62070]: DEBUG oslo_vmware.api [None req-afd9a476-b061-4376-a74f-1ae1bdb543db tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Task: {'id': task-1121798, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 834.088360] env[62070]: DEBUG nova.network.neutron [None req-b5d7932b-5fe7-4bae-8652-828c067cb041 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: 65fe3720-95cb-4620-b1c7-eae9e3bc3943] Updating instance_info_cache with network_info: [{"id": "0f1aed27-115c-407c-b787-21d92045c9fd", "address": "fa:16:3e:dd:b3:fe", "network": {"id": "516790be-56b8-409d-b1c0-a8683a45a9ec", "bridge": "br-int", "label": "tempest-ServersTestJSON-693737631-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "91e246e32f29422e90fae974cfee9d8f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "359850cc-b061-4c9c-a61c-eb42e0f7c359", "external-id": "nsx-vlan-transportzone-113", "segmentation_id": 113, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0f1aed27-11", "ovs_interfaceid": "0f1aed27-115c-407c-b787-21d92045c9fd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 834.124684] env[62070]: DEBUG oslo_vmware.api [None req-cbed1b11-9034-4566-ae5c-78171a6c24e6 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52738a6e-5525-7cc3-d3b4-44679997326e, 'name': SearchDatastore_Task, 'duration_secs': 0.010638} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 834.127140] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2598e213-5405-4f32-ad54-3875607610a0 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.135418] env[62070]: DEBUG oslo_vmware.api [None req-cbed1b11-9034-4566-ae5c-78171a6c24e6 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Waiting for the task: (returnval){ [ 834.135418] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]5269e5ae-1180-3421-0a13-c4c94fb0e701" [ 834.135418] env[62070]: _type = "Task" [ 834.135418] env[62070]: } to complete. 
{{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 834.139150] env[62070]: DEBUG oslo_concurrency.lockutils [req-e31ee6cc-b663-4014-b684-57f9794b4afd req-6f5b2328-190a-4bf1-89b3-77219919098b service nova] Releasing lock "refresh_cache-4a5f644a-1670-4c6b-a762-f87f1ee4cce5" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 834.139442] env[62070]: DEBUG nova.compute.manager [req-e31ee6cc-b663-4014-b684-57f9794b4afd req-6f5b2328-190a-4bf1-89b3-77219919098b service nova] [instance: e74fd58c-cfa8-45c4-8f02-96234b4a9192] Received event network-vif-plugged-6444a30f-9c50-4eaf-b562-178b627dc0f1 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 834.139655] env[62070]: DEBUG oslo_concurrency.lockutils [req-e31ee6cc-b663-4014-b684-57f9794b4afd req-6f5b2328-190a-4bf1-89b3-77219919098b service nova] Acquiring lock "e74fd58c-cfa8-45c4-8f02-96234b4a9192-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 834.139872] env[62070]: DEBUG oslo_concurrency.lockutils [req-e31ee6cc-b663-4014-b684-57f9794b4afd req-6f5b2328-190a-4bf1-89b3-77219919098b service nova] Lock "e74fd58c-cfa8-45c4-8f02-96234b4a9192-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 834.140098] env[62070]: DEBUG oslo_concurrency.lockutils [req-e31ee6cc-b663-4014-b684-57f9794b4afd req-6f5b2328-190a-4bf1-89b3-77219919098b service nova] Lock "e74fd58c-cfa8-45c4-8f02-96234b4a9192-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 834.140876] env[62070]: DEBUG nova.compute.manager [req-e31ee6cc-b663-4014-b684-57f9794b4afd req-6f5b2328-190a-4bf1-89b3-77219919098b service nova] [instance: e74fd58c-cfa8-45c4-8f02-96234b4a9192] No waiting events found dispatching network-vif-plugged-6444a30f-9c50-4eaf-b562-178b627dc0f1 {{(pid=62070) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 834.140876] env[62070]: WARNING nova.compute.manager [req-e31ee6cc-b663-4014-b684-57f9794b4afd req-6f5b2328-190a-4bf1-89b3-77219919098b service nova] [instance: e74fd58c-cfa8-45c4-8f02-96234b4a9192] Received unexpected event network-vif-plugged-6444a30f-9c50-4eaf-b562-178b627dc0f1 for instance with vm_state building and task_state spawning. [ 834.140876] env[62070]: DEBUG nova.compute.manager [req-e31ee6cc-b663-4014-b684-57f9794b4afd req-6f5b2328-190a-4bf1-89b3-77219919098b service nova] [instance: e74fd58c-cfa8-45c4-8f02-96234b4a9192] Received event network-changed-6444a30f-9c50-4eaf-b562-178b627dc0f1 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 834.140876] env[62070]: DEBUG nova.compute.manager [req-e31ee6cc-b663-4014-b684-57f9794b4afd req-6f5b2328-190a-4bf1-89b3-77219919098b service nova] [instance: e74fd58c-cfa8-45c4-8f02-96234b4a9192] Refreshing instance network info cache due to event network-changed-6444a30f-9c50-4eaf-b562-178b627dc0f1. 
{{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 834.141045] env[62070]: DEBUG oslo_concurrency.lockutils [req-e31ee6cc-b663-4014-b684-57f9794b4afd req-6f5b2328-190a-4bf1-89b3-77219919098b service nova] Acquiring lock "refresh_cache-e74fd58c-cfa8-45c4-8f02-96234b4a9192" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 834.141108] env[62070]: DEBUG oslo_concurrency.lockutils [req-e31ee6cc-b663-4014-b684-57f9794b4afd req-6f5b2328-190a-4bf1-89b3-77219919098b service nova] Acquired lock "refresh_cache-e74fd58c-cfa8-45c4-8f02-96234b4a9192" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 834.141273] env[62070]: DEBUG nova.network.neutron [req-e31ee6cc-b663-4014-b684-57f9794b4afd req-6f5b2328-190a-4bf1-89b3-77219919098b service nova] [instance: e74fd58c-cfa8-45c4-8f02-96234b4a9192] Refreshing network info cache for port 6444a30f-9c50-4eaf-b562-178b627dc0f1 {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 834.151169] env[62070]: DEBUG oslo_vmware.api [None req-cbed1b11-9034-4566-ae5c-78171a6c24e6 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]5269e5ae-1180-3421-0a13-c4c94fb0e701, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 834.329235] env[62070]: DEBUG nova.scheduler.client.report [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Updated inventory for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 with generation 89 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 834.329516] env[62070]: DEBUG nova.compute.provider_tree [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Updating resource provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 generation from 89 to 90 during operation: update_inventory {{(pid=62070) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 834.329728] env[62070]: DEBUG nova.compute.provider_tree [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Updating inventory in ProviderTree for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 834.409304] env[62070]: DEBUG oslo_vmware.api [None 
req-ebb5909b-d360-4ca0-af63-0f75ec534104 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Task: {'id': task-1121799, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 834.486155] env[62070]: DEBUG oslo_vmware.api [None req-afd9a476-b061-4376-a74f-1ae1bdb543db tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Task: {'id': task-1121798, 'name': ReconfigVM_Task, 'duration_secs': 0.734729} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 834.486567] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-afd9a476-b061-4376-a74f-1ae1bdb543db tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] [instance: 4a5f644a-1670-4c6b-a762-f87f1ee4cce5] Reconfigured VM instance instance-0000003f to attach disk [datastore1] 4a5f644a-1670-4c6b-a762-f87f1ee4cce5/4a5f644a-1670-4c6b-a762-f87f1ee4cce5.vmdk or device None with type sparse {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 834.487339] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2c1bb43d-b7aa-4d21-a89a-1daf141c22e1 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.494966] env[62070]: DEBUG oslo_vmware.api [None req-afd9a476-b061-4376-a74f-1ae1bdb543db tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Waiting for the task: (returnval){ [ 834.494966] env[62070]: value = "task-1121800" [ 834.494966] env[62070]: _type = "Task" [ 834.494966] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 834.505266] env[62070]: DEBUG oslo_vmware.api [None req-afd9a476-b061-4376-a74f-1ae1bdb543db tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Task: {'id': task-1121800, 'name': Rename_Task} progress is 5%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 834.591441] env[62070]: DEBUG oslo_concurrency.lockutils [None req-b5d7932b-5fe7-4bae-8652-828c067cb041 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Releasing lock "refresh_cache-65fe3720-95cb-4620-b1c7-eae9e3bc3943" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 834.591806] env[62070]: DEBUG nova.compute.manager [None req-b5d7932b-5fe7-4bae-8652-828c067cb041 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: 65fe3720-95cb-4620-b1c7-eae9e3bc3943] Instance network_info: |[{"id": "0f1aed27-115c-407c-b787-21d92045c9fd", "address": "fa:16:3e:dd:b3:fe", "network": {"id": "516790be-56b8-409d-b1c0-a8683a45a9ec", "bridge": "br-int", "label": "tempest-ServersTestJSON-693737631-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "91e246e32f29422e90fae974cfee9d8f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "359850cc-b061-4c9c-a61c-eb42e0f7c359", "external-id": "nsx-vlan-transportzone-113", "segmentation_id": 113, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0f1aed27-11", "ovs_interfaceid": "0f1aed27-115c-407c-b787-21d92045c9fd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 834.592342] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-b5d7932b-5fe7-4bae-8652-828c067cb041 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: 65fe3720-95cb-4620-b1c7-eae9e3bc3943] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:dd:b3:fe', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '359850cc-b061-4c9c-a61c-eb42e0f7c359', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0f1aed27-115c-407c-b787-21d92045c9fd', 'vif_model': 'vmxnet3'}] {{(pid=62070) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 834.599922] env[62070]: DEBUG oslo.service.loopingcall [None req-b5d7932b-5fe7-4bae-8652-828c067cb041 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 834.600276] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 65fe3720-95cb-4620-b1c7-eae9e3bc3943] Creating VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 834.600557] env[62070]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-313c9271-ebe8-4f56-bb79-b312da9757b9 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.625236] env[62070]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 834.625236] env[62070]: value = "task-1121801" [ 834.625236] env[62070]: _type = "Task" [ 834.625236] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 834.635191] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1121801, 'name': CreateVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 834.646373] env[62070]: DEBUG oslo_vmware.api [None req-cbed1b11-9034-4566-ae5c-78171a6c24e6 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]5269e5ae-1180-3421-0a13-c4c94fb0e701, 'name': SearchDatastore_Task, 'duration_secs': 0.018567} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 834.647514] env[62070]: DEBUG oslo_concurrency.lockutils [None req-cbed1b11-9034-4566-ae5c-78171a6c24e6 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Releasing lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 834.647802] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-cbed1b11-9034-4566-ae5c-78171a6c24e6 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore1] e74fd58c-cfa8-45c4-8f02-96234b4a9192/e74fd58c-cfa8-45c4-8f02-96234b4a9192.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 834.650207] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-58c40a6e-03b1-42ac-954b-93d3ad0c4012 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.659409] env[62070]: DEBUG oslo_vmware.api [None req-cbed1b11-9034-4566-ae5c-78171a6c24e6 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Waiting for the task: (returnval){ [ 834.659409] env[62070]: value = "task-1121802" [ 834.659409] env[62070]: _type = "Task" [ 834.659409] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 834.669768] env[62070]: DEBUG oslo_vmware.api [None req-cbed1b11-9034-4566-ae5c-78171a6c24e6 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Task: {'id': task-1121802, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 834.836215] env[62070]: DEBUG oslo_concurrency.lockutils [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 5.341s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 834.839314] env[62070]: DEBUG nova.compute.manager [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] [instance: 27987ff6-77c9-4876-8b39-dcc20ce4158a] Start building networks asynchronously for instance. {{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 834.843186] env[62070]: DEBUG oslo_concurrency.lockutils [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 25.841s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 834.843186] env[62070]: INFO nova.compute.claims [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] [instance: e5deccf6-f967-4e3c-bee0-2e1ad0bb4560] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 834.861292] env[62070]: DEBUG nova.network.neutron [-] [instance: d0914f90-200c-4715-aaab-54beacf339b9] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 834.911162] env[62070]: DEBUG oslo_vmware.api [None req-ebb5909b-d360-4ca0-af63-0f75ec534104 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Task: {'id': task-1121799, 'name': ReconfigVM_Task, 'duration_secs': 0.772544} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 834.911478] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-ebb5909b-d360-4ca0-af63-0f75ec534104 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] [instance: 5ec9074b-1237-4404-b13c-a7ca0dbe1d43] Reconfigured VM instance instance-0000003e to attach disk [datastore1] 5ec9074b-1237-4404-b13c-a7ca0dbe1d43/43ea607c-7ece-4601-9b11-75c6a16aa7dd-rescue.vmdk or device None with type thin {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 834.912400] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb2e0a22-5f9c-4170-b252-39cbb193481e {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.945394] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7ee04078-ab1d-4d8d-9164-2be7c99b0ecb {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.966841] env[62070]: DEBUG oslo_vmware.api [None req-ebb5909b-d360-4ca0-af63-0f75ec534104 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Waiting for the task: (returnval){ [ 834.966841] env[62070]: value = "task-1121803" [ 834.966841] env[62070]: _type = "Task" [ 834.966841] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 834.980137] env[62070]: DEBUG oslo_vmware.api [None req-ebb5909b-d360-4ca0-af63-0f75ec534104 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Task: {'id': task-1121803, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.008422] env[62070]: DEBUG oslo_vmware.api [None req-afd9a476-b061-4376-a74f-1ae1bdb543db tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Task: {'id': task-1121800, 'name': Rename_Task, 'duration_secs': 0.173629} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 835.011106] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-afd9a476-b061-4376-a74f-1ae1bdb543db tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] [instance: 4a5f644a-1670-4c6b-a762-f87f1ee4cce5] Powering on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 835.012753] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-61fd6d3a-098d-4181-b8d1-0b1318188c73 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.023507] env[62070]: DEBUG oslo_vmware.api [None req-afd9a476-b061-4376-a74f-1ae1bdb543db tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Waiting for the task: (returnval){ [ 835.023507] env[62070]: value = "task-1121804" [ 835.023507] env[62070]: _type = "Task" [ 835.023507] env[62070]: } to complete. 
{{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 835.043326] env[62070]: DEBUG oslo_vmware.api [None req-afd9a476-b061-4376-a74f-1ae1bdb543db tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Task: {'id': task-1121804, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.136626] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1121801, 'name': CreateVM_Task} progress is 99%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.173588] env[62070]: DEBUG oslo_vmware.api [None req-cbed1b11-9034-4566-ae5c-78171a6c24e6 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Task: {'id': task-1121802, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.299905] env[62070]: DEBUG nova.network.neutron [req-e31ee6cc-b663-4014-b684-57f9794b4afd req-6f5b2328-190a-4bf1-89b3-77219919098b service nova] [instance: e74fd58c-cfa8-45c4-8f02-96234b4a9192] Updated VIF entry in instance network info cache for port 6444a30f-9c50-4eaf-b562-178b627dc0f1. {{(pid=62070) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 835.301099] env[62070]: DEBUG nova.network.neutron [req-e31ee6cc-b663-4014-b684-57f9794b4afd req-6f5b2328-190a-4bf1-89b3-77219919098b service nova] [instance: e74fd58c-cfa8-45c4-8f02-96234b4a9192] Updating instance_info_cache with network_info: [{"id": "6444a30f-9c50-4eaf-b562-178b627dc0f1", "address": "fa:16:3e:ec:99:7c", "network": {"id": "08004b49-dbc2-4186-9e28-4268e947e8ee", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-2022236674-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "2ecbd5f22c024de8a6b1c45096cb79a7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69e41c97-4d75-4041-ae71-321e7e9d480b", "external-id": "nsx-vlan-transportzone-483", "segmentation_id": 483, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6444a30f-9c", "ovs_interfaceid": "6444a30f-9c50-4eaf-b562-178b627dc0f1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 835.342494] env[62070]: DEBUG nova.compute.utils [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Using /dev/sd instead of None {{(pid=62070) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 835.344444] env[62070]: DEBUG nova.compute.manager [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] [instance: 27987ff6-77c9-4876-8b39-dcc20ce4158a] Allocating IP information in the background. 
{{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 835.344632] env[62070]: DEBUG nova.network.neutron [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] [instance: 27987ff6-77c9-4876-8b39-dcc20ce4158a] allocate_for_instance() {{(pid=62070) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 835.353735] env[62070]: DEBUG nova.compute.manager [req-f93aaaa2-70d1-4e4d-9294-1a812d253409 req-27a614cd-b585-4650-8a7f-4d46bbc53a60 service nova] [instance: 65fe3720-95cb-4620-b1c7-eae9e3bc3943] Received event network-vif-plugged-0f1aed27-115c-407c-b787-21d92045c9fd {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 835.353984] env[62070]: DEBUG oslo_concurrency.lockutils [req-f93aaaa2-70d1-4e4d-9294-1a812d253409 req-27a614cd-b585-4650-8a7f-4d46bbc53a60 service nova] Acquiring lock "65fe3720-95cb-4620-b1c7-eae9e3bc3943-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 835.354248] env[62070]: DEBUG oslo_concurrency.lockutils [req-f93aaaa2-70d1-4e4d-9294-1a812d253409 req-27a614cd-b585-4650-8a7f-4d46bbc53a60 service nova] Lock "65fe3720-95cb-4620-b1c7-eae9e3bc3943-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 835.354456] env[62070]: DEBUG oslo_concurrency.lockutils [req-f93aaaa2-70d1-4e4d-9294-1a812d253409 req-27a614cd-b585-4650-8a7f-4d46bbc53a60 service nova] Lock "65fe3720-95cb-4620-b1c7-eae9e3bc3943-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 835.354653] env[62070]: DEBUG nova.compute.manager [req-f93aaaa2-70d1-4e4d-9294-1a812d253409 req-27a614cd-b585-4650-8a7f-4d46bbc53a60 service nova] [instance: 65fe3720-95cb-4620-b1c7-eae9e3bc3943] No waiting events found dispatching network-vif-plugged-0f1aed27-115c-407c-b787-21d92045c9fd {{(pid=62070) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 835.354923] env[62070]: WARNING nova.compute.manager [req-f93aaaa2-70d1-4e4d-9294-1a812d253409 req-27a614cd-b585-4650-8a7f-4d46bbc53a60 service nova] [instance: 65fe3720-95cb-4620-b1c7-eae9e3bc3943] Received unexpected event network-vif-plugged-0f1aed27-115c-407c-b787-21d92045c9fd for instance with vm_state building and task_state spawning. [ 835.355121] env[62070]: DEBUG nova.compute.manager [req-f93aaaa2-70d1-4e4d-9294-1a812d253409 req-27a614cd-b585-4650-8a7f-4d46bbc53a60 service nova] [instance: 65fe3720-95cb-4620-b1c7-eae9e3bc3943] Received event network-changed-0f1aed27-115c-407c-b787-21d92045c9fd {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 835.355286] env[62070]: DEBUG nova.compute.manager [req-f93aaaa2-70d1-4e4d-9294-1a812d253409 req-27a614cd-b585-4650-8a7f-4d46bbc53a60 service nova] [instance: 65fe3720-95cb-4620-b1c7-eae9e3bc3943] Refreshing instance network info cache due to event network-changed-0f1aed27-115c-407c-b787-21d92045c9fd. 
{{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 835.357058] env[62070]: DEBUG oslo_concurrency.lockutils [req-f93aaaa2-70d1-4e4d-9294-1a812d253409 req-27a614cd-b585-4650-8a7f-4d46bbc53a60 service nova] Acquiring lock "refresh_cache-65fe3720-95cb-4620-b1c7-eae9e3bc3943" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 835.357264] env[62070]: DEBUG oslo_concurrency.lockutils [req-f93aaaa2-70d1-4e4d-9294-1a812d253409 req-27a614cd-b585-4650-8a7f-4d46bbc53a60 service nova] Acquired lock "refresh_cache-65fe3720-95cb-4620-b1c7-eae9e3bc3943" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 835.357467] env[62070]: DEBUG nova.network.neutron [req-f93aaaa2-70d1-4e4d-9294-1a812d253409 req-27a614cd-b585-4650-8a7f-4d46bbc53a60 service nova] [instance: 65fe3720-95cb-4620-b1c7-eae9e3bc3943] Refreshing network info cache for port 0f1aed27-115c-407c-b787-21d92045c9fd {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 835.365110] env[62070]: INFO nova.compute.manager [-] [instance: d0914f90-200c-4715-aaab-54beacf339b9] Took 1.89 seconds to deallocate network for instance. [ 835.406871] env[62070]: DEBUG nova.policy [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a0162190099744eba0d646a05de23435', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3e9dab208bda46418b994df4359da404', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62070) authorize /opt/stack/nova/nova/policy.py:201}} [ 835.479675] env[62070]: DEBUG oslo_vmware.api [None req-ebb5909b-d360-4ca0-af63-0f75ec534104 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Task: {'id': task-1121803, 'name': ReconfigVM_Task, 'duration_secs': 0.227457} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 835.480014] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-ebb5909b-d360-4ca0-af63-0f75ec534104 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] [instance: 5ec9074b-1237-4404-b13c-a7ca0dbe1d43] Powering on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 835.480288] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8b1e3a52-c1f4-4626-8429-cf6af4cf843b {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.489238] env[62070]: DEBUG oslo_vmware.api [None req-ebb5909b-d360-4ca0-af63-0f75ec534104 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Waiting for the task: (returnval){ [ 835.489238] env[62070]: value = "task-1121805" [ 835.489238] env[62070]: _type = "Task" [ 835.489238] env[62070]: } to complete. 
{{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 835.499674] env[62070]: DEBUG oslo_vmware.api [None req-ebb5909b-d360-4ca0-af63-0f75ec534104 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Task: {'id': task-1121805, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.536455] env[62070]: DEBUG oslo_vmware.api [None req-afd9a476-b061-4376-a74f-1ae1bdb543db tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Task: {'id': task-1121804, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.636483] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1121801, 'name': CreateVM_Task, 'duration_secs': 0.514295} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 835.636672] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 65fe3720-95cb-4620-b1c7-eae9e3bc3943] Created VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 835.637391] env[62070]: DEBUG oslo_concurrency.lockutils [None req-b5d7932b-5fe7-4bae-8652-828c067cb041 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 835.637549] env[62070]: DEBUG oslo_concurrency.lockutils [None req-b5d7932b-5fe7-4bae-8652-828c067cb041 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Acquired lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 835.637885] env[62070]: DEBUG oslo_concurrency.lockutils [None req-b5d7932b-5fe7-4bae-8652-828c067cb041 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 835.638172] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a213039a-8998-45b4-aef0-e5a5b4cf3f66 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.644921] env[62070]: DEBUG oslo_vmware.api [None req-b5d7932b-5fe7-4bae-8652-828c067cb041 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Waiting for the task: (returnval){ [ 835.644921] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]52ac6a1c-2434-db85-ee5b-4088458ec217" [ 835.644921] env[62070]: _type = "Task" [ 835.644921] env[62070]: } to complete. 
{{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 835.655129] env[62070]: DEBUG oslo_vmware.api [None req-b5d7932b-5fe7-4bae-8652-828c067cb041 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52ac6a1c-2434-db85-ee5b-4088458ec217, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.669717] env[62070]: DEBUG oslo_vmware.api [None req-cbed1b11-9034-4566-ae5c-78171a6c24e6 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Task: {'id': task-1121802, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.601256} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 835.669996] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-cbed1b11-9034-4566-ae5c-78171a6c24e6 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore1] e74fd58c-cfa8-45c4-8f02-96234b4a9192/e74fd58c-cfa8-45c4-8f02-96234b4a9192.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 835.670675] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-cbed1b11-9034-4566-ae5c-78171a6c24e6 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] [instance: e74fd58c-cfa8-45c4-8f02-96234b4a9192] Extending root virtual disk to 1048576 {{(pid=62070) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 835.670675] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-dbe67c88-06e7-492f-99f3-7962ba347f55 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.679213] env[62070]: DEBUG oslo_vmware.api [None req-cbed1b11-9034-4566-ae5c-78171a6c24e6 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Waiting for the task: (returnval){ [ 835.679213] env[62070]: value = "task-1121806" [ 835.679213] env[62070]: _type = "Task" [ 835.679213] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 835.689027] env[62070]: DEBUG oslo_vmware.api [None req-cbed1b11-9034-4566-ae5c-78171a6c24e6 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Task: {'id': task-1121806, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.803987] env[62070]: DEBUG oslo_concurrency.lockutils [req-e31ee6cc-b663-4014-b684-57f9794b4afd req-6f5b2328-190a-4bf1-89b3-77219919098b service nova] Releasing lock "refresh_cache-e74fd58c-cfa8-45c4-8f02-96234b4a9192" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 835.804154] env[62070]: DEBUG nova.compute.manager [req-e31ee6cc-b663-4014-b684-57f9794b4afd req-6f5b2328-190a-4bf1-89b3-77219919098b service nova] [instance: c3c6e93c-80be-4e71-87fb-2ff8db8d30fe] Received event network-vif-deleted-8ed3d649-bc61-493f-b8e9-2e7f7fad49ed {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 835.806508] env[62070]: DEBUG nova.network.neutron [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] [instance: 27987ff6-77c9-4876-8b39-dcc20ce4158a] Successfully created port: ec5674a0-9a0f-48f3-ad88-00fe5f326e8c {{(pid=62070) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 835.850025] env[62070]: DEBUG nova.compute.manager [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] [instance: 27987ff6-77c9-4876-8b39-dcc20ce4158a] Start building block device mappings for instance. {{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 835.884517] env[62070]: DEBUG oslo_concurrency.lockutils [None req-045c7ab7-65c9-449f-b5dd-885011718b78 tempest-ServersTestManualDisk-1129474164 tempest-ServersTestManualDisk-1129474164-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 836.006177] env[62070]: DEBUG oslo_vmware.api [None req-ebb5909b-d360-4ca0-af63-0f75ec534104 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Task: {'id': task-1121805, 'name': PowerOnVM_Task, 'duration_secs': 0.473517} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 836.008024] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-ebb5909b-d360-4ca0-af63-0f75ec534104 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] [instance: 5ec9074b-1237-4404-b13c-a7ca0dbe1d43] Powered on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 836.010396] env[62070]: DEBUG nova.compute.manager [None req-ebb5909b-d360-4ca0-af63-0f75ec534104 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] [instance: 5ec9074b-1237-4404-b13c-a7ca0dbe1d43] Checking state {{(pid=62070) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 836.011471] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79a7e69e-09be-4f51-beb0-85f14776b3a8 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.041627] env[62070]: DEBUG oslo_vmware.api [None req-afd9a476-b061-4376-a74f-1ae1bdb543db tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Task: {'id': task-1121804, 'name': PowerOnVM_Task, 'duration_secs': 0.558334} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 836.042618] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-afd9a476-b061-4376-a74f-1ae1bdb543db tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] [instance: 4a5f644a-1670-4c6b-a762-f87f1ee4cce5] Powered on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 836.042618] env[62070]: INFO nova.compute.manager [None req-afd9a476-b061-4376-a74f-1ae1bdb543db tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] [instance: 4a5f644a-1670-4c6b-a762-f87f1ee4cce5] Took 14.95 seconds to spawn the instance on the hypervisor. [ 836.042618] env[62070]: DEBUG nova.compute.manager [None req-afd9a476-b061-4376-a74f-1ae1bdb543db tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] [instance: 4a5f644a-1670-4c6b-a762-f87f1ee4cce5] Checking state {{(pid=62070) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 836.043440] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d98e2869-ec9c-4189-817e-2b7d3adc8544 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.157460] env[62070]: DEBUG oslo_vmware.api [None req-b5d7932b-5fe7-4bae-8652-828c067cb041 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52ac6a1c-2434-db85-ee5b-4088458ec217, 'name': SearchDatastore_Task, 'duration_secs': 0.011291} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 836.160302] env[62070]: DEBUG oslo_concurrency.lockutils [None req-b5d7932b-5fe7-4bae-8652-828c067cb041 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Releasing lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 836.160606] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-b5d7932b-5fe7-4bae-8652-828c067cb041 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: 65fe3720-95cb-4620-b1c7-eae9e3bc3943] Processing image 43ea607c-7ece-4601-9b11-75c6a16aa7dd {{(pid=62070) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 836.160850] env[62070]: DEBUG oslo_concurrency.lockutils [None req-b5d7932b-5fe7-4bae-8652-828c067cb041 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 836.161018] env[62070]: DEBUG oslo_concurrency.lockutils [None req-b5d7932b-5fe7-4bae-8652-828c067cb041 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Acquired lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 836.161450] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-b5d7932b-5fe7-4bae-8652-828c067cb041 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 836.161874] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7d4024b6-6d91-4ebe-bbc4-549bb01dc814 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.172837] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-b5d7932b-5fe7-4bae-8652-828c067cb041 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 836.173052] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-b5d7932b-5fe7-4bae-8652-828c067cb041 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62070) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 836.174204] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9237af0d-02f5-4fec-a5c2-95977c6cda0f {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.186775] env[62070]: DEBUG oslo_vmware.api [None req-b5d7932b-5fe7-4bae-8652-828c067cb041 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Waiting for the task: (returnval){ [ 836.186775] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]5216ecb4-8be4-aae9-ef24-27f68ae4a09e" [ 836.186775] env[62070]: _type = "Task" [ 836.186775] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 836.193487] env[62070]: DEBUG oslo_vmware.api [None req-cbed1b11-9034-4566-ae5c-78171a6c24e6 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Task: {'id': task-1121806, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.091412} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 836.194192] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-cbed1b11-9034-4566-ae5c-78171a6c24e6 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] [instance: e74fd58c-cfa8-45c4-8f02-96234b4a9192] Extended root virtual disk {{(pid=62070) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 836.195226] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-730e7c49-ba8c-4da3-aa0a-578a16ea5f98 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.205126] env[62070]: DEBUG oslo_vmware.api [None req-b5d7932b-5fe7-4bae-8652-828c067cb041 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]5216ecb4-8be4-aae9-ef24-27f68ae4a09e, 'name': SearchDatastore_Task, 'duration_secs': 0.010373} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 836.206448] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-62410694-3f91-4f05-9f24-27ad1d57baeb {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.227901] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-cbed1b11-9034-4566-ae5c-78171a6c24e6 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] [instance: e74fd58c-cfa8-45c4-8f02-96234b4a9192] Reconfiguring VM instance instance-00000040 to attach disk [datastore1] e74fd58c-cfa8-45c4-8f02-96234b4a9192/e74fd58c-cfa8-45c4-8f02-96234b4a9192.vmdk or device None with type sparse {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 836.231666] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-49014a9c-9269-42e2-b3f2-762f08955729 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.248292] env[62070]: DEBUG oslo_vmware.api [None req-b5d7932b-5fe7-4bae-8652-828c067cb041 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Waiting for the task: (returnval){ [ 836.248292] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]52ca0bab-0321-5089-baad-e3dc548d5ef3" [ 836.248292] env[62070]: _type = "Task" [ 836.248292] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 836.253894] env[62070]: DEBUG oslo_vmware.api [None req-cbed1b11-9034-4566-ae5c-78171a6c24e6 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Waiting for the task: (returnval){ [ 836.253894] env[62070]: value = "task-1121807" [ 836.253894] env[62070]: _type = "Task" [ 836.253894] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 836.257706] env[62070]: DEBUG oslo_vmware.api [None req-b5d7932b-5fe7-4bae-8652-828c067cb041 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52ca0bab-0321-5089-baad-e3dc548d5ef3, 'name': SearchDatastore_Task, 'duration_secs': 0.009821} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 836.263321] env[62070]: DEBUG oslo_concurrency.lockutils [None req-b5d7932b-5fe7-4bae-8652-828c067cb041 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Releasing lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 836.263606] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-b5d7932b-5fe7-4bae-8652-828c067cb041 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore2] 65fe3720-95cb-4620-b1c7-eae9e3bc3943/65fe3720-95cb-4620-b1c7-eae9e3bc3943.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 836.264093] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7a2ecafe-2934-453e-a87b-7163c99c76f8 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.272253] env[62070]: DEBUG oslo_vmware.api [None req-cbed1b11-9034-4566-ae5c-78171a6c24e6 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Task: {'id': task-1121807, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 836.273583] env[62070]: DEBUG oslo_vmware.api [None req-b5d7932b-5fe7-4bae-8652-828c067cb041 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Waiting for the task: (returnval){ [ 836.273583] env[62070]: value = "task-1121808" [ 836.273583] env[62070]: _type = "Task" [ 836.273583] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 836.286573] env[62070]: DEBUG oslo_vmware.api [None req-b5d7932b-5fe7-4bae-8652-828c067cb041 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Task: {'id': task-1121808, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 836.399943] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b248b360-ae5a-4c5f-ae4b-ad478b5a93d1 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.410603] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b07f642-35e1-4e9e-bf06-eabc03bf93fa {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.457649] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d4ff56a-c577-494e-92cd-5b36ff2d5ed3 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.467179] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de56c8f2-9d8f-428a-a9cf-1478f3cca0db {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.483598] env[62070]: DEBUG nova.compute.provider_tree [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 836.554077] env[62070]: DEBUG nova.network.neutron [req-f93aaaa2-70d1-4e4d-9294-1a812d253409 req-27a614cd-b585-4650-8a7f-4d46bbc53a60 service nova] [instance: 65fe3720-95cb-4620-b1c7-eae9e3bc3943] Updated VIF entry in instance network info cache for port 0f1aed27-115c-407c-b787-21d92045c9fd. 
{{(pid=62070) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 836.554675] env[62070]: DEBUG nova.network.neutron [req-f93aaaa2-70d1-4e4d-9294-1a812d253409 req-27a614cd-b585-4650-8a7f-4d46bbc53a60 service nova] [instance: 65fe3720-95cb-4620-b1c7-eae9e3bc3943] Updating instance_info_cache with network_info: [{"id": "0f1aed27-115c-407c-b787-21d92045c9fd", "address": "fa:16:3e:dd:b3:fe", "network": {"id": "516790be-56b8-409d-b1c0-a8683a45a9ec", "bridge": "br-int", "label": "tempest-ServersTestJSON-693737631-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "91e246e32f29422e90fae974cfee9d8f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "359850cc-b061-4c9c-a61c-eb42e0f7c359", "external-id": "nsx-vlan-transportzone-113", "segmentation_id": 113, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0f1aed27-11", "ovs_interfaceid": "0f1aed27-115c-407c-b787-21d92045c9fd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 836.569434] env[62070]: INFO nova.compute.manager [None req-afd9a476-b061-4376-a74f-1ae1bdb543db tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] [instance: 4a5f644a-1670-4c6b-a762-f87f1ee4cce5] Took 48.13 seconds to build instance. [ 836.771580] env[62070]: DEBUG oslo_vmware.api [None req-cbed1b11-9034-4566-ae5c-78171a6c24e6 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Task: {'id': task-1121807, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 836.787814] env[62070]: DEBUG oslo_vmware.api [None req-b5d7932b-5fe7-4bae-8652-828c067cb041 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Task: {'id': task-1121808, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.492999} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 836.788589] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-b5d7932b-5fe7-4bae-8652-828c067cb041 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore2] 65fe3720-95cb-4620-b1c7-eae9e3bc3943/65fe3720-95cb-4620-b1c7-eae9e3bc3943.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 836.788733] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-b5d7932b-5fe7-4bae-8652-828c067cb041 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: 65fe3720-95cb-4620-b1c7-eae9e3bc3943] Extending root virtual disk to 1048576 {{(pid=62070) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 836.788905] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3a3b618e-f5ed-4f5a-87af-b5b559865fce {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.798021] env[62070]: DEBUG oslo_vmware.api [None req-b5d7932b-5fe7-4bae-8652-828c067cb041 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Waiting for the task: (returnval){ [ 836.798021] env[62070]: value = "task-1121809" [ 836.798021] env[62070]: _type = "Task" [ 836.798021] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 836.808929] env[62070]: DEBUG oslo_vmware.api [None req-b5d7932b-5fe7-4bae-8652-828c067cb041 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Task: {'id': task-1121809, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 836.865661] env[62070]: DEBUG nova.compute.manager [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] [instance: 27987ff6-77c9-4876-8b39-dcc20ce4158a] Start spawning the instance on the hypervisor. 
{{(pid=62070) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 836.900629] env[62070]: DEBUG nova.virt.hardware [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T09:21:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T09:21:17Z,direct_url=,disk_format='vmdk',id=43ea607c-7ece-4601-9b11-75c6a16aa7dd,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9d42cb2bbadf40d6b35f237f71234611',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T09:21:18Z,virtual_size=,visibility=), allow threads: False {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 836.900911] env[62070]: DEBUG nova.virt.hardware [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Flavor limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 836.901097] env[62070]: DEBUG nova.virt.hardware [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Image limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 836.901291] env[62070]: DEBUG nova.virt.hardware [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Flavor pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 836.901477] env[62070]: DEBUG nova.virt.hardware [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Image pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 836.901640] env[62070]: DEBUG nova.virt.hardware [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 836.901859] env[62070]: DEBUG nova.virt.hardware [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 836.902039] env[62070]: DEBUG nova.virt.hardware [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 836.902220] env[62070]: DEBUG nova.virt.hardware [None 
req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Got 1 possible topologies {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 836.902543] env[62070]: DEBUG nova.virt.hardware [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 836.902608] env[62070]: DEBUG nova.virt.hardware [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 836.903608] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13232db4-2335-44a5-bce3-a11c2c7c90f1 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.912539] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a5db890-0722-4875-861a-748bdcc26f27 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.987195] env[62070]: DEBUG nova.scheduler.client.report [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 837.057778] env[62070]: DEBUG oslo_concurrency.lockutils [req-f93aaaa2-70d1-4e4d-9294-1a812d253409 req-27a614cd-b585-4650-8a7f-4d46bbc53a60 service nova] Releasing lock "refresh_cache-65fe3720-95cb-4620-b1c7-eae9e3bc3943" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 837.058448] env[62070]: DEBUG nova.compute.manager [req-f93aaaa2-70d1-4e4d-9294-1a812d253409 req-27a614cd-b585-4650-8a7f-4d46bbc53a60 service nova] [instance: d0914f90-200c-4715-aaab-54beacf339b9] Received event network-vif-deleted-e23bf645-d900-4495-8917-316b3ab16ce6 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 837.071466] env[62070]: DEBUG oslo_concurrency.lockutils [None req-afd9a476-b061-4376-a74f-1ae1bdb543db tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Lock "4a5f644a-1670-4c6b-a762-f87f1ee4cce5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 116.087s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 837.273581] env[62070]: DEBUG oslo_vmware.api [None req-cbed1b11-9034-4566-ae5c-78171a6c24e6 
tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Task: {'id': task-1121807, 'name': ReconfigVM_Task, 'duration_secs': 0.556689} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 837.273972] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-cbed1b11-9034-4566-ae5c-78171a6c24e6 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] [instance: e74fd58c-cfa8-45c4-8f02-96234b4a9192] Reconfigured VM instance instance-00000040 to attach disk [datastore1] e74fd58c-cfa8-45c4-8f02-96234b4a9192/e74fd58c-cfa8-45c4-8f02-96234b4a9192.vmdk or device None with type sparse {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 837.275008] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a8ff400f-8f92-4463-b4da-410b29fac7b4 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.282839] env[62070]: DEBUG oslo_vmware.api [None req-cbed1b11-9034-4566-ae5c-78171a6c24e6 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Waiting for the task: (returnval){ [ 837.282839] env[62070]: value = "task-1121810" [ 837.282839] env[62070]: _type = "Task" [ 837.282839] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 837.291788] env[62070]: DEBUG oslo_vmware.api [None req-cbed1b11-9034-4566-ae5c-78171a6c24e6 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Task: {'id': task-1121810, 'name': Rename_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 837.305787] env[62070]: DEBUG oslo_vmware.api [None req-b5d7932b-5fe7-4bae-8652-828c067cb041 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Task: {'id': task-1121809, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074763} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 837.306313] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-b5d7932b-5fe7-4bae-8652-828c067cb041 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: 65fe3720-95cb-4620-b1c7-eae9e3bc3943] Extended root virtual disk {{(pid=62070) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 837.307255] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52e7d710-8dd6-4758-b8cc-82a36eeae7f9 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.339109] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-b5d7932b-5fe7-4bae-8652-828c067cb041 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: 65fe3720-95cb-4620-b1c7-eae9e3bc3943] Reconfiguring VM instance instance-00000041 to attach disk [datastore2] 65fe3720-95cb-4620-b1c7-eae9e3bc3943/65fe3720-95cb-4620-b1c7-eae9e3bc3943.vmdk or device None with type sparse {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 837.339487] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a7e63dd4-b0f9-4b2a-a2ba-2a44bec1c022 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.362418] env[62070]: DEBUG oslo_vmware.api [None req-b5d7932b-5fe7-4bae-8652-828c067cb041 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Waiting for the task: (returnval){ [ 837.362418] env[62070]: value = "task-1121811" [ 837.362418] env[62070]: _type = "Task" [ 837.362418] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 837.373344] env[62070]: DEBUG oslo_vmware.api [None req-b5d7932b-5fe7-4bae-8652-828c067cb041 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Task: {'id': task-1121811, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 837.492939] env[62070]: DEBUG oslo_concurrency.lockutils [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.652s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 837.493537] env[62070]: DEBUG nova.compute.manager [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] [instance: e5deccf6-f967-4e3c-bee0-2e1ad0bb4560] Start building networks asynchronously for instance. 
{{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 837.497015] env[62070]: DEBUG oslo_concurrency.lockutils [None req-08718df5-dcf0-4f5e-b998-717443ddf3b7 tempest-ServersTestFqdnHostnames-1065104165 tempest-ServersTestFqdnHostnames-1065104165-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 27.293s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 837.497015] env[62070]: DEBUG oslo_concurrency.lockutils [None req-08718df5-dcf0-4f5e-b998-717443ddf3b7 tempest-ServersTestFqdnHostnames-1065104165 tempest-ServersTestFqdnHostnames-1065104165-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 837.499042] env[62070]: DEBUG oslo_concurrency.lockutils [None req-7ef3c4d0-3bb0-4ce7-af88-13da4a0bcc02 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.903s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 837.500795] env[62070]: INFO nova.compute.claims [None req-7ef3c4d0-3bb0-4ce7-af88-13da4a0bcc02 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: c16d175c-0b23-4f72-bdb0-844c6f80fd32] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 837.541601] env[62070]: INFO nova.scheduler.client.report [None req-08718df5-dcf0-4f5e-b998-717443ddf3b7 tempest-ServersTestFqdnHostnames-1065104165 tempest-ServersTestFqdnHostnames-1065104165-project-member] Deleted allocations for instance 748c94c7-1233-44f4-a71a-176b26518399 [ 837.576144] env[62070]: DEBUG nova.compute.manager [None req-cdabb9ea-ead2-4381-8d7a-de546f5dce91 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 58146b84-7589-4f21-bdab-605cee535e55] Starting instance... {{(pid=62070) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 837.688066] env[62070]: DEBUG nova.compute.manager [req-ee71e1ef-3a83-4d46-b4d5-de73efa9d137 req-1d751673-ea55-445b-9516-a4ca3034d7eb service nova] [instance: 4a5f644a-1670-4c6b-a762-f87f1ee4cce5] Received event network-changed-3a8213ef-a979-487a-8756-7bfecdf4ba10 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 837.688395] env[62070]: DEBUG nova.compute.manager [req-ee71e1ef-3a83-4d46-b4d5-de73efa9d137 req-1d751673-ea55-445b-9516-a4ca3034d7eb service nova] [instance: 4a5f644a-1670-4c6b-a762-f87f1ee4cce5] Refreshing instance network info cache due to event network-changed-3a8213ef-a979-487a-8756-7bfecdf4ba10. 
{{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 837.688648] env[62070]: DEBUG oslo_concurrency.lockutils [req-ee71e1ef-3a83-4d46-b4d5-de73efa9d137 req-1d751673-ea55-445b-9516-a4ca3034d7eb service nova] Acquiring lock "refresh_cache-4a5f644a-1670-4c6b-a762-f87f1ee4cce5" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 837.688841] env[62070]: DEBUG oslo_concurrency.lockutils [req-ee71e1ef-3a83-4d46-b4d5-de73efa9d137 req-1d751673-ea55-445b-9516-a4ca3034d7eb service nova] Acquired lock "refresh_cache-4a5f644a-1670-4c6b-a762-f87f1ee4cce5" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 837.689083] env[62070]: DEBUG nova.network.neutron [req-ee71e1ef-3a83-4d46-b4d5-de73efa9d137 req-1d751673-ea55-445b-9516-a4ca3034d7eb service nova] [instance: 4a5f644a-1670-4c6b-a762-f87f1ee4cce5] Refreshing network info cache for port 3a8213ef-a979-487a-8756-7bfecdf4ba10 {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 837.801122] env[62070]: DEBUG oslo_vmware.api [None req-cbed1b11-9034-4566-ae5c-78171a6c24e6 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Task: {'id': task-1121810, 'name': Rename_Task, 'duration_secs': 0.402976} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 837.801664] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-cbed1b11-9034-4566-ae5c-78171a6c24e6 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] [instance: e74fd58c-cfa8-45c4-8f02-96234b4a9192] Powering on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 837.801859] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9187fa34-0eca-46d2-8c4b-ac8ee3b24452 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.812017] env[62070]: DEBUG oslo_vmware.api [None req-cbed1b11-9034-4566-ae5c-78171a6c24e6 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Waiting for the task: (returnval){ [ 837.812017] env[62070]: value = "task-1121812" [ 837.812017] env[62070]: _type = "Task" [ 837.812017] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 837.824822] env[62070]: DEBUG oslo_vmware.api [None req-cbed1b11-9034-4566-ae5c-78171a6c24e6 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Task: {'id': task-1121812, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 837.852837] env[62070]: DEBUG nova.network.neutron [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] [instance: 27987ff6-77c9-4876-8b39-dcc20ce4158a] Successfully updated port: ec5674a0-9a0f-48f3-ad88-00fe5f326e8c {{(pid=62070) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 837.880271] env[62070]: DEBUG oslo_vmware.api [None req-b5d7932b-5fe7-4bae-8652-828c067cb041 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Task: {'id': task-1121811, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 837.976736] env[62070]: DEBUG nova.compute.manager [req-ecaaa890-3f53-4052-9357-1227b9948f3c req-1081ea2f-3f07-4e12-b70e-fe676fae41e3 service nova] [instance: 27987ff6-77c9-4876-8b39-dcc20ce4158a] Received event network-vif-plugged-ec5674a0-9a0f-48f3-ad88-00fe5f326e8c {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 837.976895] env[62070]: DEBUG oslo_concurrency.lockutils [req-ecaaa890-3f53-4052-9357-1227b9948f3c req-1081ea2f-3f07-4e12-b70e-fe676fae41e3 service nova] Acquiring lock "27987ff6-77c9-4876-8b39-dcc20ce4158a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 837.977551] env[62070]: DEBUG oslo_concurrency.lockutils [req-ecaaa890-3f53-4052-9357-1227b9948f3c req-1081ea2f-3f07-4e12-b70e-fe676fae41e3 service nova] Lock "27987ff6-77c9-4876-8b39-dcc20ce4158a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 837.977786] env[62070]: DEBUG oslo_concurrency.lockutils [req-ecaaa890-3f53-4052-9357-1227b9948f3c req-1081ea2f-3f07-4e12-b70e-fe676fae41e3 service nova] Lock "27987ff6-77c9-4876-8b39-dcc20ce4158a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 837.978259] env[62070]: DEBUG nova.compute.manager [req-ecaaa890-3f53-4052-9357-1227b9948f3c req-1081ea2f-3f07-4e12-b70e-fe676fae41e3 service nova] [instance: 27987ff6-77c9-4876-8b39-dcc20ce4158a] No waiting events found dispatching network-vif-plugged-ec5674a0-9a0f-48f3-ad88-00fe5f326e8c {{(pid=62070) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 837.978259] env[62070]: WARNING nova.compute.manager [req-ecaaa890-3f53-4052-9357-1227b9948f3c req-1081ea2f-3f07-4e12-b70e-fe676fae41e3 service nova] [instance: 27987ff6-77c9-4876-8b39-dcc20ce4158a] Received unexpected event network-vif-plugged-ec5674a0-9a0f-48f3-ad88-00fe5f326e8c for instance with vm_state building and task_state spawning. 
[ 838.006116] env[62070]: DEBUG nova.compute.utils [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Using /dev/sd instead of None {{(pid=62070) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 838.014465] env[62070]: DEBUG nova.compute.manager [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] [instance: e5deccf6-f967-4e3c-bee0-2e1ad0bb4560] Allocating IP information in the background. {{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 838.016357] env[62070]: DEBUG nova.network.neutron [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] [instance: e5deccf6-f967-4e3c-bee0-2e1ad0bb4560] allocate_for_instance() {{(pid=62070) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 838.056633] env[62070]: DEBUG oslo_concurrency.lockutils [None req-08718df5-dcf0-4f5e-b998-717443ddf3b7 tempest-ServersTestFqdnHostnames-1065104165 tempest-ServersTestFqdnHostnames-1065104165-project-member] Lock "748c94c7-1233-44f4-a71a-176b26518399" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 31.526s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 838.071713] env[62070]: DEBUG nova.policy [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a0162190099744eba0d646a05de23435', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3e9dab208bda46418b994df4359da404', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62070) authorize /opt/stack/nova/nova/policy.py:201}} [ 838.106581] env[62070]: DEBUG oslo_concurrency.lockutils [None req-cdabb9ea-ead2-4381-8d7a-de546f5dce91 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 838.152666] env[62070]: DEBUG oslo_concurrency.lockutils [None req-744939dd-921d-434e-9717-ed223ad0ac5b tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Acquiring lock "71aead12-a182-40a7-b5a9-91c01271b800" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 838.156036] env[62070]: DEBUG oslo_concurrency.lockutils [None req-744939dd-921d-434e-9717-ed223ad0ac5b tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Lock "71aead12-a182-40a7-b5a9-91c01271b800" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 838.156036] 
env[62070]: DEBUG nova.compute.manager [None req-744939dd-921d-434e-9717-ed223ad0ac5b tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 71aead12-a182-40a7-b5a9-91c01271b800] Checking state {{(pid=62070) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 838.156036] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e4cb6c1-53c7-4fd7-b5f4-ea78af696206 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.162474] env[62070]: DEBUG nova.compute.manager [None req-744939dd-921d-434e-9717-ed223ad0ac5b tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 71aead12-a182-40a7-b5a9-91c01271b800] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62070) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3368}} [ 838.163125] env[62070]: DEBUG nova.objects.instance [None req-744939dd-921d-434e-9717-ed223ad0ac5b tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Lazy-loading 'flavor' on Instance uuid 71aead12-a182-40a7-b5a9-91c01271b800 {{(pid=62070) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 838.331362] env[62070]: DEBUG oslo_vmware.api [None req-cbed1b11-9034-4566-ae5c-78171a6c24e6 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Task: {'id': task-1121812, 'name': PowerOnVM_Task} progress is 96%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 838.357736] env[62070]: DEBUG oslo_concurrency.lockutils [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Acquiring lock "refresh_cache-27987ff6-77c9-4876-8b39-dcc20ce4158a" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 838.358092] env[62070]: DEBUG oslo_concurrency.lockutils [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Acquired lock "refresh_cache-27987ff6-77c9-4876-8b39-dcc20ce4158a" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 838.358314] env[62070]: DEBUG nova.network.neutron [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] [instance: 27987ff6-77c9-4876-8b39-dcc20ce4158a] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 838.386276] env[62070]: DEBUG oslo_vmware.api [None req-b5d7932b-5fe7-4bae-8652-828c067cb041 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Task: {'id': task-1121811, 'name': ReconfigVM_Task, 'duration_secs': 0.643972} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 838.386276] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-b5d7932b-5fe7-4bae-8652-828c067cb041 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: 65fe3720-95cb-4620-b1c7-eae9e3bc3943] Reconfigured VM instance instance-00000041 to attach disk [datastore2] 65fe3720-95cb-4620-b1c7-eae9e3bc3943/65fe3720-95cb-4620-b1c7-eae9e3bc3943.vmdk or device None with type sparse {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 838.387220] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-12f4909c-b148-4d69-ae85-885914ef3342 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.398647] env[62070]: DEBUG oslo_vmware.api [None req-b5d7932b-5fe7-4bae-8652-828c067cb041 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Waiting for the task: (returnval){ [ 838.398647] env[62070]: value = "task-1121813" [ 838.398647] env[62070]: _type = "Task" [ 838.398647] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 838.422528] env[62070]: DEBUG oslo_vmware.api [None req-b5d7932b-5fe7-4bae-8652-828c067cb041 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Task: {'id': task-1121813, 'name': Rename_Task} progress is 6%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 838.512577] env[62070]: DEBUG nova.compute.manager [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] [instance: e5deccf6-f967-4e3c-bee0-2e1ad0bb4560] Start building block device mappings for instance. {{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 838.672373] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-744939dd-921d-434e-9717-ed223ad0ac5b tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 71aead12-a182-40a7-b5a9-91c01271b800] Powering off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 838.672373] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-26773e46-64ca-4005-86cb-43d8cfbb5408 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.683508] env[62070]: DEBUG oslo_vmware.api [None req-744939dd-921d-434e-9717-ed223ad0ac5b tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Waiting for the task: (returnval){ [ 838.683508] env[62070]: value = "task-1121814" [ 838.683508] env[62070]: _type = "Task" [ 838.683508] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 838.699595] env[62070]: DEBUG oslo_vmware.api [None req-744939dd-921d-434e-9717-ed223ad0ac5b tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Task: {'id': task-1121814, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 838.727209] env[62070]: DEBUG nova.network.neutron [req-ee71e1ef-3a83-4d46-b4d5-de73efa9d137 req-1d751673-ea55-445b-9516-a4ca3034d7eb service nova] [instance: 4a5f644a-1670-4c6b-a762-f87f1ee4cce5] Updated VIF entry in instance network info cache for port 3a8213ef-a979-487a-8756-7bfecdf4ba10. {{(pid=62070) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 838.727209] env[62070]: DEBUG nova.network.neutron [req-ee71e1ef-3a83-4d46-b4d5-de73efa9d137 req-1d751673-ea55-445b-9516-a4ca3034d7eb service nova] [instance: 4a5f644a-1670-4c6b-a762-f87f1ee4cce5] Updating instance_info_cache with network_info: [{"id": "3a8213ef-a979-487a-8756-7bfecdf4ba10", "address": "fa:16:3e:25:2e:9f", "network": {"id": "443d2d62-bcef-44b2-814a-3e5dc50abc04", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-772061432-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.183", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e85c0cc8e0f544bfbb76970d3123fbb7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5e1049e8-c06b-4c93-a9e1-2cbb530f3f95", "external-id": "nsx-vlan-transportzone-966", "segmentation_id": 966, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3a8213ef-a9", "ovs_interfaceid": "3a8213ef-a979-487a-8756-7bfecdf4ba10", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 838.838088] env[62070]: DEBUG oslo_vmware.api [None req-cbed1b11-9034-4566-ae5c-78171a6c24e6 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Task: {'id': task-1121812, 'name': PowerOnVM_Task, 'duration_secs': 0.583605} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 838.838385] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-cbed1b11-9034-4566-ae5c-78171a6c24e6 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] [instance: e74fd58c-cfa8-45c4-8f02-96234b4a9192] Powered on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 838.838587] env[62070]: INFO nova.compute.manager [None req-cbed1b11-9034-4566-ae5c-78171a6c24e6 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] [instance: e74fd58c-cfa8-45c4-8f02-96234b4a9192] Took 9.77 seconds to spawn the instance on the hypervisor. 
[ 838.838849] env[62070]: DEBUG nova.compute.manager [None req-cbed1b11-9034-4566-ae5c-78171a6c24e6 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] [instance: e74fd58c-cfa8-45c4-8f02-96234b4a9192] Checking state {{(pid=62070) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 838.839738] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6b8cea6-8f37-4ca8-8ac4-4bb4b1d326a5 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.848366] env[62070]: DEBUG nova.network.neutron [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] [instance: e5deccf6-f967-4e3c-bee0-2e1ad0bb4560] Successfully created port: 63044c40-0b6c-4711-9987-e4b6dec9f8b5 {{(pid=62070) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 838.916238] env[62070]: DEBUG oslo_vmware.api [None req-b5d7932b-5fe7-4bae-8652-828c067cb041 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Task: {'id': task-1121813, 'name': Rename_Task, 'duration_secs': 0.287718} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 838.916633] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-b5d7932b-5fe7-4bae-8652-828c067cb041 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: 65fe3720-95cb-4620-b1c7-eae9e3bc3943] Powering on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 838.919386] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-15ff562f-1a2a-4118-98ff-cf5499bb0452 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.929325] env[62070]: DEBUG oslo_concurrency.lockutils [None req-3861d588-4806-4a38-9554-f7a6f721db6e tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Acquiring lock "f75ed36e-16c8-4a6b-bd39-eb4057ef0691" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 838.929601] env[62070]: DEBUG oslo_concurrency.lockutils [None req-3861d588-4806-4a38-9554-f7a6f721db6e tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Lock "f75ed36e-16c8-4a6b-bd39-eb4057ef0691" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 838.929857] env[62070]: DEBUG oslo_concurrency.lockutils [None req-3861d588-4806-4a38-9554-f7a6f721db6e tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Acquiring lock "f75ed36e-16c8-4a6b-bd39-eb4057ef0691-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 838.930061] env[62070]: DEBUG oslo_concurrency.lockutils [None req-3861d588-4806-4a38-9554-f7a6f721db6e tempest-ListServerFiltersTestJSON-1772926812 
tempest-ListServerFiltersTestJSON-1772926812-project-member] Lock "f75ed36e-16c8-4a6b-bd39-eb4057ef0691-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 838.930242] env[62070]: DEBUG oslo_concurrency.lockutils [None req-3861d588-4806-4a38-9554-f7a6f721db6e tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Lock "f75ed36e-16c8-4a6b-bd39-eb4057ef0691-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 838.933044] env[62070]: DEBUG oslo_vmware.api [None req-b5d7932b-5fe7-4bae-8652-828c067cb041 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Waiting for the task: (returnval){ [ 838.933044] env[62070]: value = "task-1121815" [ 838.933044] env[62070]: _type = "Task" [ 838.933044] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 838.934088] env[62070]: INFO nova.compute.manager [None req-3861d588-4806-4a38-9554-f7a6f721db6e tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] [instance: f75ed36e-16c8-4a6b-bd39-eb4057ef0691] Terminating instance [ 838.936669] env[62070]: DEBUG nova.network.neutron [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] [instance: 27987ff6-77c9-4876-8b39-dcc20ce4158a] Instance cache missing network info. {{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 838.943580] env[62070]: DEBUG nova.compute.manager [None req-3861d588-4806-4a38-9554-f7a6f721db6e tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] [instance: f75ed36e-16c8-4a6b-bd39-eb4057ef0691] Start destroying the instance on the hypervisor. {{(pid=62070) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 838.943746] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-3861d588-4806-4a38-9554-f7a6f721db6e tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] [instance: f75ed36e-16c8-4a6b-bd39-eb4057ef0691] Destroying instance {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 838.949346] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b37db2db-502e-444e-8a85-5c211cfbf4e3 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.967845] env[62070]: DEBUG oslo_vmware.api [None req-b5d7932b-5fe7-4bae-8652-828c067cb041 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Task: {'id': task-1121815, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 838.968369] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-3861d588-4806-4a38-9554-f7a6f721db6e tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] [instance: f75ed36e-16c8-4a6b-bd39-eb4057ef0691] Powering off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 838.968966] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-56087a15-35fc-4535-8e2e-ce16262fa62c {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.985188] env[62070]: DEBUG oslo_vmware.api [None req-3861d588-4806-4a38-9554-f7a6f721db6e tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Waiting for the task: (returnval){ [ 838.985188] env[62070]: value = "task-1121816" [ 838.985188] env[62070]: _type = "Task" [ 838.985188] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 839.006642] env[62070]: DEBUG oslo_vmware.api [None req-3861d588-4806-4a38-9554-f7a6f721db6e tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Task: {'id': task-1121816, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 839.100781] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-827cc216-b059-41f4-8a7b-d7a0e27a41b4 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.111773] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57bced15-e190-4a4d-bb72-d8a2b28eafe5 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.152665] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac782c25-d9ae-4ffa-9f19-ac3a284342f9 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.163795] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c981beb8-e0d6-409f-8594-a7687d5c9ad7 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.183662] env[62070]: DEBUG nova.compute.provider_tree [None req-7ef3c4d0-3bb0-4ce7-af88-13da4a0bcc02 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 839.195406] env[62070]: DEBUG oslo_vmware.api [None req-744939dd-921d-434e-9717-ed223ad0ac5b tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Task: {'id': task-1121814, 'name': PowerOffVM_Task, 'duration_secs': 0.402938} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 839.196629] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-744939dd-921d-434e-9717-ed223ad0ac5b tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 71aead12-a182-40a7-b5a9-91c01271b800] Powered off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 839.196872] env[62070]: DEBUG nova.compute.manager [None req-744939dd-921d-434e-9717-ed223ad0ac5b tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 71aead12-a182-40a7-b5a9-91c01271b800] Checking state {{(pid=62070) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 839.198063] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf766e81-fbff-468d-92fb-00f866ccbcb2 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.229045] env[62070]: DEBUG oslo_concurrency.lockutils [req-ee71e1ef-3a83-4d46-b4d5-de73efa9d137 req-1d751673-ea55-445b-9516-a4ca3034d7eb service nova] Releasing lock "refresh_cache-4a5f644a-1670-4c6b-a762-f87f1ee4cce5" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 839.368645] env[62070]: INFO nova.compute.manager [None req-cbed1b11-9034-4566-ae5c-78171a6c24e6 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] [instance: e74fd58c-cfa8-45c4-8f02-96234b4a9192] Took 45.71 seconds to build instance. [ 839.405580] env[62070]: DEBUG nova.network.neutron [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] [instance: 27987ff6-77c9-4876-8b39-dcc20ce4158a] Updating instance_info_cache with network_info: [{"id": "ec5674a0-9a0f-48f3-ad88-00fe5f326e8c", "address": "fa:16:3e:f8:f1:7f", "network": {"id": "b61ea502-bdfd-4ddf-8bb9-1f2f2f003f65", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-216003123-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3e9dab208bda46418b994df4359da404", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "099fe970-c61f-4480-bed4-ae4f485fd82a", "external-id": "nsx-vlan-transportzone-678", "segmentation_id": 678, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapec5674a0-9a", "ovs_interfaceid": "ec5674a0-9a0f-48f3-ad88-00fe5f326e8c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 839.454730] env[62070]: DEBUG oslo_vmware.api [None req-b5d7932b-5fe7-4bae-8652-828c067cb041 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Task: {'id': task-1121815, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 839.496482] env[62070]: DEBUG oslo_vmware.api [None req-3861d588-4806-4a38-9554-f7a6f721db6e tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Task: {'id': task-1121816, 'name': PowerOffVM_Task, 'duration_secs': 0.251226} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 839.496768] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-3861d588-4806-4a38-9554-f7a6f721db6e tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] [instance: f75ed36e-16c8-4a6b-bd39-eb4057ef0691] Powered off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 839.496938] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-3861d588-4806-4a38-9554-f7a6f721db6e tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] [instance: f75ed36e-16c8-4a6b-bd39-eb4057ef0691] Unregistering the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 839.498100] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1bd11d89-47e0-4c5d-8b89-faed33d9d669 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.531307] env[62070]: DEBUG nova.compute.manager [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] [instance: e5deccf6-f967-4e3c-bee0-2e1ad0bb4560] Start spawning the instance on the hypervisor. 
{{(pid=62070) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 839.558289] env[62070]: DEBUG nova.virt.hardware [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T09:21:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T09:21:17Z,direct_url=,disk_format='vmdk',id=43ea607c-7ece-4601-9b11-75c6a16aa7dd,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9d42cb2bbadf40d6b35f237f71234611',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T09:21:18Z,virtual_size=,visibility=), allow threads: False {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 839.561116] env[62070]: DEBUG nova.virt.hardware [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Flavor limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 839.561116] env[62070]: DEBUG nova.virt.hardware [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Image limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 839.561116] env[62070]: DEBUG nova.virt.hardware [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Flavor pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 839.561116] env[62070]: DEBUG nova.virt.hardware [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Image pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 839.561116] env[62070]: DEBUG nova.virt.hardware [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 839.561116] env[62070]: DEBUG nova.virt.hardware [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 839.561116] env[62070]: DEBUG nova.virt.hardware [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 839.561116] env[62070]: DEBUG nova.virt.hardware [None 
req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Got 1 possible topologies {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 839.561116] env[62070]: DEBUG nova.virt.hardware [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 839.561422] env[62070]: DEBUG nova.virt.hardware [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 839.562938] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19244709-782b-4b43-8719-9ac1b8368ccb {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.578328] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-337ed7e3-7293-4dde-85c6-ce20fd889932 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.584854] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-3861d588-4806-4a38-9554-f7a6f721db6e tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] [instance: f75ed36e-16c8-4a6b-bd39-eb4057ef0691] Unregistered the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 839.585147] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-3861d588-4806-4a38-9554-f7a6f721db6e tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] [instance: f75ed36e-16c8-4a6b-bd39-eb4057ef0691] Deleting contents of the VM from datastore datastore2 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 839.585349] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-3861d588-4806-4a38-9554-f7a6f721db6e tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Deleting the datastore file [datastore2] f75ed36e-16c8-4a6b-bd39-eb4057ef0691 {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 839.585626] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ffecf93d-3e1a-432a-8cdb-ffddcdce5a30 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.607133] env[62070]: DEBUG oslo_vmware.api [None req-3861d588-4806-4a38-9554-f7a6f721db6e tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Waiting for the task: (returnval){ [ 839.607133] env[62070]: value = "task-1121818" [ 839.607133] env[62070]: _type = "Task" [ 839.607133] env[62070]: } to complete. 
{{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 839.616642] env[62070]: DEBUG oslo_vmware.api [None req-3861d588-4806-4a38-9554-f7a6f721db6e tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Task: {'id': task-1121818, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 839.686844] env[62070]: DEBUG nova.scheduler.client.report [None req-7ef3c4d0-3bb0-4ce7-af88-13da4a0bcc02 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 839.717997] env[62070]: DEBUG oslo_concurrency.lockutils [None req-744939dd-921d-434e-9717-ed223ad0ac5b tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Lock "71aead12-a182-40a7-b5a9-91c01271b800" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.565s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 839.872557] env[62070]: DEBUG oslo_concurrency.lockutils [None req-cbed1b11-9034-4566-ae5c-78171a6c24e6 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Lock "e74fd58c-cfa8-45c4-8f02-96234b4a9192" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 94.509s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 839.909254] env[62070]: DEBUG oslo_concurrency.lockutils [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Releasing lock "refresh_cache-27987ff6-77c9-4876-8b39-dcc20ce4158a" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 839.909254] env[62070]: DEBUG nova.compute.manager [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] [instance: 27987ff6-77c9-4876-8b39-dcc20ce4158a] Instance network_info: |[{"id": "ec5674a0-9a0f-48f3-ad88-00fe5f326e8c", "address": "fa:16:3e:f8:f1:7f", "network": {"id": "b61ea502-bdfd-4ddf-8bb9-1f2f2f003f65", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-216003123-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3e9dab208bda46418b994df4359da404", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": 
"l2", "port_filter": true, "nsx-logical-switch-id": "099fe970-c61f-4480-bed4-ae4f485fd82a", "external-id": "nsx-vlan-transportzone-678", "segmentation_id": 678, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapec5674a0-9a", "ovs_interfaceid": "ec5674a0-9a0f-48f3-ad88-00fe5f326e8c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 839.909254] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] [instance: 27987ff6-77c9-4876-8b39-dcc20ce4158a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f8:f1:7f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '099fe970-c61f-4480-bed4-ae4f485fd82a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ec5674a0-9a0f-48f3-ad88-00fe5f326e8c', 'vif_model': 'vmxnet3'}] {{(pid=62070) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 839.917041] env[62070]: DEBUG oslo.service.loopingcall [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 839.918539] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 27987ff6-77c9-4876-8b39-dcc20ce4158a] Creating VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 839.921594] env[62070]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9c5e9b30-fd9e-432f-994a-7e9a19f67a53 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.960033] env[62070]: DEBUG oslo_vmware.api [None req-b5d7932b-5fe7-4bae-8652-828c067cb041 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Task: {'id': task-1121815, 'name': PowerOnVM_Task, 'duration_secs': 0.846703} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 839.960033] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-b5d7932b-5fe7-4bae-8652-828c067cb041 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: 65fe3720-95cb-4620-b1c7-eae9e3bc3943] Powered on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 839.960033] env[62070]: INFO nova.compute.manager [None req-b5d7932b-5fe7-4bae-8652-828c067cb041 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: 65fe3720-95cb-4620-b1c7-eae9e3bc3943] Took 8.43 seconds to spawn the instance on the hypervisor. 
[ 839.960033] env[62070]: DEBUG nova.compute.manager [None req-b5d7932b-5fe7-4bae-8652-828c067cb041 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: 65fe3720-95cb-4620-b1c7-eae9e3bc3943] Checking state {{(pid=62070) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 839.960033] env[62070]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 839.960033] env[62070]: value = "task-1121819" [ 839.960033] env[62070]: _type = "Task" [ 839.960033] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 839.960740] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-579eeb97-9b30-47c5-8323-bc1524f04284 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.979911] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1121819, 'name': CreateVM_Task} progress is 5%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 840.118400] env[62070]: DEBUG oslo_vmware.api [None req-3861d588-4806-4a38-9554-f7a6f721db6e tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Task: {'id': task-1121818, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.163244} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 840.118692] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-3861d588-4806-4a38-9554-f7a6f721db6e tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Deleted the datastore file {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 840.118886] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-3861d588-4806-4a38-9554-f7a6f721db6e tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] [instance: f75ed36e-16c8-4a6b-bd39-eb4057ef0691] Deleted contents of the VM from datastore datastore2 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 840.119084] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-3861d588-4806-4a38-9554-f7a6f721db6e tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] [instance: f75ed36e-16c8-4a6b-bd39-eb4057ef0691] Instance destroyed {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 840.119270] env[62070]: INFO nova.compute.manager [None req-3861d588-4806-4a38-9554-f7a6f721db6e tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] [instance: f75ed36e-16c8-4a6b-bd39-eb4057ef0691] Took 1.18 seconds to destroy the instance on the hypervisor. [ 840.119525] env[62070]: DEBUG oslo.service.loopingcall [None req-3861d588-4806-4a38-9554-f7a6f721db6e tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 840.119733] env[62070]: DEBUG nova.compute.manager [-] [instance: f75ed36e-16c8-4a6b-bd39-eb4057ef0691] Deallocating network for instance {{(pid=62070) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 840.119832] env[62070]: DEBUG nova.network.neutron [-] [instance: f75ed36e-16c8-4a6b-bd39-eb4057ef0691] deallocate_for_instance() {{(pid=62070) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 840.192458] env[62070]: DEBUG oslo_concurrency.lockutils [None req-7ef3c4d0-3bb0-4ce7-af88-13da4a0bcc02 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.693s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 840.193018] env[62070]: DEBUG nova.compute.manager [None req-7ef3c4d0-3bb0-4ce7-af88-13da4a0bcc02 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: c16d175c-0b23-4f72-bdb0-844c6f80fd32] Start building networks asynchronously for instance. {{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 840.197738] env[62070]: DEBUG oslo_concurrency.lockutils [None req-cfb5fa31-e2c0-473b-afb7-d7e329440a5c tempest-ServersTestBootFromVolume-622206804 tempest-ServersTestBootFromVolume-622206804-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 27.017s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 840.197738] env[62070]: DEBUG oslo_concurrency.lockutils [None req-cfb5fa31-e2c0-473b-afb7-d7e329440a5c tempest-ServersTestBootFromVolume-622206804 tempest-ServersTestBootFromVolume-622206804-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 840.199755] env[62070]: DEBUG oslo_concurrency.lockutils [None req-e216791d-b9a4-4463-83e2-194be4723308 tempest-ServerMetadataTestJSON-571714377 tempest-ServerMetadataTestJSON-571714377-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 21.357s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 840.199974] env[62070]: DEBUG oslo_concurrency.lockutils [None req-e216791d-b9a4-4463-83e2-194be4723308 tempest-ServerMetadataTestJSON-571714377 tempest-ServerMetadataTestJSON-571714377-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 840.203725] env[62070]: DEBUG oslo_concurrency.lockutils [None req-cc6a54b3-9744-479e-b7db-0a87bbf229c3 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 20.083s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 840.203922] env[62070]: DEBUG 
nova.objects.instance [None req-cc6a54b3-9744-479e-b7db-0a87bbf229c3 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: c3c6e93c-80be-4e71-87fb-2ff8db8d30fe] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62070) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 840.248237] env[62070]: INFO nova.scheduler.client.report [None req-e216791d-b9a4-4463-83e2-194be4723308 tempest-ServerMetadataTestJSON-571714377 tempest-ServerMetadataTestJSON-571714377-project-member] Deleted allocations for instance efef4aac-5b74-4a41-9f74-3d4cb4f80cdb [ 840.252291] env[62070]: INFO nova.scheduler.client.report [None req-cfb5fa31-e2c0-473b-afb7-d7e329440a5c tempest-ServersTestBootFromVolume-622206804 tempest-ServersTestBootFromVolume-622206804-project-member] Deleted allocations for instance dd5d90e8-964a-4e1c-a98a-bcba37a1d79e [ 840.275430] env[62070]: INFO nova.compute.manager [None req-c218b909-099b-46da-812a-385709088649 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] [instance: e74fd58c-cfa8-45c4-8f02-96234b4a9192] Rescuing [ 840.275430] env[62070]: DEBUG oslo_concurrency.lockutils [None req-c218b909-099b-46da-812a-385709088649 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Acquiring lock "refresh_cache-e74fd58c-cfa8-45c4-8f02-96234b4a9192" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 840.275430] env[62070]: DEBUG oslo_concurrency.lockutils [None req-c218b909-099b-46da-812a-385709088649 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Acquired lock "refresh_cache-e74fd58c-cfa8-45c4-8f02-96234b4a9192" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 840.275539] env[62070]: DEBUG nova.network.neutron [None req-c218b909-099b-46da-812a-385709088649 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] [instance: e74fd58c-cfa8-45c4-8f02-96234b4a9192] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 840.443315] env[62070]: INFO nova.compute.manager [None req-e6ec19d6-6f98-4a3b-a887-49dc36df65de tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] [instance: 3d22f50a-e1b7-48f9-a044-df64d01dfeb4] Rescuing [ 840.443617] env[62070]: DEBUG oslo_concurrency.lockutils [None req-e6ec19d6-6f98-4a3b-a887-49dc36df65de tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Acquiring lock "refresh_cache-3d22f50a-e1b7-48f9-a044-df64d01dfeb4" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 840.443778] env[62070]: DEBUG oslo_concurrency.lockutils [None req-e6ec19d6-6f98-4a3b-a887-49dc36df65de tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Acquired lock "refresh_cache-3d22f50a-e1b7-48f9-a044-df64d01dfeb4" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 840.443953] env[62070]: DEBUG nova.network.neutron [None req-e6ec19d6-6f98-4a3b-a887-49dc36df65de tempest-ServerRescueNegativeTestJSON-883366746 
tempest-ServerRescueNegativeTestJSON-883366746-project-member] [instance: 3d22f50a-e1b7-48f9-a044-df64d01dfeb4] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 840.477368] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1121819, 'name': CreateVM_Task} progress is 25%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 840.499879] env[62070]: INFO nova.compute.manager [None req-b5d7932b-5fe7-4bae-8652-828c067cb041 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: 65fe3720-95cb-4620-b1c7-eae9e3bc3943] Took 43.45 seconds to build instance. [ 840.717256] env[62070]: DEBUG nova.compute.utils [None req-7ef3c4d0-3bb0-4ce7-af88-13da4a0bcc02 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Using /dev/sd instead of None {{(pid=62070) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 840.725586] env[62070]: DEBUG nova.compute.manager [None req-7ef3c4d0-3bb0-4ce7-af88-13da4a0bcc02 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: c16d175c-0b23-4f72-bdb0-844c6f80fd32] Allocating IP information in the background. {{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 840.725586] env[62070]: DEBUG nova.network.neutron [None req-7ef3c4d0-3bb0-4ce7-af88-13da4a0bcc02 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: c16d175c-0b23-4f72-bdb0-844c6f80fd32] allocate_for_instance() {{(pid=62070) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 840.768098] env[62070]: DEBUG oslo_concurrency.lockutils [None req-e216791d-b9a4-4463-83e2-194be4723308 tempest-ServerMetadataTestJSON-571714377 tempest-ServerMetadataTestJSON-571714377-project-member] Lock "efef4aac-5b74-4a41-9f74-3d4cb4f80cdb" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 25.880s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 840.789823] env[62070]: DEBUG oslo_concurrency.lockutils [None req-cfb5fa31-e2c0-473b-afb7-d7e329440a5c tempest-ServersTestBootFromVolume-622206804 tempest-ServersTestBootFromVolume-622206804-project-member] Lock "dd5d90e8-964a-4e1c-a98a-bcba37a1d79e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 32.861s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 840.867050] env[62070]: DEBUG nova.compute.manager [req-10247813-5f61-4f73-bc25-26f9c4987399 req-ba4989c8-15d8-4b9c-b9f9-e51729f470fd service nova] [instance: 27987ff6-77c9-4876-8b39-dcc20ce4158a] Received event network-changed-ec5674a0-9a0f-48f3-ad88-00fe5f326e8c {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 840.867050] env[62070]: DEBUG nova.compute.manager [req-10247813-5f61-4f73-bc25-26f9c4987399 req-ba4989c8-15d8-4b9c-b9f9-e51729f470fd service nova] [instance: 27987ff6-77c9-4876-8b39-dcc20ce4158a] Refreshing instance network info cache due to event network-changed-ec5674a0-9a0f-48f3-ad88-00fe5f326e8c. 
{{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 840.867050] env[62070]: DEBUG oslo_concurrency.lockutils [req-10247813-5f61-4f73-bc25-26f9c4987399 req-ba4989c8-15d8-4b9c-b9f9-e51729f470fd service nova] Acquiring lock "refresh_cache-27987ff6-77c9-4876-8b39-dcc20ce4158a" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 840.867050] env[62070]: DEBUG oslo_concurrency.lockutils [req-10247813-5f61-4f73-bc25-26f9c4987399 req-ba4989c8-15d8-4b9c-b9f9-e51729f470fd service nova] Acquired lock "refresh_cache-27987ff6-77c9-4876-8b39-dcc20ce4158a" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 840.867050] env[62070]: DEBUG nova.network.neutron [req-10247813-5f61-4f73-bc25-26f9c4987399 req-ba4989c8-15d8-4b9c-b9f9-e51729f470fd service nova] [instance: 27987ff6-77c9-4876-8b39-dcc20ce4158a] Refreshing network info cache for port ec5674a0-9a0f-48f3-ad88-00fe5f326e8c {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 840.892294] env[62070]: DEBUG nova.policy [None req-7ef3c4d0-3bb0-4ce7-af88-13da4a0bcc02 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0aa820b3e16d4d6fbc6bda0b232025fc', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c91e5eeeeb1742f499b2edaf76a93a3b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62070) authorize /opt/stack/nova/nova/policy.py:201}} [ 840.979159] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1121819, 'name': CreateVM_Task, 'duration_secs': 0.76496} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 840.979451] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 27987ff6-77c9-4876-8b39-dcc20ce4158a] Created VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 840.981214] env[62070]: DEBUG oslo_concurrency.lockutils [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 840.981983] env[62070]: DEBUG oslo_concurrency.lockutils [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Acquired lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 840.982373] env[62070]: DEBUG oslo_concurrency.lockutils [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 840.983200] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1fcffb3d-23fe-48ab-a824-585f7340c073 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.991685] env[62070]: DEBUG oslo_vmware.api [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Waiting for the task: (returnval){ [ 840.991685] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]52c5b90f-28b6-84e9-9fe3-bd57693c2dc1" [ 840.991685] env[62070]: _type = "Task" [ 840.991685] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 841.007113] env[62070]: DEBUG oslo_concurrency.lockutils [None req-b5d7932b-5fe7-4bae-8652-828c067cb041 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Lock "65fe3720-95cb-4620-b1c7-eae9e3bc3943" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 79.647s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 841.007485] env[62070]: DEBUG oslo_vmware.api [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52c5b90f-28b6-84e9-9fe3-bd57693c2dc1, 'name': SearchDatastore_Task, 'duration_secs': 0.011537} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 841.007950] env[62070]: DEBUG oslo_concurrency.lockutils [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Releasing lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 841.010549] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] [instance: 27987ff6-77c9-4876-8b39-dcc20ce4158a] Processing image 43ea607c-7ece-4601-9b11-75c6a16aa7dd {{(pid=62070) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 841.010951] env[62070]: DEBUG oslo_concurrency.lockutils [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 841.011176] env[62070]: DEBUG oslo_concurrency.lockutils [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Acquired lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 841.011424] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 841.011781] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b70c5171-4f2a-4845-9b72-e9d7ac9aa484 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.023471] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 841.023760] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62070) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 841.024729] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0974d96d-56a8-4f4b-8d08-6d78d891e763 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.035853] env[62070]: DEBUG oslo_vmware.api [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Waiting for the task: (returnval){ [ 841.035853] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]52158d88-2308-8b85-c396-ec27814239f2" [ 841.035853] env[62070]: _type = "Task" [ 841.035853] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 841.045928] env[62070]: DEBUG oslo_vmware.api [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52158d88-2308-8b85-c396-ec27814239f2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 841.225451] env[62070]: DEBUG nova.compute.manager [None req-7ef3c4d0-3bb0-4ce7-af88-13da4a0bcc02 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: c16d175c-0b23-4f72-bdb0-844c6f80fd32] Start building block device mappings for instance. {{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 841.230320] env[62070]: DEBUG oslo_concurrency.lockutils [None req-cc6a54b3-9744-479e-b7db-0a87bbf229c3 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.026s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 841.231770] env[62070]: DEBUG oslo_concurrency.lockutils [None req-d1ae98c6-774b-4beb-8cc9-59c38f859b35 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 20.833s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 841.234235] env[62070]: INFO nova.compute.claims [None req-d1ae98c6-774b-4beb-8cc9-59c38f859b35 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 62758a38-4819-4d5a-97ed-db6c9ceb97bf] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 841.310938] env[62070]: DEBUG nova.network.neutron [-] [instance: f75ed36e-16c8-4a6b-bd39-eb4057ef0691] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 841.522375] env[62070]: DEBUG nova.network.neutron [None req-e6ec19d6-6f98-4a3b-a887-49dc36df65de tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] [instance: 3d22f50a-e1b7-48f9-a044-df64d01dfeb4] Updating instance_info_cache with network_info: [{"id": "aef8b9b0-4bbd-4af6-b65d-f7e964775fd4", "address": "fa:16:3e:be:d4:45", "network": {"id": 
"754f4ec8-0bc6-4726-8b88-1a4e1a326699", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-293486644-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1a94db233e3a43dc9aa7cf887c6cb1f6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d2742ba-c3af-4412-877d-c2811dfeba46", "external-id": "nsx-vlan-transportzone-390", "segmentation_id": 390, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaef8b9b0-4b", "ovs_interfaceid": "aef8b9b0-4bbd-4af6-b65d-f7e964775fd4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 841.536165] env[62070]: DEBUG nova.network.neutron [None req-c218b909-099b-46da-812a-385709088649 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] [instance: e74fd58c-cfa8-45c4-8f02-96234b4a9192] Updating instance_info_cache with network_info: [{"id": "6444a30f-9c50-4eaf-b562-178b627dc0f1", "address": "fa:16:3e:ec:99:7c", "network": {"id": "08004b49-dbc2-4186-9e28-4268e947e8ee", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-2022236674-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "2ecbd5f22c024de8a6b1c45096cb79a7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69e41c97-4d75-4041-ae71-321e7e9d480b", "external-id": "nsx-vlan-transportzone-483", "segmentation_id": 483, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6444a30f-9c", "ovs_interfaceid": "6444a30f-9c50-4eaf-b562-178b627dc0f1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 841.552361] env[62070]: DEBUG oslo_vmware.api [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52158d88-2308-8b85-c396-ec27814239f2, 'name': SearchDatastore_Task, 'duration_secs': 0.017635} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 841.553253] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f6e5af9d-936a-4f34-85c3-dc984ab8cf49 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.562737] env[62070]: DEBUG oslo_vmware.api [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Waiting for the task: (returnval){ [ 841.562737] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]52d17d62-6bcc-0be0-a56a-62bf6d33c7ee" [ 841.562737] env[62070]: _type = "Task" [ 841.562737] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 841.577853] env[62070]: DEBUG oslo_vmware.api [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52d17d62-6bcc-0be0-a56a-62bf6d33c7ee, 'name': SearchDatastore_Task, 'duration_secs': 0.011514} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 841.578179] env[62070]: DEBUG oslo_concurrency.lockutils [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Releasing lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 841.578489] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore2] 27987ff6-77c9-4876-8b39-dcc20ce4158a/27987ff6-77c9-4876-8b39-dcc20ce4158a.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 841.578831] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-72df865f-44fd-43bf-b78d-cd7cd84f1889 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.587906] env[62070]: DEBUG oslo_vmware.api [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Waiting for the task: (returnval){ [ 841.587906] env[62070]: value = "task-1121820" [ 841.587906] env[62070]: _type = "Task" [ 841.587906] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 841.598836] env[62070]: DEBUG oslo_vmware.api [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Task: {'id': task-1121820, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 841.719001] env[62070]: DEBUG nova.compute.manager [req-737fe1d0-94db-455a-b6be-37231a1a31c2 req-eb4a70f3-05b4-4d0c-bec6-343177b24faf service nova] [instance: f75ed36e-16c8-4a6b-bd39-eb4057ef0691] Received event network-vif-deleted-48f9f765-2091-4515-9b46-46217835d2df {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 841.813027] env[62070]: INFO nova.compute.manager [-] [instance: f75ed36e-16c8-4a6b-bd39-eb4057ef0691] Took 1.69 seconds to deallocate network for instance. [ 841.861840] env[62070]: DEBUG nova.network.neutron [req-10247813-5f61-4f73-bc25-26f9c4987399 req-ba4989c8-15d8-4b9c-b9f9-e51729f470fd service nova] [instance: 27987ff6-77c9-4876-8b39-dcc20ce4158a] Updated VIF entry in instance network info cache for port ec5674a0-9a0f-48f3-ad88-00fe5f326e8c. {{(pid=62070) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 841.862310] env[62070]: DEBUG nova.network.neutron [req-10247813-5f61-4f73-bc25-26f9c4987399 req-ba4989c8-15d8-4b9c-b9f9-e51729f470fd service nova] [instance: 27987ff6-77c9-4876-8b39-dcc20ce4158a] Updating instance_info_cache with network_info: [{"id": "ec5674a0-9a0f-48f3-ad88-00fe5f326e8c", "address": "fa:16:3e:f8:f1:7f", "network": {"id": "b61ea502-bdfd-4ddf-8bb9-1f2f2f003f65", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-216003123-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3e9dab208bda46418b994df4359da404", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "099fe970-c61f-4480-bed4-ae4f485fd82a", "external-id": "nsx-vlan-transportzone-678", "segmentation_id": 678, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapec5674a0-9a", "ovs_interfaceid": "ec5674a0-9a0f-48f3-ad88-00fe5f326e8c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 841.903869] env[62070]: DEBUG nova.network.neutron [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] [instance: e5deccf6-f967-4e3c-bee0-2e1ad0bb4560] Successfully updated port: 63044c40-0b6c-4711-9987-e4b6dec9f8b5 {{(pid=62070) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 841.980866] env[62070]: DEBUG nova.network.neutron [None req-7ef3c4d0-3bb0-4ce7-af88-13da4a0bcc02 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: c16d175c-0b23-4f72-bdb0-844c6f80fd32] Successfully created port: 5a04b813-465f-4855-8707-4db273ba30fd {{(pid=62070) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 842.025858] env[62070]: DEBUG oslo_concurrency.lockutils [None req-e6ec19d6-6f98-4a3b-a887-49dc36df65de tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Releasing lock 
"refresh_cache-3d22f50a-e1b7-48f9-a044-df64d01dfeb4" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 842.039634] env[62070]: DEBUG oslo_concurrency.lockutils [None req-c218b909-099b-46da-812a-385709088649 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Releasing lock "refresh_cache-e74fd58c-cfa8-45c4-8f02-96234b4a9192" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 842.104097] env[62070]: DEBUG oslo_vmware.api [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Task: {'id': task-1121820, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 842.240052] env[62070]: DEBUG nova.compute.manager [None req-7ef3c4d0-3bb0-4ce7-af88-13da4a0bcc02 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: c16d175c-0b23-4f72-bdb0-844c6f80fd32] Start spawning the instance on the hypervisor. {{(pid=62070) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 842.274253] env[62070]: DEBUG nova.virt.hardware [None req-7ef3c4d0-3bb0-4ce7-af88-13da4a0bcc02 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T09:21:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T09:21:17Z,direct_url=,disk_format='vmdk',id=43ea607c-7ece-4601-9b11-75c6a16aa7dd,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9d42cb2bbadf40d6b35f237f71234611',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T09:21:18Z,virtual_size=,visibility=), allow threads: False {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 842.274700] env[62070]: DEBUG nova.virt.hardware [None req-7ef3c4d0-3bb0-4ce7-af88-13da4a0bcc02 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Flavor limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 842.274805] env[62070]: DEBUG nova.virt.hardware [None req-7ef3c4d0-3bb0-4ce7-af88-13da4a0bcc02 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Image limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 842.275811] env[62070]: DEBUG nova.virt.hardware [None req-7ef3c4d0-3bb0-4ce7-af88-13da4a0bcc02 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Flavor pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 842.275811] env[62070]: DEBUG nova.virt.hardware [None req-7ef3c4d0-3bb0-4ce7-af88-13da4a0bcc02 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Image pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:392}} [ 842.275811] env[62070]: DEBUG nova.virt.hardware [None req-7ef3c4d0-3bb0-4ce7-af88-13da4a0bcc02 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 842.275811] env[62070]: DEBUG nova.virt.hardware [None req-7ef3c4d0-3bb0-4ce7-af88-13da4a0bcc02 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 842.275811] env[62070]: DEBUG nova.virt.hardware [None req-7ef3c4d0-3bb0-4ce7-af88-13da4a0bcc02 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 842.275999] env[62070]: DEBUG nova.virt.hardware [None req-7ef3c4d0-3bb0-4ce7-af88-13da4a0bcc02 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Got 1 possible topologies {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 842.276300] env[62070]: DEBUG nova.virt.hardware [None req-7ef3c4d0-3bb0-4ce7-af88-13da4a0bcc02 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 842.276354] env[62070]: DEBUG nova.virt.hardware [None req-7ef3c4d0-3bb0-4ce7-af88-13da4a0bcc02 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 842.277286] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4bf2420-af65-48a5-8a7f-53bbc3568753 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.296146] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-311dd740-d7c3-4c00-aed9-35ed9b04590a {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.323034] env[62070]: DEBUG oslo_concurrency.lockutils [None req-3861d588-4806-4a38-9554-f7a6f721db6e tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 842.363685] env[62070]: DEBUG oslo_concurrency.lockutils [None req-bffd3cab-e369-40e8-ad65-4e451f071c3d tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Acquiring lock "242cf24f-f9e0-49ca-9b3e-a2b88b3cdcdc" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62070) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 842.365593] env[62070]: DEBUG oslo_concurrency.lockutils [None req-bffd3cab-e369-40e8-ad65-4e451f071c3d tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Lock "242cf24f-f9e0-49ca-9b3e-a2b88b3cdcdc" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 842.365593] env[62070]: DEBUG oslo_concurrency.lockutils [None req-bffd3cab-e369-40e8-ad65-4e451f071c3d tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Acquiring lock "242cf24f-f9e0-49ca-9b3e-a2b88b3cdcdc-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 842.365593] env[62070]: DEBUG oslo_concurrency.lockutils [None req-bffd3cab-e369-40e8-ad65-4e451f071c3d tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Lock "242cf24f-f9e0-49ca-9b3e-a2b88b3cdcdc-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 842.365593] env[62070]: DEBUG oslo_concurrency.lockutils [None req-bffd3cab-e369-40e8-ad65-4e451f071c3d tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Lock "242cf24f-f9e0-49ca-9b3e-a2b88b3cdcdc-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 842.370793] env[62070]: INFO nova.compute.manager [None req-bffd3cab-e369-40e8-ad65-4e451f071c3d tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] [instance: 242cf24f-f9e0-49ca-9b3e-a2b88b3cdcdc] Terminating instance [ 842.373262] env[62070]: DEBUG oslo_concurrency.lockutils [req-10247813-5f61-4f73-bc25-26f9c4987399 req-ba4989c8-15d8-4b9c-b9f9-e51729f470fd service nova] Releasing lock "refresh_cache-27987ff6-77c9-4876-8b39-dcc20ce4158a" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 842.374504] env[62070]: DEBUG nova.compute.manager [None req-bffd3cab-e369-40e8-ad65-4e451f071c3d tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] [instance: 242cf24f-f9e0-49ca-9b3e-a2b88b3cdcdc] Start destroying the instance on the hypervisor. 
{{(pid=62070) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 842.374791] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-bffd3cab-e369-40e8-ad65-4e451f071c3d tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] [instance: 242cf24f-f9e0-49ca-9b3e-a2b88b3cdcdc] Destroying instance {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 842.375739] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e8259ff-13d9-4239-a6e4-eab100202121 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.384905] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-bffd3cab-e369-40e8-ad65-4e451f071c3d tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] [instance: 242cf24f-f9e0-49ca-9b3e-a2b88b3cdcdc] Powering off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 842.385243] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0778c072-f570-4b9c-974a-be6c9fef7a5a {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.396961] env[62070]: DEBUG oslo_vmware.api [None req-bffd3cab-e369-40e8-ad65-4e451f071c3d tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Waiting for the task: (returnval){ [ 842.396961] env[62070]: value = "task-1121821" [ 842.396961] env[62070]: _type = "Task" [ 842.396961] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 842.408879] env[62070]: DEBUG oslo_concurrency.lockutils [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Acquiring lock "refresh_cache-e5deccf6-f967-4e3c-bee0-2e1ad0bb4560" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 842.408879] env[62070]: DEBUG oslo_concurrency.lockutils [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Acquired lock "refresh_cache-e5deccf6-f967-4e3c-bee0-2e1ad0bb4560" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 842.408879] env[62070]: DEBUG nova.network.neutron [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] [instance: e5deccf6-f967-4e3c-bee0-2e1ad0bb4560] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 842.410096] env[62070]: DEBUG oslo_vmware.api [None req-bffd3cab-e369-40e8-ad65-4e451f071c3d tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Task: {'id': task-1121821, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 842.534588] env[62070]: DEBUG nova.objects.instance [None req-3c937880-263f-4058-86ea-f550d2687498 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Lazy-loading 'flavor' on Instance uuid 71aead12-a182-40a7-b5a9-91c01271b800 {{(pid=62070) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 842.588471] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-e6ec19d6-6f98-4a3b-a887-49dc36df65de tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] [instance: 3d22f50a-e1b7-48f9-a044-df64d01dfeb4] Powering off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 842.588857] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b9a8f950-b188-4488-9d59-18817c8bb2fc {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.593211] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-c218b909-099b-46da-812a-385709088649 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] [instance: e74fd58c-cfa8-45c4-8f02-96234b4a9192] Powering off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 842.593819] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-73e42bbf-906c-488a-86d7-e48ecb3b6aa8 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.615984] env[62070]: DEBUG oslo_vmware.api [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Task: {'id': task-1121820, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 842.616558] env[62070]: DEBUG oslo_vmware.api [None req-e6ec19d6-6f98-4a3b-a887-49dc36df65de tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Waiting for the task: (returnval){ [ 842.616558] env[62070]: value = "task-1121823" [ 842.616558] env[62070]: _type = "Task" [ 842.616558] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 842.618980] env[62070]: DEBUG oslo_vmware.api [None req-c218b909-099b-46da-812a-385709088649 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Waiting for the task: (returnval){ [ 842.618980] env[62070]: value = "task-1121822" [ 842.618980] env[62070]: _type = "Task" [ 842.618980] env[62070]: } to complete. 
{{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 842.644088] env[62070]: DEBUG oslo_concurrency.lockutils [None req-759bf497-5e25-4b21-809d-f1952bc85c78 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Acquiring lock "65fe3720-95cb-4620-b1c7-eae9e3bc3943" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 842.644724] env[62070]: DEBUG oslo_concurrency.lockutils [None req-759bf497-5e25-4b21-809d-f1952bc85c78 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Lock "65fe3720-95cb-4620-b1c7-eae9e3bc3943" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 842.644826] env[62070]: DEBUG oslo_concurrency.lockutils [None req-759bf497-5e25-4b21-809d-f1952bc85c78 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Acquiring lock "65fe3720-95cb-4620-b1c7-eae9e3bc3943-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 842.645139] env[62070]: DEBUG oslo_concurrency.lockutils [None req-759bf497-5e25-4b21-809d-f1952bc85c78 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Lock "65fe3720-95cb-4620-b1c7-eae9e3bc3943-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 842.646071] env[62070]: DEBUG oslo_concurrency.lockutils [None req-759bf497-5e25-4b21-809d-f1952bc85c78 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Lock "65fe3720-95cb-4620-b1c7-eae9e3bc3943-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 842.648137] env[62070]: DEBUG oslo_vmware.api [None req-c218b909-099b-46da-812a-385709088649 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Task: {'id': task-1121822, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 842.653528] env[62070]: INFO nova.compute.manager [None req-759bf497-5e25-4b21-809d-f1952bc85c78 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: 65fe3720-95cb-4620-b1c7-eae9e3bc3943] Terminating instance [ 842.655951] env[62070]: DEBUG oslo_vmware.api [None req-e6ec19d6-6f98-4a3b-a887-49dc36df65de tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Task: {'id': task-1121823, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 842.660726] env[62070]: DEBUG nova.compute.manager [None req-759bf497-5e25-4b21-809d-f1952bc85c78 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: 65fe3720-95cb-4620-b1c7-eae9e3bc3943] Start destroying the instance on the hypervisor. {{(pid=62070) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 842.661219] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-759bf497-5e25-4b21-809d-f1952bc85c78 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: 65fe3720-95cb-4620-b1c7-eae9e3bc3943] Destroying instance {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 842.662995] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2926402e-ab21-4a37-8252-4540e333d0f2 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.675282] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-759bf497-5e25-4b21-809d-f1952bc85c78 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: 65fe3720-95cb-4620-b1c7-eae9e3bc3943] Powering off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 842.676172] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f4649c76-4c80-42a1-b027-bfdf2b47e9c8 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.687338] env[62070]: DEBUG oslo_vmware.api [None req-759bf497-5e25-4b21-809d-f1952bc85c78 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Waiting for the task: (returnval){ [ 842.687338] env[62070]: value = "task-1121824" [ 842.687338] env[62070]: _type = "Task" [ 842.687338] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 842.699386] env[62070]: DEBUG oslo_vmware.api [None req-759bf497-5e25-4b21-809d-f1952bc85c78 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Task: {'id': task-1121824, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 842.806776] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d50ea5e-bf1c-491d-8b75-cdfbd71bc68d {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.814947] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2c38120-1b63-4bbd-bc1f-b11537ac6ae4 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.847864] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72cae51e-ad2b-47ef-80bd-8b5bcaea41e0 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.857183] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aca8b2c8-b629-4dc2-bbc4-2494a91ad9b0 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.873302] env[62070]: DEBUG nova.compute.provider_tree [None req-d1ae98c6-774b-4beb-8cc9-59c38f859b35 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 842.909975] env[62070]: DEBUG oslo_vmware.api [None req-bffd3cab-e369-40e8-ad65-4e451f071c3d tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Task: {'id': task-1121821, 'name': PowerOffVM_Task, 'duration_secs': 0.254815} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 842.910110] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-bffd3cab-e369-40e8-ad65-4e451f071c3d tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] [instance: 242cf24f-f9e0-49ca-9b3e-a2b88b3cdcdc] Powered off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 842.910292] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-bffd3cab-e369-40e8-ad65-4e451f071c3d tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] [instance: 242cf24f-f9e0-49ca-9b3e-a2b88b3cdcdc] Unregistering the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 842.910568] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2eeae753-a63b-433f-bbb6-df6a2cf3c1ce {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.995408] env[62070]: DEBUG nova.network.neutron [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] [instance: e5deccf6-f967-4e3c-bee0-2e1ad0bb4560] Instance cache missing network info. 
{{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 843.003133] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-bffd3cab-e369-40e8-ad65-4e451f071c3d tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] [instance: 242cf24f-f9e0-49ca-9b3e-a2b88b3cdcdc] Unregistered the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 843.003466] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-bffd3cab-e369-40e8-ad65-4e451f071c3d tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] [instance: 242cf24f-f9e0-49ca-9b3e-a2b88b3cdcdc] Deleting contents of the VM from datastore datastore2 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 843.003578] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-bffd3cab-e369-40e8-ad65-4e451f071c3d tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Deleting the datastore file [datastore2] 242cf24f-f9e0-49ca-9b3e-a2b88b3cdcdc {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 843.003997] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1bf08472-2afe-4fc5-82d0-eddc17340128 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.012548] env[62070]: DEBUG oslo_vmware.api [None req-bffd3cab-e369-40e8-ad65-4e451f071c3d tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Waiting for the task: (returnval){ [ 843.012548] env[62070]: value = "task-1121826" [ 843.012548] env[62070]: _type = "Task" [ 843.012548] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 843.022766] env[62070]: DEBUG oslo_vmware.api [None req-bffd3cab-e369-40e8-ad65-4e451f071c3d tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Task: {'id': task-1121826, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 843.040837] env[62070]: DEBUG oslo_concurrency.lockutils [None req-3c937880-263f-4058-86ea-f550d2687498 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Acquiring lock "refresh_cache-71aead12-a182-40a7-b5a9-91c01271b800" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 843.041065] env[62070]: DEBUG oslo_concurrency.lockutils [None req-3c937880-263f-4058-86ea-f550d2687498 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Acquired lock "refresh_cache-71aead12-a182-40a7-b5a9-91c01271b800" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 843.041508] env[62070]: DEBUG nova.network.neutron [None req-3c937880-263f-4058-86ea-f550d2687498 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 71aead12-a182-40a7-b5a9-91c01271b800] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 843.041594] env[62070]: DEBUG nova.objects.instance [None req-3c937880-263f-4058-86ea-f550d2687498 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Lazy-loading 'info_cache' on Instance uuid 71aead12-a182-40a7-b5a9-91c01271b800 {{(pid=62070) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 843.103997] env[62070]: DEBUG oslo_vmware.api [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Task: {'id': task-1121820, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 843.131673] env[62070]: DEBUG oslo_vmware.api [None req-e6ec19d6-6f98-4a3b-a887-49dc36df65de tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Task: {'id': task-1121823, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 843.141133] env[62070]: DEBUG oslo_vmware.api [None req-c218b909-099b-46da-812a-385709088649 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Task: {'id': task-1121822, 'name': PowerOffVM_Task, 'duration_secs': 0.420211} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 843.141452] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-c218b909-099b-46da-812a-385709088649 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] [instance: e74fd58c-cfa8-45c4-8f02-96234b4a9192] Powered off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 843.142462] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-448283b5-f4d1-430b-88ab-e0c1125ef4f2 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.165055] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c8e32a0-1abf-4cd7-b857-5034c4b50f47 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.198026] env[62070]: DEBUG oslo_vmware.api [None req-759bf497-5e25-4b21-809d-f1952bc85c78 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Task: {'id': task-1121824, 'name': PowerOffVM_Task, 'duration_secs': 0.371006} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 843.198457] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-759bf497-5e25-4b21-809d-f1952bc85c78 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: 65fe3720-95cb-4620-b1c7-eae9e3bc3943] Powered off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 843.198457] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-759bf497-5e25-4b21-809d-f1952bc85c78 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: 65fe3720-95cb-4620-b1c7-eae9e3bc3943] Unregistering the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 843.198782] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7689861f-5217-400d-ab81-cc60e4486f23 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.202173] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-c218b909-099b-46da-812a-385709088649 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] [instance: e74fd58c-cfa8-45c4-8f02-96234b4a9192] Powering off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 843.202403] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-fc94d139-86ee-4fa9-b29c-ef6e08af3761 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.212955] env[62070]: DEBUG oslo_vmware.api [None req-c218b909-099b-46da-812a-385709088649 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Waiting for the task: (returnval){ [ 843.212955] env[62070]: value = "task-1121828" [ 843.212955] env[62070]: _type = "Task" [ 843.212955] env[62070]: } to complete. 
{{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 843.223894] env[62070]: DEBUG oslo_vmware.api [None req-c218b909-099b-46da-812a-385709088649 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Task: {'id': task-1121828, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 843.298781] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-759bf497-5e25-4b21-809d-f1952bc85c78 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: 65fe3720-95cb-4620-b1c7-eae9e3bc3943] Unregistered the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 843.299236] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-759bf497-5e25-4b21-809d-f1952bc85c78 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: 65fe3720-95cb-4620-b1c7-eae9e3bc3943] Deleting contents of the VM from datastore datastore2 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 843.299357] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-759bf497-5e25-4b21-809d-f1952bc85c78 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Deleting the datastore file [datastore2] 65fe3720-95cb-4620-b1c7-eae9e3bc3943 {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 843.299546] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-70172ca8-61fe-48c0-aab8-639b3475405d {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.302507] env[62070]: DEBUG nova.network.neutron [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] [instance: e5deccf6-f967-4e3c-bee0-2e1ad0bb4560] Updating instance_info_cache with network_info: [{"id": "63044c40-0b6c-4711-9987-e4b6dec9f8b5", "address": "fa:16:3e:9d:f9:59", "network": {"id": "b61ea502-bdfd-4ddf-8bb9-1f2f2f003f65", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-216003123-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3e9dab208bda46418b994df4359da404", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "099fe970-c61f-4480-bed4-ae4f485fd82a", "external-id": "nsx-vlan-transportzone-678", "segmentation_id": 678, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap63044c40-0b", "ovs_interfaceid": "63044c40-0b6c-4711-9987-e4b6dec9f8b5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 843.311667] env[62070]: DEBUG oslo_vmware.api [None req-759bf497-5e25-4b21-809d-f1952bc85c78 tempest-ServersTestJSON-1318385680 
tempest-ServersTestJSON-1318385680-project-member] Waiting for the task: (returnval){ [ 843.311667] env[62070]: value = "task-1121829" [ 843.311667] env[62070]: _type = "Task" [ 843.311667] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 843.319870] env[62070]: DEBUG oslo_vmware.api [None req-759bf497-5e25-4b21-809d-f1952bc85c78 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Task: {'id': task-1121829, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 843.378043] env[62070]: DEBUG nova.scheduler.client.report [None req-d1ae98c6-774b-4beb-8cc9-59c38f859b35 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 843.413320] env[62070]: DEBUG nova.compute.manager [req-ba5c8240-d3ea-4106-9a38-eb4a637f8508 req-f217f2a0-9ca2-45bd-bf62-c3f018d3c386 service nova] [instance: e5deccf6-f967-4e3c-bee0-2e1ad0bb4560] Received event network-vif-plugged-63044c40-0b6c-4711-9987-e4b6dec9f8b5 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 843.413591] env[62070]: DEBUG oslo_concurrency.lockutils [req-ba5c8240-d3ea-4106-9a38-eb4a637f8508 req-f217f2a0-9ca2-45bd-bf62-c3f018d3c386 service nova] Acquiring lock "e5deccf6-f967-4e3c-bee0-2e1ad0bb4560-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 843.413854] env[62070]: DEBUG oslo_concurrency.lockutils [req-ba5c8240-d3ea-4106-9a38-eb4a637f8508 req-f217f2a0-9ca2-45bd-bf62-c3f018d3c386 service nova] Lock "e5deccf6-f967-4e3c-bee0-2e1ad0bb4560-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 843.415917] env[62070]: DEBUG oslo_concurrency.lockutils [req-ba5c8240-d3ea-4106-9a38-eb4a637f8508 req-f217f2a0-9ca2-45bd-bf62-c3f018d3c386 service nova] Lock "e5deccf6-f967-4e3c-bee0-2e1ad0bb4560-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 843.415917] env[62070]: DEBUG nova.compute.manager [req-ba5c8240-d3ea-4106-9a38-eb4a637f8508 req-f217f2a0-9ca2-45bd-bf62-c3f018d3c386 service nova] [instance: e5deccf6-f967-4e3c-bee0-2e1ad0bb4560] No waiting events found dispatching network-vif-plugged-63044c40-0b6c-4711-9987-e4b6dec9f8b5 {{(pid=62070) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 843.415917] env[62070]: WARNING nova.compute.manager [req-ba5c8240-d3ea-4106-9a38-eb4a637f8508 req-f217f2a0-9ca2-45bd-bf62-c3f018d3c386 service nova] [instance: 
e5deccf6-f967-4e3c-bee0-2e1ad0bb4560] Received unexpected event network-vif-plugged-63044c40-0b6c-4711-9987-e4b6dec9f8b5 for instance with vm_state building and task_state spawning. [ 843.416249] env[62070]: DEBUG nova.compute.manager [req-ba5c8240-d3ea-4106-9a38-eb4a637f8508 req-f217f2a0-9ca2-45bd-bf62-c3f018d3c386 service nova] [instance: e5deccf6-f967-4e3c-bee0-2e1ad0bb4560] Received event network-changed-63044c40-0b6c-4711-9987-e4b6dec9f8b5 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 843.416499] env[62070]: DEBUG nova.compute.manager [req-ba5c8240-d3ea-4106-9a38-eb4a637f8508 req-f217f2a0-9ca2-45bd-bf62-c3f018d3c386 service nova] [instance: e5deccf6-f967-4e3c-bee0-2e1ad0bb4560] Refreshing instance network info cache due to event network-changed-63044c40-0b6c-4711-9987-e4b6dec9f8b5. {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 843.416740] env[62070]: DEBUG oslo_concurrency.lockutils [req-ba5c8240-d3ea-4106-9a38-eb4a637f8508 req-f217f2a0-9ca2-45bd-bf62-c3f018d3c386 service nova] Acquiring lock "refresh_cache-e5deccf6-f967-4e3c-bee0-2e1ad0bb4560" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 843.527450] env[62070]: DEBUG oslo_vmware.api [None req-bffd3cab-e369-40e8-ad65-4e451f071c3d tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Task: {'id': task-1121826, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.2009} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 843.527450] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-bffd3cab-e369-40e8-ad65-4e451f071c3d tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Deleted the datastore file {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 843.527450] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-bffd3cab-e369-40e8-ad65-4e451f071c3d tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] [instance: 242cf24f-f9e0-49ca-9b3e-a2b88b3cdcdc] Deleted contents of the VM from datastore datastore2 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 843.527450] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-bffd3cab-e369-40e8-ad65-4e451f071c3d tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] [instance: 242cf24f-f9e0-49ca-9b3e-a2b88b3cdcdc] Instance destroyed {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 843.527450] env[62070]: INFO nova.compute.manager [None req-bffd3cab-e369-40e8-ad65-4e451f071c3d tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] [instance: 242cf24f-f9e0-49ca-9b3e-a2b88b3cdcdc] Took 1.15 seconds to destroy the instance on the hypervisor. [ 843.527450] env[62070]: DEBUG oslo.service.loopingcall [None req-bffd3cab-e369-40e8-ad65-4e451f071c3d tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 843.527450] env[62070]: DEBUG nova.compute.manager [-] [instance: 242cf24f-f9e0-49ca-9b3e-a2b88b3cdcdc] Deallocating network for instance {{(pid=62070) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 843.527450] env[62070]: DEBUG nova.network.neutron [-] [instance: 242cf24f-f9e0-49ca-9b3e-a2b88b3cdcdc] deallocate_for_instance() {{(pid=62070) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 843.549022] env[62070]: DEBUG nova.objects.base [None req-3c937880-263f-4058-86ea-f550d2687498 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Object Instance<71aead12-a182-40a7-b5a9-91c01271b800> lazy-loaded attributes: flavor,info_cache {{(pid=62070) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 843.608288] env[62070]: DEBUG oslo_vmware.api [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Task: {'id': task-1121820, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.549669} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 843.608518] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore2] 27987ff6-77c9-4876-8b39-dcc20ce4158a/27987ff6-77c9-4876-8b39-dcc20ce4158a.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 843.608742] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] [instance: 27987ff6-77c9-4876-8b39-dcc20ce4158a] Extending root virtual disk to 1048576 {{(pid=62070) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 843.609015] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ec4015f3-05df-4e04-9fab-60628f4c0663 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.617205] env[62070]: DEBUG oslo_vmware.api [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Waiting for the task: (returnval){ [ 843.617205] env[62070]: value = "task-1121830" [ 843.617205] env[62070]: _type = "Task" [ 843.617205] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 843.632976] env[62070]: DEBUG oslo_vmware.api [None req-e6ec19d6-6f98-4a3b-a887-49dc36df65de tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Task: {'id': task-1121823, 'name': PowerOffVM_Task, 'duration_secs': 0.82757} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 843.635924] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-e6ec19d6-6f98-4a3b-a887-49dc36df65de tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] [instance: 3d22f50a-e1b7-48f9-a044-df64d01dfeb4] Powered off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 843.636338] env[62070]: DEBUG oslo_vmware.api [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Task: {'id': task-1121830, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 843.637322] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a50e72d-e6f4-4387-b630-0bd6d5c27890 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.659503] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83b32695-1070-49e5-a80f-a063bae141dc {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.699736] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-e6ec19d6-6f98-4a3b-a887-49dc36df65de tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] [instance: 3d22f50a-e1b7-48f9-a044-df64d01dfeb4] Powering off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 843.699995] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5228b066-7602-4317-9f02-1dce9c5b31e2 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.713840] env[62070]: DEBUG oslo_vmware.api [None req-e6ec19d6-6f98-4a3b-a887-49dc36df65de tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Waiting for the task: (returnval){ [ 843.713840] env[62070]: value = "task-1121831" [ 843.713840] env[62070]: _type = "Task" [ 843.713840] env[62070]: } to complete. 
{{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 843.731785] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-e6ec19d6-6f98-4a3b-a887-49dc36df65de tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] [instance: 3d22f50a-e1b7-48f9-a044-df64d01dfeb4] VM already powered off {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1509}} [ 843.732110] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-e6ec19d6-6f98-4a3b-a887-49dc36df65de tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] [instance: 3d22f50a-e1b7-48f9-a044-df64d01dfeb4] Processing image 43ea607c-7ece-4601-9b11-75c6a16aa7dd {{(pid=62070) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 843.732392] env[62070]: DEBUG oslo_concurrency.lockutils [None req-e6ec19d6-6f98-4a3b-a887-49dc36df65de tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 843.732585] env[62070]: DEBUG oslo_concurrency.lockutils [None req-e6ec19d6-6f98-4a3b-a887-49dc36df65de tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Acquired lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 843.736097] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-e6ec19d6-6f98-4a3b-a887-49dc36df65de tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 843.736097] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f2f2d899-6081-42c0-8726-3040505a76b6 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.736097] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-c218b909-099b-46da-812a-385709088649 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] [instance: e74fd58c-cfa8-45c4-8f02-96234b4a9192] VM already powered off {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1509}} [ 843.736097] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-c218b909-099b-46da-812a-385709088649 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] [instance: e74fd58c-cfa8-45c4-8f02-96234b4a9192] Processing image 43ea607c-7ece-4601-9b11-75c6a16aa7dd {{(pid=62070) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 843.736097] env[62070]: DEBUG oslo_concurrency.lockutils [None req-c218b909-099b-46da-812a-385709088649 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 843.745937] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-e6ec19d6-6f98-4a3b-a887-49dc36df65de tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 843.746164] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-e6ec19d6-6f98-4a3b-a887-49dc36df65de tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62070) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 843.746990] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2a13c2c1-fe17-4a84-80f4-c067f9e4270d {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.753608] env[62070]: DEBUG oslo_vmware.api [None req-e6ec19d6-6f98-4a3b-a887-49dc36df65de tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Waiting for the task: (returnval){ [ 843.753608] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]52acb75b-7865-f5f0-44a7-90a151185107" [ 843.753608] env[62070]: _type = "Task" [ 843.753608] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 843.762853] env[62070]: DEBUG oslo_vmware.api [None req-e6ec19d6-6f98-4a3b-a887-49dc36df65de tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52acb75b-7865-f5f0-44a7-90a151185107, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 843.805950] env[62070]: DEBUG oslo_concurrency.lockutils [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Releasing lock "refresh_cache-e5deccf6-f967-4e3c-bee0-2e1ad0bb4560" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 843.807237] env[62070]: DEBUG nova.compute.manager [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] [instance: e5deccf6-f967-4e3c-bee0-2e1ad0bb4560] Instance network_info: |[{"id": "63044c40-0b6c-4711-9987-e4b6dec9f8b5", "address": "fa:16:3e:9d:f9:59", "network": {"id": "b61ea502-bdfd-4ddf-8bb9-1f2f2f003f65", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-216003123-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3e9dab208bda46418b994df4359da404", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "099fe970-c61f-4480-bed4-ae4f485fd82a", "external-id": "nsx-vlan-transportzone-678", "segmentation_id": 678, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap63044c40-0b", "ovs_interfaceid": "63044c40-0b6c-4711-9987-e4b6dec9f8b5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 843.807237] env[62070]: DEBUG oslo_concurrency.lockutils [req-ba5c8240-d3ea-4106-9a38-eb4a637f8508 req-f217f2a0-9ca2-45bd-bf62-c3f018d3c386 service nova] Acquired lock "refresh_cache-e5deccf6-f967-4e3c-bee0-2e1ad0bb4560" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 843.807391] env[62070]: DEBUG nova.network.neutron [req-ba5c8240-d3ea-4106-9a38-eb4a637f8508 req-f217f2a0-9ca2-45bd-bf62-c3f018d3c386 service nova] [instance: e5deccf6-f967-4e3c-bee0-2e1ad0bb4560] Refreshing network info cache for port 63044c40-0b6c-4711-9987-e4b6dec9f8b5 {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 843.812205] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] [instance: e5deccf6-f967-4e3c-bee0-2e1ad0bb4560] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:9d:f9:59', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '099fe970-c61f-4480-bed4-ae4f485fd82a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '63044c40-0b6c-4711-9987-e4b6dec9f8b5', 'vif_model': 'vmxnet3'}] {{(pid=62070) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 843.825158] env[62070]: DEBUG oslo.service.loopingcall [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 
tempest-MultipleCreateTestJSON-2024650253-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 843.826015] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e5deccf6-f967-4e3c-bee0-2e1ad0bb4560] Creating VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 843.829818] env[62070]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ad29d1ce-d0e7-4b06-bfdc-bf8dcffa8211 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.861658] env[62070]: DEBUG oslo_vmware.api [None req-759bf497-5e25-4b21-809d-f1952bc85c78 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Task: {'id': task-1121829, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.199711} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 843.864242] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-759bf497-5e25-4b21-809d-f1952bc85c78 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Deleted the datastore file {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 843.864432] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-759bf497-5e25-4b21-809d-f1952bc85c78 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: 65fe3720-95cb-4620-b1c7-eae9e3bc3943] Deleted contents of the VM from datastore datastore2 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 843.864730] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-759bf497-5e25-4b21-809d-f1952bc85c78 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: 65fe3720-95cb-4620-b1c7-eae9e3bc3943] Instance destroyed {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 843.865059] env[62070]: INFO nova.compute.manager [None req-759bf497-5e25-4b21-809d-f1952bc85c78 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: 65fe3720-95cb-4620-b1c7-eae9e3bc3943] Took 1.20 seconds to destroy the instance on the hypervisor. [ 843.865460] env[62070]: DEBUG oslo.service.loopingcall [None req-759bf497-5e25-4b21-809d-f1952bc85c78 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 843.865931] env[62070]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 843.865931] env[62070]: value = "task-1121832" [ 843.865931] env[62070]: _type = "Task" [ 843.865931] env[62070]: } to complete. 
{{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 843.866156] env[62070]: DEBUG nova.compute.manager [-] [instance: 65fe3720-95cb-4620-b1c7-eae9e3bc3943] Deallocating network for instance {{(pid=62070) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 843.866258] env[62070]: DEBUG nova.network.neutron [-] [instance: 65fe3720-95cb-4620-b1c7-eae9e3bc3943] deallocate_for_instance() {{(pid=62070) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 843.883087] env[62070]: DEBUG oslo_concurrency.lockutils [None req-d1ae98c6-774b-4beb-8cc9-59c38f859b35 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.651s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 843.883644] env[62070]: DEBUG nova.compute.manager [None req-d1ae98c6-774b-4beb-8cc9-59c38f859b35 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 62758a38-4819-4d5a-97ed-db6c9ceb97bf] Start building networks asynchronously for instance. {{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 843.886634] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1121832, 'name': CreateVM_Task} progress is 5%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 843.887235] env[62070]: DEBUG oslo_concurrency.lockutils [None req-b541edfd-baeb-42f7-9f84-c3dcd80fd4e7 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 21.161s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 843.887775] env[62070]: DEBUG nova.objects.instance [None req-b541edfd-baeb-42f7-9f84-c3dcd80fd4e7 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Lazy-loading 'resources' on Instance uuid 0ac963b1-120a-464b-8228-3393135dd182 {{(pid=62070) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 844.131535] env[62070]: DEBUG oslo_vmware.api [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Task: {'id': task-1121830, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.19583} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 844.131862] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] [instance: 27987ff6-77c9-4876-8b39-dcc20ce4158a] Extended root virtual disk {{(pid=62070) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 844.132712] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3cb03eef-f103-4322-b7eb-161b9849d94a {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.162836] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] [instance: 27987ff6-77c9-4876-8b39-dcc20ce4158a] Reconfiguring VM instance instance-00000042 to attach disk [datastore2] 27987ff6-77c9-4876-8b39-dcc20ce4158a/27987ff6-77c9-4876-8b39-dcc20ce4158a.vmdk or device None with type sparse {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 844.167306] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-354f731f-35b9-40b4-b18a-a82c25560654 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.194090] env[62070]: DEBUG oslo_vmware.api [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Waiting for the task: (returnval){ [ 844.194090] env[62070]: value = "task-1121833" [ 844.194090] env[62070]: _type = "Task" [ 844.194090] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 844.206985] env[62070]: DEBUG oslo_vmware.api [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Task: {'id': task-1121833, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.269133] env[62070]: DEBUG oslo_vmware.api [None req-e6ec19d6-6f98-4a3b-a887-49dc36df65de tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52acb75b-7865-f5f0-44a7-90a151185107, 'name': SearchDatastore_Task, 'duration_secs': 0.019561} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 844.270109] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ef592784-ae44-439f-b060-14b91a42e986 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.277322] env[62070]: DEBUG oslo_vmware.api [None req-e6ec19d6-6f98-4a3b-a887-49dc36df65de tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Waiting for the task: (returnval){ [ 844.277322] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]52a62122-d56f-aeb7-150e-7ff448086c58" [ 844.277322] env[62070]: _type = "Task" [ 844.277322] env[62070]: } to complete. 
{{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 844.291919] env[62070]: DEBUG oslo_vmware.api [None req-e6ec19d6-6f98-4a3b-a887-49dc36df65de tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52a62122-d56f-aeb7-150e-7ff448086c58, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.381349] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1121832, 'name': CreateVM_Task, 'duration_secs': 0.396785} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 844.381736] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e5deccf6-f967-4e3c-bee0-2e1ad0bb4560] Created VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 844.382325] env[62070]: DEBUG oslo_concurrency.lockutils [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 844.382500] env[62070]: DEBUG oslo_concurrency.lockutils [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Acquired lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 844.382840] env[62070]: DEBUG oslo_concurrency.lockutils [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 844.383133] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b885ef95-a5aa-4ca0-bddb-aba14aecb2db {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.389175] env[62070]: DEBUG nova.compute.utils [None req-d1ae98c6-774b-4beb-8cc9-59c38f859b35 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Using /dev/sd instead of None {{(pid=62070) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 844.391976] env[62070]: DEBUG nova.compute.manager [None req-d1ae98c6-774b-4beb-8cc9-59c38f859b35 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 62758a38-4819-4d5a-97ed-db6c9ceb97bf] Allocating IP information in the background. 
{{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 844.392257] env[62070]: DEBUG nova.network.neutron [None req-d1ae98c6-774b-4beb-8cc9-59c38f859b35 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 62758a38-4819-4d5a-97ed-db6c9ceb97bf] allocate_for_instance() {{(pid=62070) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 844.394050] env[62070]: DEBUG oslo_vmware.api [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Waiting for the task: (returnval){ [ 844.394050] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]5275efa3-e7aa-6640-cff4-d3a7e2c0a2d5" [ 844.394050] env[62070]: _type = "Task" [ 844.394050] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 844.410983] env[62070]: DEBUG oslo_vmware.api [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]5275efa3-e7aa-6640-cff4-d3a7e2c0a2d5, 'name': SearchDatastore_Task, 'duration_secs': 0.01756} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 844.411834] env[62070]: DEBUG oslo_concurrency.lockutils [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Releasing lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 844.412279] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] [instance: e5deccf6-f967-4e3c-bee0-2e1ad0bb4560] Processing image 43ea607c-7ece-4601-9b11-75c6a16aa7dd {{(pid=62070) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 844.412450] env[62070]: DEBUG oslo_concurrency.lockutils [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 844.412520] env[62070]: DEBUG oslo_concurrency.lockutils [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Acquired lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 844.414631] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 844.416182] env[62070]: DEBUG oslo_vmware.service [-] Invoking 
FileManager.MakeDirectory with opID=oslo.vmware-2263e4a4-5555-4570-a2be-a93dd6fd66b5 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.431412] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 844.431619] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62070) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 844.432493] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5993ae76-bd62-4e74-a132-3f94846d9ab3 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.435363] env[62070]: DEBUG nova.network.neutron [-] [instance: 242cf24f-f9e0-49ca-9b3e-a2b88b3cdcdc] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 844.439516] env[62070]: DEBUG oslo_vmware.api [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Waiting for the task: (returnval){ [ 844.439516] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]52b424d3-827e-76ab-fdf3-917caeb766fb" [ 844.439516] env[62070]: _type = "Task" [ 844.439516] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 844.449495] env[62070]: DEBUG oslo_vmware.api [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52b424d3-827e-76ab-fdf3-917caeb766fb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.649288] env[62070]: DEBUG nova.policy [None req-d1ae98c6-774b-4beb-8cc9-59c38f859b35 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f866f97eed1a41b39b4cd552102c6e21', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9191f0e6c2ee401abca64c0780e230bf', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62070) authorize /opt/stack/nova/nova/policy.py:201}} [ 844.707804] env[62070]: DEBUG oslo_vmware.api [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Task: {'id': task-1121833, 'name': ReconfigVM_Task, 'duration_secs': 0.372756} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 844.710664] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] [instance: 27987ff6-77c9-4876-8b39-dcc20ce4158a] Reconfigured VM instance instance-00000042 to attach disk [datastore2] 27987ff6-77c9-4876-8b39-dcc20ce4158a/27987ff6-77c9-4876-8b39-dcc20ce4158a.vmdk or device None with type sparse {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 844.712266] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c44e3197-cc49-42af-895c-97fbdc798b31 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.721920] env[62070]: DEBUG oslo_vmware.api [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Waiting for the task: (returnval){ [ 844.721920] env[62070]: value = "task-1121834" [ 844.721920] env[62070]: _type = "Task" [ 844.721920] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 844.741987] env[62070]: DEBUG oslo_vmware.api [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Task: {'id': task-1121834, 'name': Rename_Task} progress is 6%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.797755] env[62070]: DEBUG oslo_vmware.api [None req-e6ec19d6-6f98-4a3b-a887-49dc36df65de tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52a62122-d56f-aeb7-150e-7ff448086c58, 'name': SearchDatastore_Task, 'duration_secs': 0.015522} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 844.798342] env[62070]: DEBUG oslo_concurrency.lockutils [None req-e6ec19d6-6f98-4a3b-a887-49dc36df65de tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Releasing lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 844.798685] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-e6ec19d6-6f98-4a3b-a887-49dc36df65de tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore1] 3d22f50a-e1b7-48f9-a044-df64d01dfeb4/43ea607c-7ece-4601-9b11-75c6a16aa7dd-rescue.vmdk. 
{{(pid=62070) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 844.799059] env[62070]: DEBUG oslo_concurrency.lockutils [None req-c218b909-099b-46da-812a-385709088649 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Acquired lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 844.799325] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-c218b909-099b-46da-812a-385709088649 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 844.799636] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c273500c-bfe1-459a-869c-1b9107e5ea24 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.801855] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f7f95ea1-25c7-41e1-835d-2f1e61d095e1 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.813098] env[62070]: DEBUG oslo_vmware.api [None req-e6ec19d6-6f98-4a3b-a887-49dc36df65de tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Waiting for the task: (returnval){ [ 844.813098] env[62070]: value = "task-1121835" [ 844.813098] env[62070]: _type = "Task" [ 844.813098] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 844.815154] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-c218b909-099b-46da-812a-385709088649 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 844.815428] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-c218b909-099b-46da-812a-385709088649 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62070) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 844.820968] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8b4fc94f-4906-4754-9ed7-f1879f0d263a {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.831608] env[62070]: DEBUG oslo_vmware.api [None req-e6ec19d6-6f98-4a3b-a887-49dc36df65de tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Task: {'id': task-1121835, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.835794] env[62070]: DEBUG oslo_vmware.api [None req-c218b909-099b-46da-812a-385709088649 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Waiting for the task: (returnval){ [ 844.835794] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]521d4577-1e29-f9dd-d76c-dee5bbf1b8bd" [ 844.835794] env[62070]: _type = "Task" [ 844.835794] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 844.847141] env[62070]: DEBUG oslo_vmware.api [None req-c218b909-099b-46da-812a-385709088649 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]521d4577-1e29-f9dd-d76c-dee5bbf1b8bd, 'name': SearchDatastore_Task, 'duration_secs': 0.010137} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 844.850630] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7f992495-e9ca-454c-ad74-655158bf3329 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.860461] env[62070]: DEBUG oslo_vmware.api [None req-c218b909-099b-46da-812a-385709088649 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Waiting for the task: (returnval){ [ 844.860461] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]52591bc8-1f98-2c72-a6b0-326d98d4f542" [ 844.860461] env[62070]: _type = "Task" [ 844.860461] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 844.871496] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0a310df-d164-4a0b-a87b-7091c5c37314 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.875546] env[62070]: DEBUG oslo_vmware.api [None req-c218b909-099b-46da-812a-385709088649 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52591bc8-1f98-2c72-a6b0-326d98d4f542, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.882801] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26b0c3a5-659e-4dc5-b363-a2dc27c02b2e {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.914808] env[62070]: DEBUG nova.compute.manager [None req-d1ae98c6-774b-4beb-8cc9-59c38f859b35 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 62758a38-4819-4d5a-97ed-db6c9ceb97bf] Start building block device mappings for instance. 
{{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 844.920931] env[62070]: DEBUG nova.network.neutron [None req-7ef3c4d0-3bb0-4ce7-af88-13da4a0bcc02 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: c16d175c-0b23-4f72-bdb0-844c6f80fd32] Successfully updated port: 5a04b813-465f-4855-8707-4db273ba30fd {{(pid=62070) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 844.922550] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae77a3e4-d200-42e9-8536-d11c961b60b1 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.932784] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9eb4f3c0-489f-4ed7-81ce-aa67cc6d3534 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.940196] env[62070]: INFO nova.compute.manager [-] [instance: 242cf24f-f9e0-49ca-9b3e-a2b88b3cdcdc] Took 1.41 seconds to deallocate network for instance. [ 844.960240] env[62070]: DEBUG nova.compute.provider_tree [None req-b541edfd-baeb-42f7-9f84-c3dcd80fd4e7 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 844.962729] env[62070]: DEBUG nova.network.neutron [None req-3c937880-263f-4058-86ea-f550d2687498 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 71aead12-a182-40a7-b5a9-91c01271b800] Updating instance_info_cache with network_info: [{"id": "a3ed0957-14c2-4144-8d45-f4a0e5cb45ab", "address": "fa:16:3e:3c:6a:3d", "network": {"id": "1ca75409-ae3f-4837-a2b6-ed2445253336", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1883034081-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.208", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f29ac48ab6544ec0bd1d210aec05dbc5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1559ce49-7345-443f-bf02-4bfeb88356ef", "external-id": "nsx-vlan-transportzone-670", "segmentation_id": 670, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa3ed0957-14", "ovs_interfaceid": "a3ed0957-14c2-4144-8d45-f4a0e5cb45ab", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 844.971517] env[62070]: DEBUG oslo_vmware.api [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52b424d3-827e-76ab-fdf3-917caeb766fb, 'name': SearchDatastore_Task, 'duration_secs': 0.016377} completed 
successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 844.972851] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6feca00c-bbb5-42d7-8dff-b204cbef58c9 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.981503] env[62070]: DEBUG oslo_vmware.api [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Waiting for the task: (returnval){ [ 844.981503] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]5291d462-af0d-08f0-dff4-8ca67422cf49" [ 844.981503] env[62070]: _type = "Task" [ 844.981503] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 844.992269] env[62070]: DEBUG oslo_vmware.api [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]5291d462-af0d-08f0-dff4-8ca67422cf49, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 845.055176] env[62070]: DEBUG nova.network.neutron [req-ba5c8240-d3ea-4106-9a38-eb4a637f8508 req-f217f2a0-9ca2-45bd-bf62-c3f018d3c386 service nova] [instance: e5deccf6-f967-4e3c-bee0-2e1ad0bb4560] Updated VIF entry in instance network info cache for port 63044c40-0b6c-4711-9987-e4b6dec9f8b5. {{(pid=62070) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 845.055551] env[62070]: DEBUG nova.network.neutron [req-ba5c8240-d3ea-4106-9a38-eb4a637f8508 req-f217f2a0-9ca2-45bd-bf62-c3f018d3c386 service nova] [instance: e5deccf6-f967-4e3c-bee0-2e1ad0bb4560] Updating instance_info_cache with network_info: [{"id": "63044c40-0b6c-4711-9987-e4b6dec9f8b5", "address": "fa:16:3e:9d:f9:59", "network": {"id": "b61ea502-bdfd-4ddf-8bb9-1f2f2f003f65", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-216003123-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3e9dab208bda46418b994df4359da404", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "099fe970-c61f-4480-bed4-ae4f485fd82a", "external-id": "nsx-vlan-transportzone-678", "segmentation_id": 678, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap63044c40-0b", "ovs_interfaceid": "63044c40-0b6c-4711-9987-e4b6dec9f8b5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 845.238176] env[62070]: DEBUG oslo_vmware.api [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Task: {'id': task-1121834, 'name': Rename_Task, 'duration_secs': 0.169027} completed 
successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 845.238526] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] [instance: 27987ff6-77c9-4876-8b39-dcc20ce4158a] Powering on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 845.238826] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-be97e67c-9d06-4a97-bd45-601112524fce {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.248484] env[62070]: DEBUG oslo_vmware.api [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Waiting for the task: (returnval){ [ 845.248484] env[62070]: value = "task-1121836" [ 845.248484] env[62070]: _type = "Task" [ 845.248484] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 845.259899] env[62070]: DEBUG oslo_vmware.api [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Task: {'id': task-1121836, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 845.328810] env[62070]: DEBUG oslo_vmware.api [None req-e6ec19d6-6f98-4a3b-a887-49dc36df65de tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Task: {'id': task-1121835, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.501008} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 845.329117] env[62070]: INFO nova.virt.vmwareapi.ds_util [None req-e6ec19d6-6f98-4a3b-a887-49dc36df65de tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore1] 3d22f50a-e1b7-48f9-a044-df64d01dfeb4/43ea607c-7ece-4601-9b11-75c6a16aa7dd-rescue.vmdk. 
[ 845.329928] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d823b4c-2927-4a12-a7dd-b7e4f56575b9 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.360740] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-e6ec19d6-6f98-4a3b-a887-49dc36df65de tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] [instance: 3d22f50a-e1b7-48f9-a044-df64d01dfeb4] Reconfiguring VM instance instance-0000003d to attach disk [datastore1] 3d22f50a-e1b7-48f9-a044-df64d01dfeb4/43ea607c-7ece-4601-9b11-75c6a16aa7dd-rescue.vmdk or device None with type thin {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 845.361790] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b5e7350f-48e6-47ce-b6ad-925f16648734 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.385532] env[62070]: DEBUG oslo_vmware.api [None req-c218b909-099b-46da-812a-385709088649 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52591bc8-1f98-2c72-a6b0-326d98d4f542, 'name': SearchDatastore_Task, 'duration_secs': 0.010244} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 845.389763] env[62070]: DEBUG oslo_concurrency.lockutils [None req-c218b909-099b-46da-812a-385709088649 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Releasing lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 845.389763] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-c218b909-099b-46da-812a-385709088649 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore1] e74fd58c-cfa8-45c4-8f02-96234b4a9192/43ea607c-7ece-4601-9b11-75c6a16aa7dd-rescue.vmdk. {{(pid=62070) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 845.389763] env[62070]: DEBUG oslo_vmware.api [None req-e6ec19d6-6f98-4a3b-a887-49dc36df65de tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Waiting for the task: (returnval){ [ 845.389763] env[62070]: value = "task-1121837" [ 845.389763] env[62070]: _type = "Task" [ 845.389763] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 845.389763] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a6375e4d-323d-41b2-8716-96c700fa7741 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.400814] env[62070]: DEBUG oslo_vmware.api [None req-e6ec19d6-6f98-4a3b-a887-49dc36df65de tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Task: {'id': task-1121837, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 845.401777] env[62070]: DEBUG oslo_vmware.api [None req-c218b909-099b-46da-812a-385709088649 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Waiting for the task: (returnval){ [ 845.401777] env[62070]: value = "task-1121838" [ 845.401777] env[62070]: _type = "Task" [ 845.401777] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 845.412368] env[62070]: DEBUG oslo_vmware.api [None req-c218b909-099b-46da-812a-385709088649 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Task: {'id': task-1121838, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 845.427387] env[62070]: DEBUG oslo_concurrency.lockutils [None req-7ef3c4d0-3bb0-4ce7-af88-13da4a0bcc02 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Acquiring lock "refresh_cache-c16d175c-0b23-4f72-bdb0-844c6f80fd32" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 845.427387] env[62070]: DEBUG oslo_concurrency.lockutils [None req-7ef3c4d0-3bb0-4ce7-af88-13da4a0bcc02 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Acquired lock "refresh_cache-c16d175c-0b23-4f72-bdb0-844c6f80fd32" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 845.427387] env[62070]: DEBUG nova.network.neutron [None req-7ef3c4d0-3bb0-4ce7-af88-13da4a0bcc02 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: c16d175c-0b23-4f72-bdb0-844c6f80fd32] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 845.463579] env[62070]: DEBUG oslo_concurrency.lockutils [None req-bffd3cab-e369-40e8-ad65-4e451f071c3d tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 845.466377] env[62070]: DEBUG nova.scheduler.client.report [None req-b541edfd-baeb-42f7-9f84-c3dcd80fd4e7 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 845.469816] env[62070]: DEBUG oslo_concurrency.lockutils [None req-3c937880-263f-4058-86ea-f550d2687498 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Releasing lock "refresh_cache-71aead12-a182-40a7-b5a9-91c01271b800" {{(pid=62070) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 845.500033] env[62070]: DEBUG oslo_vmware.api [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]5291d462-af0d-08f0-dff4-8ca67422cf49, 'name': SearchDatastore_Task, 'duration_secs': 0.021169} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 845.500509] env[62070]: DEBUG oslo_concurrency.lockutils [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Releasing lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 845.502215] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore2] e5deccf6-f967-4e3c-bee0-2e1ad0bb4560/e5deccf6-f967-4e3c-bee0-2e1ad0bb4560.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 845.502215] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2b2586a1-a60e-4c7a-af88-7c6bf5bc0fa6 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.512057] env[62070]: DEBUG oslo_vmware.api [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Waiting for the task: (returnval){ [ 845.512057] env[62070]: value = "task-1121839" [ 845.512057] env[62070]: _type = "Task" [ 845.512057] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 845.525909] env[62070]: DEBUG oslo_vmware.api [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Task: {'id': task-1121839, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 845.558341] env[62070]: DEBUG oslo_concurrency.lockutils [req-ba5c8240-d3ea-4106-9a38-eb4a637f8508 req-f217f2a0-9ca2-45bd-bf62-c3f018d3c386 service nova] Releasing lock "refresh_cache-e5deccf6-f967-4e3c-bee0-2e1ad0bb4560" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 845.714090] env[62070]: DEBUG nova.compute.manager [req-0fb24fee-6418-4165-9268-a927ee09cbdf req-6b5a391b-c5cf-4c87-8872-e88846de6f38 service nova] [instance: 65fe3720-95cb-4620-b1c7-eae9e3bc3943] Received event network-vif-deleted-0f1aed27-115c-407c-b787-21d92045c9fd {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 845.714414] env[62070]: INFO nova.compute.manager [req-0fb24fee-6418-4165-9268-a927ee09cbdf req-6b5a391b-c5cf-4c87-8872-e88846de6f38 service nova] [instance: 65fe3720-95cb-4620-b1c7-eae9e3bc3943] Neutron deleted interface 0f1aed27-115c-407c-b787-21d92045c9fd; detaching it from the instance and deleting it from the info cache [ 845.714690] env[62070]: DEBUG nova.network.neutron [req-0fb24fee-6418-4165-9268-a927ee09cbdf req-6b5a391b-c5cf-4c87-8872-e88846de6f38 service nova] [instance: 65fe3720-95cb-4620-b1c7-eae9e3bc3943] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 845.764208] env[62070]: DEBUG oslo_vmware.api [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Task: {'id': task-1121836, 'name': PowerOnVM_Task} progress is 92%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 845.883988] env[62070]: DEBUG nova.compute.manager [req-96bf6411-3f1a-4a44-b63c-5980ec87523e req-63cc3924-6836-48df-b4db-7dbc3da778bf service nova] [instance: 242cf24f-f9e0-49ca-9b3e-a2b88b3cdcdc] Received event network-vif-deleted-43964a23-7533-4cae-9837-85e33059c929 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 845.884376] env[62070]: DEBUG nova.compute.manager [req-96bf6411-3f1a-4a44-b63c-5980ec87523e req-63cc3924-6836-48df-b4db-7dbc3da778bf service nova] [instance: c16d175c-0b23-4f72-bdb0-844c6f80fd32] Received event network-vif-plugged-5a04b813-465f-4855-8707-4db273ba30fd {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 845.884519] env[62070]: DEBUG oslo_concurrency.lockutils [req-96bf6411-3f1a-4a44-b63c-5980ec87523e req-63cc3924-6836-48df-b4db-7dbc3da778bf service nova] Acquiring lock "c16d175c-0b23-4f72-bdb0-844c6f80fd32-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 845.884985] env[62070]: DEBUG oslo_concurrency.lockutils [req-96bf6411-3f1a-4a44-b63c-5980ec87523e req-63cc3924-6836-48df-b4db-7dbc3da778bf service nova] Lock "c16d175c-0b23-4f72-bdb0-844c6f80fd32-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 845.885152] env[62070]: DEBUG oslo_concurrency.lockutils [req-96bf6411-3f1a-4a44-b63c-5980ec87523e req-63cc3924-6836-48df-b4db-7dbc3da778bf service nova] Lock "c16d175c-0b23-4f72-bdb0-844c6f80fd32-events" 
"released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 845.885336] env[62070]: DEBUG nova.compute.manager [req-96bf6411-3f1a-4a44-b63c-5980ec87523e req-63cc3924-6836-48df-b4db-7dbc3da778bf service nova] [instance: c16d175c-0b23-4f72-bdb0-844c6f80fd32] No waiting events found dispatching network-vif-plugged-5a04b813-465f-4855-8707-4db273ba30fd {{(pid=62070) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 845.885540] env[62070]: WARNING nova.compute.manager [req-96bf6411-3f1a-4a44-b63c-5980ec87523e req-63cc3924-6836-48df-b4db-7dbc3da778bf service nova] [instance: c16d175c-0b23-4f72-bdb0-844c6f80fd32] Received unexpected event network-vif-plugged-5a04b813-465f-4855-8707-4db273ba30fd for instance with vm_state building and task_state spawning. [ 845.885736] env[62070]: DEBUG nova.compute.manager [req-96bf6411-3f1a-4a44-b63c-5980ec87523e req-63cc3924-6836-48df-b4db-7dbc3da778bf service nova] [instance: c16d175c-0b23-4f72-bdb0-844c6f80fd32] Received event network-changed-5a04b813-465f-4855-8707-4db273ba30fd {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 845.885898] env[62070]: DEBUG nova.compute.manager [req-96bf6411-3f1a-4a44-b63c-5980ec87523e req-63cc3924-6836-48df-b4db-7dbc3da778bf service nova] [instance: c16d175c-0b23-4f72-bdb0-844c6f80fd32] Refreshing instance network info cache due to event network-changed-5a04b813-465f-4855-8707-4db273ba30fd. {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 845.886084] env[62070]: DEBUG oslo_concurrency.lockutils [req-96bf6411-3f1a-4a44-b63c-5980ec87523e req-63cc3924-6836-48df-b4db-7dbc3da778bf service nova] Acquiring lock "refresh_cache-c16d175c-0b23-4f72-bdb0-844c6f80fd32" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 845.905971] env[62070]: DEBUG nova.network.neutron [-] [instance: 65fe3720-95cb-4620-b1c7-eae9e3bc3943] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 845.908529] env[62070]: DEBUG oslo_vmware.api [None req-e6ec19d6-6f98-4a3b-a887-49dc36df65de tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Task: {'id': task-1121837, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 845.928203] env[62070]: DEBUG nova.compute.manager [None req-d1ae98c6-774b-4beb-8cc9-59c38f859b35 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 62758a38-4819-4d5a-97ed-db6c9ceb97bf] Start spawning the instance on the hypervisor. {{(pid=62070) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 845.930848] env[62070]: DEBUG oslo_vmware.api [None req-c218b909-099b-46da-812a-385709088649 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Task: {'id': task-1121838, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.463561} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 845.937553] env[62070]: INFO nova.virt.vmwareapi.ds_util [None req-c218b909-099b-46da-812a-385709088649 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore1] e74fd58c-cfa8-45c4-8f02-96234b4a9192/43ea607c-7ece-4601-9b11-75c6a16aa7dd-rescue.vmdk. [ 845.941244] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6991456f-64a7-48c7-af7b-282131efb51f {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.977785] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-c218b909-099b-46da-812a-385709088649 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] [instance: e74fd58c-cfa8-45c4-8f02-96234b4a9192] Reconfiguring VM instance instance-00000040 to attach disk [datastore1] e74fd58c-cfa8-45c4-8f02-96234b4a9192/43ea607c-7ece-4601-9b11-75c6a16aa7dd-rescue.vmdk or device None with type thin {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 845.981652] env[62070]: DEBUG oslo_concurrency.lockutils [None req-b541edfd-baeb-42f7-9f84-c3dcd80fd4e7 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.094s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 845.984415] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-3c937880-263f-4058-86ea-f550d2687498 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 71aead12-a182-40a7-b5a9-91c01271b800] Powering on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 845.984731] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-24a6546e-3e39-4720-81fd-91c010ebdaff {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.002052] env[62070]: DEBUG oslo_concurrency.lockutils [None req-ce7278d1-f76f-4623-9cfe-51d678cfa5d5 tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.222s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 846.004142] env[62070]: INFO nova.compute.claims [None req-ce7278d1-f76f-4623-9cfe-51d678cfa5d5 tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] [instance: 61ab347d-1342-4f59-8955-10d575993b77] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 846.009776] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a67e210b-fb20-465d-bd29-42545c7f6e26 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.011681] env[62070]: DEBUG nova.virt.hardware [None req-d1ae98c6-774b-4beb-8cc9-59c38f859b35 tempest-DeleteServersTestJSON-2112665073 
tempest-DeleteServersTestJSON-2112665073-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T09:21:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T09:21:17Z,direct_url=,disk_format='vmdk',id=43ea607c-7ece-4601-9b11-75c6a16aa7dd,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9d42cb2bbadf40d6b35f237f71234611',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T09:21:18Z,virtual_size=,visibility=), allow threads: False {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 846.012133] env[62070]: DEBUG nova.virt.hardware [None req-d1ae98c6-774b-4beb-8cc9-59c38f859b35 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Flavor limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 846.012133] env[62070]: DEBUG nova.virt.hardware [None req-d1ae98c6-774b-4beb-8cc9-59c38f859b35 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Image limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 846.012291] env[62070]: DEBUG nova.virt.hardware [None req-d1ae98c6-774b-4beb-8cc9-59c38f859b35 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Flavor pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 846.012426] env[62070]: DEBUG nova.virt.hardware [None req-d1ae98c6-774b-4beb-8cc9-59c38f859b35 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Image pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 846.012569] env[62070]: DEBUG nova.virt.hardware [None req-d1ae98c6-774b-4beb-8cc9-59c38f859b35 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 846.012805] env[62070]: DEBUG nova.virt.hardware [None req-d1ae98c6-774b-4beb-8cc9-59c38f859b35 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 846.013279] env[62070]: DEBUG nova.virt.hardware [None req-d1ae98c6-774b-4beb-8cc9-59c38f859b35 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 846.013279] env[62070]: DEBUG nova.virt.hardware [None req-d1ae98c6-774b-4beb-8cc9-59c38f859b35 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Got 1 possible topologies {{(pid=62070) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:501}} [ 846.014151] env[62070]: DEBUG nova.virt.hardware [None req-d1ae98c6-774b-4beb-8cc9-59c38f859b35 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 846.014151] env[62070]: DEBUG nova.virt.hardware [None req-d1ae98c6-774b-4beb-8cc9-59c38f859b35 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 846.015066] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98ee2e27-1015-46cb-bfc1-ee06a6ab2ed6 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.024045] env[62070]: DEBUG oslo_vmware.api [None req-c218b909-099b-46da-812a-385709088649 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Waiting for the task: (returnval){ [ 846.024045] env[62070]: value = "task-1121840" [ 846.024045] env[62070]: _type = "Task" [ 846.024045] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 846.026067] env[62070]: DEBUG oslo_vmware.api [None req-3c937880-263f-4058-86ea-f550d2687498 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Waiting for the task: (returnval){ [ 846.026067] env[62070]: value = "task-1121841" [ 846.026067] env[62070]: _type = "Task" [ 846.026067] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 846.035209] env[62070]: INFO nova.scheduler.client.report [None req-b541edfd-baeb-42f7-9f84-c3dcd80fd4e7 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Deleted allocations for instance 0ac963b1-120a-464b-8228-3393135dd182 [ 846.048880] env[62070]: DEBUG oslo_vmware.api [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Task: {'id': task-1121839, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 846.048880] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02cf715c-7213-444a-adcc-5efbb2a868f7 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.057304] env[62070]: DEBUG oslo_vmware.api [None req-c218b909-099b-46da-812a-385709088649 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Task: {'id': task-1121840, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 846.066754] env[62070]: DEBUG oslo_vmware.api [None req-3c937880-263f-4058-86ea-f550d2687498 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Task: {'id': task-1121841, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 846.081709] env[62070]: DEBUG nova.network.neutron [None req-7ef3c4d0-3bb0-4ce7-af88-13da4a0bcc02 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: c16d175c-0b23-4f72-bdb0-844c6f80fd32] Instance cache missing network info. {{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 846.219782] env[62070]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-70bdcf6c-095b-4c86-9518-7536b0998c65 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.230994] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dad6dac3-e892-40c0-b26c-6669bc6ec89b {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.271950] env[62070]: DEBUG nova.compute.manager [req-0fb24fee-6418-4165-9268-a927ee09cbdf req-6b5a391b-c5cf-4c87-8872-e88846de6f38 service nova] [instance: 65fe3720-95cb-4620-b1c7-eae9e3bc3943] Detach interface failed, port_id=0f1aed27-115c-407c-b787-21d92045c9fd, reason: Instance 65fe3720-95cb-4620-b1c7-eae9e3bc3943 could not be found. {{(pid=62070) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 846.272571] env[62070]: DEBUG oslo_vmware.api [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Task: {'id': task-1121836, 'name': PowerOnVM_Task, 'duration_secs': 0.580981} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 846.272707] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] [instance: 27987ff6-77c9-4876-8b39-dcc20ce4158a] Powered on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 846.272910] env[62070]: INFO nova.compute.manager [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] [instance: 27987ff6-77c9-4876-8b39-dcc20ce4158a] Took 9.41 seconds to spawn the instance on the hypervisor. [ 846.273101] env[62070]: DEBUG nova.compute.manager [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] [instance: 27987ff6-77c9-4876-8b39-dcc20ce4158a] Checking state {{(pid=62070) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 846.274021] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27acda2a-3019-48ff-929b-b80d0c6f53cc {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.403020] env[62070]: DEBUG oslo_vmware.api [None req-e6ec19d6-6f98-4a3b-a887-49dc36df65de tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Task: {'id': task-1121837, 'name': ReconfigVM_Task, 'duration_secs': 0.586648} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 846.403511] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-e6ec19d6-6f98-4a3b-a887-49dc36df65de tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] [instance: 3d22f50a-e1b7-48f9-a044-df64d01dfeb4] Reconfigured VM instance instance-0000003d to attach disk [datastore1] 3d22f50a-e1b7-48f9-a044-df64d01dfeb4/43ea607c-7ece-4601-9b11-75c6a16aa7dd-rescue.vmdk or device None with type thin {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 846.406077] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92dc6ccb-f2d2-48ee-b60b-4a46a2589ebf {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.413419] env[62070]: INFO nova.compute.manager [-] [instance: 65fe3720-95cb-4620-b1c7-eae9e3bc3943] Took 2.54 seconds to deallocate network for instance. [ 846.447791] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-035f3e95-c14f-4931-be61-89985a485b3e {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.470927] env[62070]: DEBUG oslo_vmware.api [None req-e6ec19d6-6f98-4a3b-a887-49dc36df65de tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Waiting for the task: (returnval){ [ 846.470927] env[62070]: value = "task-1121842" [ 846.470927] env[62070]: _type = "Task" [ 846.470927] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 846.483110] env[62070]: DEBUG oslo_vmware.api [None req-e6ec19d6-6f98-4a3b-a887-49dc36df65de tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Task: {'id': task-1121842, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 846.494565] env[62070]: DEBUG nova.network.neutron [None req-d1ae98c6-774b-4beb-8cc9-59c38f859b35 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 62758a38-4819-4d5a-97ed-db6c9ceb97bf] Successfully created port: c4a1b7aa-611b-422e-9678-70513f52b764 {{(pid=62070) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 846.531464] env[62070]: DEBUG oslo_vmware.api [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Task: {'id': task-1121839, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.679576} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 846.538421] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore2] e5deccf6-f967-4e3c-bee0-2e1ad0bb4560/e5deccf6-f967-4e3c-bee0-2e1ad0bb4560.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 846.538669] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] [instance: e5deccf6-f967-4e3c-bee0-2e1ad0bb4560] Extending root virtual disk to 1048576 {{(pid=62070) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 846.539923] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-551611a6-6748-4903-a360-e427d2e1e61c {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.549684] env[62070]: DEBUG oslo_vmware.api [None req-c218b909-099b-46da-812a-385709088649 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Task: {'id': task-1121840, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 846.554686] env[62070]: DEBUG oslo_vmware.api [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Waiting for the task: (returnval){ [ 846.554686] env[62070]: value = "task-1121843" [ 846.554686] env[62070]: _type = "Task" [ 846.554686] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 846.557733] env[62070]: DEBUG oslo_vmware.api [None req-3c937880-263f-4058-86ea-f550d2687498 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Task: {'id': task-1121841, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 846.558256] env[62070]: DEBUG oslo_concurrency.lockutils [None req-b541edfd-baeb-42f7-9f84-c3dcd80fd4e7 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Lock "0ac963b1-120a-464b-8228-3393135dd182" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 30.658s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 846.569723] env[62070]: DEBUG oslo_vmware.api [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Task: {'id': task-1121843, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 846.630763] env[62070]: DEBUG nova.network.neutron [None req-7ef3c4d0-3bb0-4ce7-af88-13da4a0bcc02 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: c16d175c-0b23-4f72-bdb0-844c6f80fd32] Updating instance_info_cache with network_info: [{"id": "5a04b813-465f-4855-8707-4db273ba30fd", "address": "fa:16:3e:8f:c5:5e", "network": {"id": "0d81bd04-b549-4e1f-97a2-0a0b9391dd3f", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-108214409-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c91e5eeeeb1742f499b2edaf76a93a3b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cf5bfbae-a882-4d34-be33-b31e274b3077", "external-id": "nsx-vlan-transportzone-556", "segmentation_id": 556, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5a04b813-46", "ovs_interfaceid": "5a04b813-465f-4855-8707-4db273ba30fd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 846.792900] env[62070]: INFO nova.compute.manager [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] [instance: 27987ff6-77c9-4876-8b39-dcc20ce4158a] Took 41.68 seconds to build instance. [ 846.946756] env[62070]: DEBUG oslo_concurrency.lockutils [None req-759bf497-5e25-4b21-809d-f1952bc85c78 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 846.985963] env[62070]: DEBUG oslo_vmware.api [None req-e6ec19d6-6f98-4a3b-a887-49dc36df65de tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Task: {'id': task-1121842, 'name': ReconfigVM_Task, 'duration_secs': 0.177497} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 846.989966] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-e6ec19d6-6f98-4a3b-a887-49dc36df65de tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] [instance: 3d22f50a-e1b7-48f9-a044-df64d01dfeb4] Powering on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 846.989966] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-779d6394-a73d-499d-976b-1b04328a9392 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.000339] env[62070]: DEBUG oslo_vmware.api [None req-e6ec19d6-6f98-4a3b-a887-49dc36df65de tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Waiting for the task: (returnval){ [ 847.000339] env[62070]: value = "task-1121844" [ 847.000339] env[62070]: _type = "Task" [ 847.000339] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 847.013172] env[62070]: DEBUG oslo_vmware.api [None req-e6ec19d6-6f98-4a3b-a887-49dc36df65de tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Task: {'id': task-1121844, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 847.046159] env[62070]: DEBUG oslo_vmware.api [None req-3c937880-263f-4058-86ea-f550d2687498 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Task: {'id': task-1121841, 'name': PowerOnVM_Task, 'duration_secs': 0.71478} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 847.049375] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-3c937880-263f-4058-86ea-f550d2687498 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 71aead12-a182-40a7-b5a9-91c01271b800] Powered on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 847.050213] env[62070]: DEBUG nova.compute.manager [None req-3c937880-263f-4058-86ea-f550d2687498 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 71aead12-a182-40a7-b5a9-91c01271b800] Checking state {{(pid=62070) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 847.050213] env[62070]: DEBUG oslo_vmware.api [None req-c218b909-099b-46da-812a-385709088649 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Task: {'id': task-1121840, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 847.051032] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8711ab72-fb56-4b2c-9fcf-8265d743ce23 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.068819] env[62070]: DEBUG oslo_vmware.api [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Task: {'id': task-1121843, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.079464} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 847.071591] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] [instance: e5deccf6-f967-4e3c-bee0-2e1ad0bb4560] Extended root virtual disk {{(pid=62070) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 847.072959] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3998e5e9-96cb-4937-a57c-516496ea9841 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.097378] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] [instance: e5deccf6-f967-4e3c-bee0-2e1ad0bb4560] Reconfiguring VM instance instance-00000043 to attach disk [datastore2] e5deccf6-f967-4e3c-bee0-2e1ad0bb4560/e5deccf6-f967-4e3c-bee0-2e1ad0bb4560.vmdk or device None with type sparse {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 847.100345] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-07d10830-80ff-4e26-b374-ff6afccc12c3 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.122239] env[62070]: DEBUG oslo_vmware.api [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Waiting for the task: (returnval){ [ 847.122239] env[62070]: value = "task-1121845" [ 847.122239] env[62070]: _type = "Task" [ 847.122239] env[62070]: } to complete. 
{{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 847.133707] env[62070]: DEBUG oslo_concurrency.lockutils [None req-7ef3c4d0-3bb0-4ce7-af88-13da4a0bcc02 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Releasing lock "refresh_cache-c16d175c-0b23-4f72-bdb0-844c6f80fd32" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 847.134064] env[62070]: DEBUG nova.compute.manager [None req-7ef3c4d0-3bb0-4ce7-af88-13da4a0bcc02 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: c16d175c-0b23-4f72-bdb0-844c6f80fd32] Instance network_info: |[{"id": "5a04b813-465f-4855-8707-4db273ba30fd", "address": "fa:16:3e:8f:c5:5e", "network": {"id": "0d81bd04-b549-4e1f-97a2-0a0b9391dd3f", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-108214409-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c91e5eeeeb1742f499b2edaf76a93a3b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cf5bfbae-a882-4d34-be33-b31e274b3077", "external-id": "nsx-vlan-transportzone-556", "segmentation_id": 556, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5a04b813-46", "ovs_interfaceid": "5a04b813-465f-4855-8707-4db273ba30fd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 847.134363] env[62070]: DEBUG oslo_vmware.api [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Task: {'id': task-1121845, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 847.137388] env[62070]: DEBUG oslo_concurrency.lockutils [req-96bf6411-3f1a-4a44-b63c-5980ec87523e req-63cc3924-6836-48df-b4db-7dbc3da778bf service nova] Acquired lock "refresh_cache-c16d175c-0b23-4f72-bdb0-844c6f80fd32" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 847.137724] env[62070]: DEBUG nova.network.neutron [req-96bf6411-3f1a-4a44-b63c-5980ec87523e req-63cc3924-6836-48df-b4db-7dbc3da778bf service nova] [instance: c16d175c-0b23-4f72-bdb0-844c6f80fd32] Refreshing network info cache for port 5a04b813-465f-4855-8707-4db273ba30fd {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 847.138903] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-7ef3c4d0-3bb0-4ce7-af88-13da4a0bcc02 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: c16d175c-0b23-4f72-bdb0-844c6f80fd32] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:8f:c5:5e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'cf5bfbae-a882-4d34-be33-b31e274b3077', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5a04b813-465f-4855-8707-4db273ba30fd', 'vif_model': 'vmxnet3'}] {{(pid=62070) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 847.147483] env[62070]: DEBUG oslo.service.loopingcall [None req-7ef3c4d0-3bb0-4ce7-af88-13da4a0bcc02 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 847.151242] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c16d175c-0b23-4f72-bdb0-844c6f80fd32] Creating VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 847.151747] env[62070]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-16ddad28-5cc9-41e6-ae97-560a99051fd4 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.178674] env[62070]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 847.178674] env[62070]: value = "task-1121846" [ 847.178674] env[62070]: _type = "Task" [ 847.178674] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 847.191998] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1121846, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 847.294573] env[62070]: DEBUG oslo_concurrency.lockutils [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Lock "27987ff6-77c9-4876-8b39-dcc20ce4158a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 79.260s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 847.418371] env[62070]: DEBUG oslo_concurrency.lockutils [None req-7045d499-6270-457a-a3fe-e34d5ae00193 tempest-ServerGroupTestJSON-951679134 tempest-ServerGroupTestJSON-951679134-project-member] Acquiring lock "71c98ac8-4149-448b-bf0c-3bfdcc8f50ef" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 847.419679] env[62070]: DEBUG oslo_concurrency.lockutils [None req-7045d499-6270-457a-a3fe-e34d5ae00193 tempest-ServerGroupTestJSON-951679134 tempest-ServerGroupTestJSON-951679134-project-member] Lock "71c98ac8-4149-448b-bf0c-3bfdcc8f50ef" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 847.516286] env[62070]: DEBUG oslo_vmware.api [None req-e6ec19d6-6f98-4a3b-a887-49dc36df65de tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Task: {'id': task-1121844, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 847.518446] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c0a7e73-8252-4fff-95bb-96b2631ea610 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.527298] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e25f393-583f-4425-a1b8-61770bef6d12 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.563767] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1f90ee7-9ad3-45d3-b365-b0e3bc21d5fa {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.572133] env[62070]: DEBUG oslo_vmware.api [None req-c218b909-099b-46da-812a-385709088649 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Task: {'id': task-1121840, 'name': ReconfigVM_Task, 'duration_secs': 1.038341} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 847.573725] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-c218b909-099b-46da-812a-385709088649 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] [instance: e74fd58c-cfa8-45c4-8f02-96234b4a9192] Reconfigured VM instance instance-00000040 to attach disk [datastore1] e74fd58c-cfa8-45c4-8f02-96234b4a9192/43ea607c-7ece-4601-9b11-75c6a16aa7dd-rescue.vmdk or device None with type thin {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 847.574831] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-249b6faa-a5d1-40e6-8125-44b4c63a96e8 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.581441] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67da591a-b4ff-421a-9aec-03d35a1e6503 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.611842] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-10db08e4-0f69-43cb-9526-c8cb5f4e629a {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.632489] env[62070]: DEBUG nova.compute.provider_tree [None req-ce7278d1-f76f-4623-9cfe-51d678cfa5d5 tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 847.644120] env[62070]: DEBUG oslo_vmware.api [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Task: {'id': task-1121845, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 847.646413] env[62070]: DEBUG oslo_vmware.api [None req-c218b909-099b-46da-812a-385709088649 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Waiting for the task: (returnval){ [ 847.646413] env[62070]: value = "task-1121847" [ 847.646413] env[62070]: _type = "Task" [ 847.646413] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 847.658387] env[62070]: DEBUG oslo_vmware.api [None req-c218b909-099b-46da-812a-385709088649 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Task: {'id': task-1121847, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 847.690393] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1121846, 'name': CreateVM_Task, 'duration_secs': 0.48652} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 847.690393] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c16d175c-0b23-4f72-bdb0-844c6f80fd32] Created VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 847.691368] env[62070]: DEBUG oslo_concurrency.lockutils [None req-7ef3c4d0-3bb0-4ce7-af88-13da4a0bcc02 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 847.692539] env[62070]: DEBUG oslo_concurrency.lockutils [None req-7ef3c4d0-3bb0-4ce7-af88-13da4a0bcc02 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Acquired lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 847.692539] env[62070]: DEBUG oslo_concurrency.lockutils [None req-7ef3c4d0-3bb0-4ce7-af88-13da4a0bcc02 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 847.692539] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7bc9edea-2a74-4441-b77d-2a8d9a4f2db7 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.702219] env[62070]: DEBUG oslo_vmware.api [None req-7ef3c4d0-3bb0-4ce7-af88-13da4a0bcc02 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Waiting for the task: (returnval){ [ 847.702219] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]528f7fd8-4d4d-a448-a971-b85433e35462" [ 847.702219] env[62070]: _type = "Task" [ 847.702219] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 847.710774] env[62070]: DEBUG oslo_vmware.api [None req-7ef3c4d0-3bb0-4ce7-af88-13da4a0bcc02 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]528f7fd8-4d4d-a448-a971-b85433e35462, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 847.922291] env[62070]: DEBUG nova.compute.manager [None req-7045d499-6270-457a-a3fe-e34d5ae00193 tempest-ServerGroupTestJSON-951679134 tempest-ServerGroupTestJSON-951679134-project-member] [instance: 71c98ac8-4149-448b-bf0c-3bfdcc8f50ef] Starting instance... {{(pid=62070) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 848.013904] env[62070]: DEBUG oslo_vmware.api [None req-e6ec19d6-6f98-4a3b-a887-49dc36df65de tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Task: {'id': task-1121844, 'name': PowerOnVM_Task, 'duration_secs': 0.53599} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 848.014565] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-e6ec19d6-6f98-4a3b-a887-49dc36df65de tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] [instance: 3d22f50a-e1b7-48f9-a044-df64d01dfeb4] Powered on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 848.020313] env[62070]: DEBUG nova.compute.manager [None req-e6ec19d6-6f98-4a3b-a887-49dc36df65de tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] [instance: 3d22f50a-e1b7-48f9-a044-df64d01dfeb4] Checking state {{(pid=62070) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 848.020313] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-669f418f-d09b-4ae8-8f67-134d35980792 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.131392] env[62070]: DEBUG nova.network.neutron [req-96bf6411-3f1a-4a44-b63c-5980ec87523e req-63cc3924-6836-48df-b4db-7dbc3da778bf service nova] [instance: c16d175c-0b23-4f72-bdb0-844c6f80fd32] Updated VIF entry in instance network info cache for port 5a04b813-465f-4855-8707-4db273ba30fd. {{(pid=62070) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 848.131825] env[62070]: DEBUG nova.network.neutron [req-96bf6411-3f1a-4a44-b63c-5980ec87523e req-63cc3924-6836-48df-b4db-7dbc3da778bf service nova] [instance: c16d175c-0b23-4f72-bdb0-844c6f80fd32] Updating instance_info_cache with network_info: [{"id": "5a04b813-465f-4855-8707-4db273ba30fd", "address": "fa:16:3e:8f:c5:5e", "network": {"id": "0d81bd04-b549-4e1f-97a2-0a0b9391dd3f", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-108214409-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c91e5eeeeb1742f499b2edaf76a93a3b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cf5bfbae-a882-4d34-be33-b31e274b3077", "external-id": "nsx-vlan-transportzone-556", "segmentation_id": 556, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5a04b813-46", "ovs_interfaceid": "5a04b813-465f-4855-8707-4db273ba30fd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 848.140611] env[62070]: DEBUG nova.scheduler.client.report [None req-ce7278d1-f76f-4623-9cfe-51d678cfa5d5 tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': 
{'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 848.157255] env[62070]: DEBUG oslo_vmware.api [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Task: {'id': task-1121845, 'name': ReconfigVM_Task, 'duration_secs': 0.758256} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 848.158170] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] [instance: e5deccf6-f967-4e3c-bee0-2e1ad0bb4560] Reconfigured VM instance instance-00000043 to attach disk [datastore2] e5deccf6-f967-4e3c-bee0-2e1ad0bb4560/e5deccf6-f967-4e3c-bee0-2e1ad0bb4560.vmdk or device None with type sparse {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 848.159399] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7ec3945f-2672-486b-861c-c04c5829ce78 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.168971] env[62070]: DEBUG oslo_vmware.api [None req-c218b909-099b-46da-812a-385709088649 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Task: {'id': task-1121847, 'name': ReconfigVM_Task, 'duration_secs': 0.242203} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 848.169407] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-c218b909-099b-46da-812a-385709088649 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] [instance: e74fd58c-cfa8-45c4-8f02-96234b4a9192] Powering on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 848.169573] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2ad50e84-a7ca-4dc4-958f-c4656752d160 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.178400] env[62070]: DEBUG oslo_vmware.api [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Waiting for the task: (returnval){ [ 848.178400] env[62070]: value = "task-1121848" [ 848.178400] env[62070]: _type = "Task" [ 848.178400] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 848.182270] env[62070]: DEBUG oslo_vmware.api [None req-c218b909-099b-46da-812a-385709088649 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Waiting for the task: (returnval){ [ 848.182270] env[62070]: value = "task-1121849" [ 848.182270] env[62070]: _type = "Task" [ 848.182270] env[62070]: } to complete. 
{{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 848.194267] env[62070]: DEBUG oslo_vmware.api [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Task: {'id': task-1121848, 'name': Rename_Task} progress is 5%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 848.201060] env[62070]: DEBUG oslo_vmware.api [None req-c218b909-099b-46da-812a-385709088649 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Task: {'id': task-1121849, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 848.215245] env[62070]: DEBUG oslo_vmware.api [None req-7ef3c4d0-3bb0-4ce7-af88-13da4a0bcc02 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]528f7fd8-4d4d-a448-a971-b85433e35462, 'name': SearchDatastore_Task, 'duration_secs': 0.015075} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 848.215689] env[62070]: DEBUG oslo_concurrency.lockutils [None req-7ef3c4d0-3bb0-4ce7-af88-13da4a0bcc02 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Releasing lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 848.215845] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-7ef3c4d0-3bb0-4ce7-af88-13da4a0bcc02 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: c16d175c-0b23-4f72-bdb0-844c6f80fd32] Processing image 43ea607c-7ece-4601-9b11-75c6a16aa7dd {{(pid=62070) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 848.216112] env[62070]: DEBUG oslo_concurrency.lockutils [None req-7ef3c4d0-3bb0-4ce7-af88-13da4a0bcc02 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 848.216273] env[62070]: DEBUG oslo_concurrency.lockutils [None req-7ef3c4d0-3bb0-4ce7-af88-13da4a0bcc02 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Acquired lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 848.216530] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-7ef3c4d0-3bb0-4ce7-af88-13da4a0bcc02 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 848.216790] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-cacad4a2-69f4-4868-8a2c-70c797992810 {{(pid=62070) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.229301] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-7ef3c4d0-3bb0-4ce7-af88-13da4a0bcc02 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 848.229546] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-7ef3c4d0-3bb0-4ce7-af88-13da4a0bcc02 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62070) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 848.230358] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-beb4445a-c052-46fc-bc7a-82ce94e31516 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.238915] env[62070]: DEBUG oslo_vmware.api [None req-7ef3c4d0-3bb0-4ce7-af88-13da4a0bcc02 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Waiting for the task: (returnval){ [ 848.238915] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]52c721c6-0478-8b0b-0077-c5d43abf3c88" [ 848.238915] env[62070]: _type = "Task" [ 848.238915] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 848.249486] env[62070]: DEBUG oslo_vmware.api [None req-7ef3c4d0-3bb0-4ce7-af88-13da4a0bcc02 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52c721c6-0478-8b0b-0077-c5d43abf3c88, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 848.454050] env[62070]: DEBUG oslo_concurrency.lockutils [None req-7045d499-6270-457a-a3fe-e34d5ae00193 tempest-ServerGroupTestJSON-951679134 tempest-ServerGroupTestJSON-951679134-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 848.635850] env[62070]: DEBUG oslo_concurrency.lockutils [req-96bf6411-3f1a-4a44-b63c-5980ec87523e req-63cc3924-6836-48df-b4db-7dbc3da778bf service nova] Releasing lock "refresh_cache-c16d175c-0b23-4f72-bdb0-844c6f80fd32" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 848.646708] env[62070]: DEBUG oslo_concurrency.lockutils [None req-ce7278d1-f76f-4623-9cfe-51d678cfa5d5 tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.644s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 848.646821] env[62070]: DEBUG nova.compute.manager [None req-ce7278d1-f76f-4623-9cfe-51d678cfa5d5 tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] [instance: 61ab347d-1342-4f59-8955-10d575993b77] Start building networks asynchronously for instance. 
{{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 848.655821] env[62070]: DEBUG oslo_concurrency.lockutils [None req-e0c1f7c9-4467-40ac-bc84-3c816df5b227 tempest-ServersNegativeTestMultiTenantJSON-2128819833 tempest-ServersNegativeTestMultiTenantJSON-2128819833-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.686s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 848.655821] env[62070]: INFO nova.compute.claims [None req-e0c1f7c9-4467-40ac-bc84-3c816df5b227 tempest-ServersNegativeTestMultiTenantJSON-2128819833 tempest-ServersNegativeTestMultiTenantJSON-2128819833-project-member] [instance: 4bba7448-69f7-4764-9ae6-eb6585f71515] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 848.697146] env[62070]: DEBUG oslo_vmware.api [None req-c218b909-099b-46da-812a-385709088649 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Task: {'id': task-1121849, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 848.698147] env[62070]: DEBUG oslo_vmware.api [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Task: {'id': task-1121848, 'name': Rename_Task, 'duration_secs': 0.218114} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 848.698571] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] [instance: e5deccf6-f967-4e3c-bee0-2e1ad0bb4560] Powering on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 848.698976] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-03f2646f-1c6b-495a-987e-1d535416fbae {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.712360] env[62070]: DEBUG oslo_vmware.api [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Waiting for the task: (returnval){ [ 848.712360] env[62070]: value = "task-1121850" [ 848.712360] env[62070]: _type = "Task" [ 848.712360] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 848.727742] env[62070]: DEBUG oslo_vmware.api [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Task: {'id': task-1121850, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 848.756948] env[62070]: DEBUG oslo_vmware.api [None req-7ef3c4d0-3bb0-4ce7-af88-13da4a0bcc02 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52c721c6-0478-8b0b-0077-c5d43abf3c88, 'name': SearchDatastore_Task, 'duration_secs': 0.014286} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 848.757758] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e455a19f-86e4-4eea-95ca-e4159d09d4ce {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.764784] env[62070]: DEBUG oslo_vmware.api [None req-7ef3c4d0-3bb0-4ce7-af88-13da4a0bcc02 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Waiting for the task: (returnval){ [ 848.764784] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]529d6fa8-2d90-af50-94c3-d1b761c05218" [ 848.764784] env[62070]: _type = "Task" [ 848.764784] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 848.778110] env[62070]: DEBUG oslo_vmware.api [None req-7ef3c4d0-3bb0-4ce7-af88-13da4a0bcc02 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]529d6fa8-2d90-af50-94c3-d1b761c05218, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 849.154615] env[62070]: DEBUG nova.compute.utils [None req-ce7278d1-f76f-4623-9cfe-51d678cfa5d5 tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] Using /dev/sd instead of None {{(pid=62070) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 849.156150] env[62070]: DEBUG nova.compute.manager [None req-ce7278d1-f76f-4623-9cfe-51d678cfa5d5 tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] [instance: 61ab347d-1342-4f59-8955-10d575993b77] Not allocating networking since 'none' was specified. {{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1977}} [ 849.197794] env[62070]: DEBUG oslo_vmware.api [None req-c218b909-099b-46da-812a-385709088649 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Task: {'id': task-1121849, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 849.220919] env[62070]: DEBUG oslo_vmware.api [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Task: {'id': task-1121850, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 849.277426] env[62070]: DEBUG oslo_vmware.api [None req-7ef3c4d0-3bb0-4ce7-af88-13da4a0bcc02 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]529d6fa8-2d90-af50-94c3-d1b761c05218, 'name': SearchDatastore_Task, 'duration_secs': 0.050947} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 849.277594] env[62070]: DEBUG oslo_concurrency.lockutils [None req-7ef3c4d0-3bb0-4ce7-af88-13da4a0bcc02 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Releasing lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 849.277875] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-7ef3c4d0-3bb0-4ce7-af88-13da4a0bcc02 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore2] c16d175c-0b23-4f72-bdb0-844c6f80fd32/c16d175c-0b23-4f72-bdb0-844c6f80fd32.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 849.278164] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0648fac9-73d3-492f-bf92-966a75ed0c2e {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.286246] env[62070]: DEBUG oslo_vmware.api [None req-7ef3c4d0-3bb0-4ce7-af88-13da4a0bcc02 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Waiting for the task: (returnval){ [ 849.286246] env[62070]: value = "task-1121851" [ 849.286246] env[62070]: _type = "Task" [ 849.286246] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 849.297093] env[62070]: DEBUG oslo_vmware.api [None req-7ef3c4d0-3bb0-4ce7-af88-13da4a0bcc02 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': task-1121851, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 849.477238] env[62070]: DEBUG nova.network.neutron [None req-d1ae98c6-774b-4beb-8cc9-59c38f859b35 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 62758a38-4819-4d5a-97ed-db6c9ceb97bf] Successfully updated port: c4a1b7aa-611b-422e-9678-70513f52b764 {{(pid=62070) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 849.535555] env[62070]: DEBUG nova.compute.manager [req-762cf75a-8842-43b8-86f2-db4b110f1142 req-e40e43d0-6d42-4bfe-9215-245c5475bebc service nova] [instance: 62758a38-4819-4d5a-97ed-db6c9ceb97bf] Received event network-vif-plugged-c4a1b7aa-611b-422e-9678-70513f52b764 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 849.535802] env[62070]: DEBUG oslo_concurrency.lockutils [req-762cf75a-8842-43b8-86f2-db4b110f1142 req-e40e43d0-6d42-4bfe-9215-245c5475bebc service nova] Acquiring lock "62758a38-4819-4d5a-97ed-db6c9ceb97bf-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 849.536601] env[62070]: DEBUG oslo_concurrency.lockutils [req-762cf75a-8842-43b8-86f2-db4b110f1142 req-e40e43d0-6d42-4bfe-9215-245c5475bebc service nova] Lock "62758a38-4819-4d5a-97ed-db6c9ceb97bf-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 849.536886] env[62070]: DEBUG oslo_concurrency.lockutils [req-762cf75a-8842-43b8-86f2-db4b110f1142 req-e40e43d0-6d42-4bfe-9215-245c5475bebc service nova] Lock "62758a38-4819-4d5a-97ed-db6c9ceb97bf-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 849.537095] env[62070]: DEBUG nova.compute.manager [req-762cf75a-8842-43b8-86f2-db4b110f1142 req-e40e43d0-6d42-4bfe-9215-245c5475bebc service nova] [instance: 62758a38-4819-4d5a-97ed-db6c9ceb97bf] No waiting events found dispatching network-vif-plugged-c4a1b7aa-611b-422e-9678-70513f52b764 {{(pid=62070) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 849.537310] env[62070]: WARNING nova.compute.manager [req-762cf75a-8842-43b8-86f2-db4b110f1142 req-e40e43d0-6d42-4bfe-9215-245c5475bebc service nova] [instance: 62758a38-4819-4d5a-97ed-db6c9ceb97bf] Received unexpected event network-vif-plugged-c4a1b7aa-611b-422e-9678-70513f52b764 for instance with vm_state building and task_state spawning. [ 849.660546] env[62070]: DEBUG nova.compute.manager [None req-ce7278d1-f76f-4623-9cfe-51d678cfa5d5 tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] [instance: 61ab347d-1342-4f59-8955-10d575993b77] Start building block device mappings for instance. {{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 849.700933] env[62070]: DEBUG oslo_vmware.api [None req-c218b909-099b-46da-812a-385709088649 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Task: {'id': task-1121849, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 849.723219] env[62070]: DEBUG oslo_vmware.api [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Task: {'id': task-1121850, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 849.801945] env[62070]: DEBUG oslo_vmware.api [None req-7ef3c4d0-3bb0-4ce7-af88-13da4a0bcc02 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': task-1121851, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 849.982947] env[62070]: DEBUG oslo_concurrency.lockutils [None req-d1ae98c6-774b-4beb-8cc9-59c38f859b35 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Acquiring lock "refresh_cache-62758a38-4819-4d5a-97ed-db6c9ceb97bf" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 849.983156] env[62070]: DEBUG oslo_concurrency.lockutils [None req-d1ae98c6-774b-4beb-8cc9-59c38f859b35 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Acquired lock "refresh_cache-62758a38-4819-4d5a-97ed-db6c9ceb97bf" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 849.983309] env[62070]: DEBUG nova.network.neutron [None req-d1ae98c6-774b-4beb-8cc9-59c38f859b35 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 62758a38-4819-4d5a-97ed-db6c9ceb97bf] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 850.098740] env[62070]: INFO nova.compute.manager [None req-7f94bbb7-1fd6-441b-8175-a6f259611f31 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] [instance: 3d22f50a-e1b7-48f9-a044-df64d01dfeb4] Unrescuing [ 850.100811] env[62070]: DEBUG oslo_concurrency.lockutils [None req-7f94bbb7-1fd6-441b-8175-a6f259611f31 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Acquiring lock "refresh_cache-3d22f50a-e1b7-48f9-a044-df64d01dfeb4" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 850.100811] env[62070]: DEBUG oslo_concurrency.lockutils [None req-7f94bbb7-1fd6-441b-8175-a6f259611f31 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Acquired lock "refresh_cache-3d22f50a-e1b7-48f9-a044-df64d01dfeb4" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 850.100811] env[62070]: DEBUG nova.network.neutron [None req-7f94bbb7-1fd6-441b-8175-a6f259611f31 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] [instance: 3d22f50a-e1b7-48f9-a044-df64d01dfeb4] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 850.167018] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6ec7962-9734-4618-bf6a-821962c94ada 
{{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.186759] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a733c6c6-7c42-4eb3-8434-004e73d465f7 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.237710] env[62070]: DEBUG oslo_vmware.api [None req-c218b909-099b-46da-812a-385709088649 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Task: {'id': task-1121849, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 850.242650] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-293ad44f-76be-40d7-bfc8-e2a8b6ba0be4 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.253792] env[62070]: DEBUG oslo_vmware.api [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Task: {'id': task-1121850, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 850.255684] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4a33182-79cd-40d8-9df8-274f692b1a78 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.270494] env[62070]: DEBUG nova.compute.provider_tree [None req-e0c1f7c9-4467-40ac-bc84-3c816df5b227 tempest-ServersNegativeTestMultiTenantJSON-2128819833 tempest-ServersNegativeTestMultiTenantJSON-2128819833-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 850.298288] env[62070]: DEBUG oslo_vmware.api [None req-7ef3c4d0-3bb0-4ce7-af88-13da4a0bcc02 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': task-1121851, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.83708} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 850.298550] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-7ef3c4d0-3bb0-4ce7-af88-13da4a0bcc02 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore2] c16d175c-0b23-4f72-bdb0-844c6f80fd32/c16d175c-0b23-4f72-bdb0-844c6f80fd32.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 850.298764] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-7ef3c4d0-3bb0-4ce7-af88-13da4a0bcc02 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: c16d175c-0b23-4f72-bdb0-844c6f80fd32] Extending root virtual disk to 1048576 {{(pid=62070) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 850.302020] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7d271403-ddca-41f4-9487-963e98463f43 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.307590] env[62070]: DEBUG oslo_vmware.api [None req-7ef3c4d0-3bb0-4ce7-af88-13da4a0bcc02 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Waiting for the task: (returnval){ [ 850.307590] env[62070]: value = "task-1121852" [ 850.307590] env[62070]: _type = "Task" [ 850.307590] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 850.316341] env[62070]: DEBUG oslo_vmware.api [None req-7ef3c4d0-3bb0-4ce7-af88-13da4a0bcc02 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': task-1121852, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 850.565152] env[62070]: DEBUG nova.network.neutron [None req-d1ae98c6-774b-4beb-8cc9-59c38f859b35 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 62758a38-4819-4d5a-97ed-db6c9ceb97bf] Instance cache missing network info. {{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 850.682075] env[62070]: DEBUG nova.compute.manager [None req-ce7278d1-f76f-4623-9cfe-51d678cfa5d5 tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] [instance: 61ab347d-1342-4f59-8955-10d575993b77] Start spawning the instance on the hypervisor. {{(pid=62070) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 850.704953] env[62070]: DEBUG oslo_vmware.api [None req-c218b909-099b-46da-812a-385709088649 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Task: {'id': task-1121849, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 850.715770] env[62070]: DEBUG nova.virt.hardware [None req-ce7278d1-f76f-4623-9cfe-51d678cfa5d5 tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T09:21:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T09:21:17Z,direct_url=,disk_format='vmdk',id=43ea607c-7ece-4601-9b11-75c6a16aa7dd,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9d42cb2bbadf40d6b35f237f71234611',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T09:21:18Z,virtual_size=,visibility=), allow threads: False {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 850.716149] env[62070]: DEBUG nova.virt.hardware [None req-ce7278d1-f76f-4623-9cfe-51d678cfa5d5 tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] Flavor limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 850.716149] env[62070]: DEBUG nova.virt.hardware [None req-ce7278d1-f76f-4623-9cfe-51d678cfa5d5 tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] Image limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 850.716344] env[62070]: DEBUG nova.virt.hardware [None req-ce7278d1-f76f-4623-9cfe-51d678cfa5d5 tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] Flavor pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 850.716493] env[62070]: DEBUG nova.virt.hardware [None req-ce7278d1-f76f-4623-9cfe-51d678cfa5d5 tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] Image pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 850.716673] env[62070]: DEBUG nova.virt.hardware [None req-ce7278d1-f76f-4623-9cfe-51d678cfa5d5 tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 850.716896] env[62070]: DEBUG nova.virt.hardware [None req-ce7278d1-f76f-4623-9cfe-51d678cfa5d5 tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 850.717079] env[62070]: DEBUG nova.virt.hardware [None req-ce7278d1-f76f-4623-9cfe-51d678cfa5d5 tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 850.717255] env[62070]: DEBUG nova.virt.hardware [None 
req-ce7278d1-f76f-4623-9cfe-51d678cfa5d5 tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] Got 1 possible topologies {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 850.717425] env[62070]: DEBUG nova.virt.hardware [None req-ce7278d1-f76f-4623-9cfe-51d678cfa5d5 tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 850.717635] env[62070]: DEBUG nova.virt.hardware [None req-ce7278d1-f76f-4623-9cfe-51d678cfa5d5 tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 850.718620] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e89ffaf0-24d2-45e1-93f3-71f98ced3e8d {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.727869] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62729aca-8ab4-41cc-a185-2bebd3b81ae8 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.750878] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-ce7278d1-f76f-4623-9cfe-51d678cfa5d5 tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] [instance: 61ab347d-1342-4f59-8955-10d575993b77] Instance VIF info [] {{(pid=62070) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 850.758076] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-ce7278d1-f76f-4623-9cfe-51d678cfa5d5 tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] Creating folder: Project (8e5b406e3b224b809ce37b3b95ce53a1). Parent ref: group-v245319. {{(pid=62070) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 850.758815] env[62070]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-bb7f1581-42ef-4e14-8d8e-c68cb1bbfcab {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.765062] env[62070]: DEBUG oslo_vmware.api [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Task: {'id': task-1121850, 'name': PowerOnVM_Task} progress is 33%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 850.771493] env[62070]: INFO nova.virt.vmwareapi.vm_util [None req-ce7278d1-f76f-4623-9cfe-51d678cfa5d5 tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] Created folder: Project (8e5b406e3b224b809ce37b3b95ce53a1) in parent group-v245319. [ 850.771844] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-ce7278d1-f76f-4623-9cfe-51d678cfa5d5 tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] Creating folder: Instances. Parent ref: group-v245424. 
{{(pid=62070) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 850.772010] env[62070]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0bced916-d196-4eb3-93d3-1c5b96ce7d4f {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.774586] env[62070]: DEBUG nova.scheduler.client.report [None req-e0c1f7c9-4467-40ac-bc84-3c816df5b227 tempest-ServersNegativeTestMultiTenantJSON-2128819833 tempest-ServersNegativeTestMultiTenantJSON-2128819833-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 850.791023] env[62070]: INFO nova.virt.vmwareapi.vm_util [None req-ce7278d1-f76f-4623-9cfe-51d678cfa5d5 tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] Created folder: Instances in parent group-v245424. [ 850.791023] env[62070]: DEBUG oslo.service.loopingcall [None req-ce7278d1-f76f-4623-9cfe-51d678cfa5d5 tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 850.791023] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 61ab347d-1342-4f59-8955-10d575993b77] Creating VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 850.791023] env[62070]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-48c346cc-c970-4b11-ad36-ebadbeaca578 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.815023] env[62070]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 850.815023] env[62070]: value = "task-1121855" [ 850.815023] env[62070]: _type = "Task" [ 850.815023] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 850.828698] env[62070]: DEBUG oslo_vmware.api [None req-7ef3c4d0-3bb0-4ce7-af88-13da4a0bcc02 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': task-1121852, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 850.837231] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1121855, 'name': CreateVM_Task} progress is 5%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 851.052303] env[62070]: DEBUG nova.network.neutron [None req-d1ae98c6-774b-4beb-8cc9-59c38f859b35 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 62758a38-4819-4d5a-97ed-db6c9ceb97bf] Updating instance_info_cache with network_info: [{"id": "c4a1b7aa-611b-422e-9678-70513f52b764", "address": "fa:16:3e:12:de:d6", "network": {"id": "5ea0fffc-372c-450e-b27b-10959077d58f", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1853458988-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9191f0e6c2ee401abca64c0780e230bf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f3c995e9-7f2f-420c-880a-d60da6e708ad", "external-id": "nsx-vlan-transportzone-166", "segmentation_id": 166, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc4a1b7aa-61", "ovs_interfaceid": "c4a1b7aa-611b-422e-9678-70513f52b764", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 851.200197] env[62070]: DEBUG nova.network.neutron [None req-7f94bbb7-1fd6-441b-8175-a6f259611f31 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] [instance: 3d22f50a-e1b7-48f9-a044-df64d01dfeb4] Updating instance_info_cache with network_info: [{"id": "aef8b9b0-4bbd-4af6-b65d-f7e964775fd4", "address": "fa:16:3e:be:d4:45", "network": {"id": "754f4ec8-0bc6-4726-8b88-1a4e1a326699", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-293486644-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1a94db233e3a43dc9aa7cf887c6cb1f6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d2742ba-c3af-4412-877d-c2811dfeba46", "external-id": "nsx-vlan-transportzone-390", "segmentation_id": 390, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaef8b9b0-4b", "ovs_interfaceid": "aef8b9b0-4bbd-4af6-b65d-f7e964775fd4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 851.211515] env[62070]: DEBUG oslo_vmware.api [None req-c218b909-099b-46da-812a-385709088649 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Task: {'id': task-1121849, 'name': PowerOnVM_Task, 'duration_secs': 2.575475} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 851.211851] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-c218b909-099b-46da-812a-385709088649 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] [instance: e74fd58c-cfa8-45c4-8f02-96234b4a9192] Powered on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 851.215784] env[62070]: DEBUG nova.compute.manager [None req-c218b909-099b-46da-812a-385709088649 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] [instance: e74fd58c-cfa8-45c4-8f02-96234b4a9192] Checking state {{(pid=62070) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 851.217314] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6766b4b3-435a-4587-b410-8ecfa294efc9 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.250755] env[62070]: DEBUG oslo_vmware.api [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Task: {'id': task-1121850, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 851.280427] env[62070]: DEBUG oslo_concurrency.lockutils [None req-e0c1f7c9-4467-40ac-bc84-3c816df5b227 tempest-ServersNegativeTestMultiTenantJSON-2128819833 tempest-ServersNegativeTestMultiTenantJSON-2128819833-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.627s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 851.281171] env[62070]: DEBUG nova.compute.manager [None req-e0c1f7c9-4467-40ac-bc84-3c816df5b227 tempest-ServersNegativeTestMultiTenantJSON-2128819833 tempest-ServersNegativeTestMultiTenantJSON-2128819833-project-member] [instance: 4bba7448-69f7-4764-9ae6-eb6585f71515] Start building networks asynchronously for instance. 
{{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 851.285411] env[62070]: DEBUG oslo_concurrency.lockutils [None req-58349f23-6bf8-4092-ac97-bf388655a094 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 18.884s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 851.287026] env[62070]: INFO nova.compute.claims [None req-58349f23-6bf8-4092-ac97-bf388655a094 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: fb054a32-c1aa-4884-a087-da5ad34cf3c4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 851.303461] env[62070]: DEBUG oslo_concurrency.lockutils [None req-24d48b36-01e1-4177-8824-76508bde5cc0 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Acquiring lock "21bcb1a6-833b-48f3-8ee2-0e49c64a104f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 851.303948] env[62070]: DEBUG oslo_concurrency.lockutils [None req-24d48b36-01e1-4177-8824-76508bde5cc0 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Lock "21bcb1a6-833b-48f3-8ee2-0e49c64a104f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 851.332314] env[62070]: DEBUG oslo_vmware.api [None req-7ef3c4d0-3bb0-4ce7-af88-13da4a0bcc02 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': task-1121852, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 851.339503] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1121855, 'name': CreateVM_Task} progress is 15%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 851.381706] env[62070]: DEBUG oslo_concurrency.lockutils [None req-b245da95-b82c-4fb0-92b9-b49d402a2811 tempest-ServerRescueTestJSONUnderV235-557690179 tempest-ServerRescueTestJSONUnderV235-557690179-project-member] Acquiring lock "559eee5b-0834-4dcf-a436-5e58644c7a3b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 851.382042] env[62070]: DEBUG oslo_concurrency.lockutils [None req-b245da95-b82c-4fb0-92b9-b49d402a2811 tempest-ServerRescueTestJSONUnderV235-557690179 tempest-ServerRescueTestJSONUnderV235-557690179-project-member] Lock "559eee5b-0834-4dcf-a436-5e58644c7a3b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 851.558019] env[62070]: DEBUG oslo_concurrency.lockutils [None req-d1ae98c6-774b-4beb-8cc9-59c38f859b35 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Releasing lock "refresh_cache-62758a38-4819-4d5a-97ed-db6c9ceb97bf" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 851.558019] env[62070]: DEBUG nova.compute.manager [None req-d1ae98c6-774b-4beb-8cc9-59c38f859b35 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 62758a38-4819-4d5a-97ed-db6c9ceb97bf] Instance network_info: |[{"id": "c4a1b7aa-611b-422e-9678-70513f52b764", "address": "fa:16:3e:12:de:d6", "network": {"id": "5ea0fffc-372c-450e-b27b-10959077d58f", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1853458988-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9191f0e6c2ee401abca64c0780e230bf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f3c995e9-7f2f-420c-880a-d60da6e708ad", "external-id": "nsx-vlan-transportzone-166", "segmentation_id": 166, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc4a1b7aa-61", "ovs_interfaceid": "c4a1b7aa-611b-422e-9678-70513f52b764", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 851.558019] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-d1ae98c6-774b-4beb-8cc9-59c38f859b35 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 62758a38-4819-4d5a-97ed-db6c9ceb97bf] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:12:de:d6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f3c995e9-7f2f-420c-880a-d60da6e708ad', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c4a1b7aa-611b-422e-9678-70513f52b764', 'vif_model': 'vmxnet3'}] 
{{(pid=62070) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 851.565797] env[62070]: DEBUG oslo.service.loopingcall [None req-d1ae98c6-774b-4beb-8cc9-59c38f859b35 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 851.566479] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 62758a38-4819-4d5a-97ed-db6c9ceb97bf] Creating VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 851.566889] env[62070]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-dd54ff1f-ae39-4d4f-a718-88b003435a88 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.602024] env[62070]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 851.602024] env[62070]: value = "task-1121856" [ 851.602024] env[62070]: _type = "Task" [ 851.602024] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 851.611412] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1121856, 'name': CreateVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 851.704052] env[62070]: DEBUG oslo_concurrency.lockutils [None req-7f94bbb7-1fd6-441b-8175-a6f259611f31 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Releasing lock "refresh_cache-3d22f50a-e1b7-48f9-a044-df64d01dfeb4" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 851.704754] env[62070]: DEBUG nova.objects.instance [None req-7f94bbb7-1fd6-441b-8175-a6f259611f31 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Lazy-loading 'flavor' on Instance uuid 3d22f50a-e1b7-48f9-a044-df64d01dfeb4 {{(pid=62070) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 851.761199] env[62070]: DEBUG oslo_vmware.api [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Task: {'id': task-1121850, 'name': PowerOnVM_Task, 'duration_secs': 2.897145} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 851.761511] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] [instance: e5deccf6-f967-4e3c-bee0-2e1ad0bb4560] Powered on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 851.761695] env[62070]: INFO nova.compute.manager [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] [instance: e5deccf6-f967-4e3c-bee0-2e1ad0bb4560] Took 12.23 seconds to spawn the instance on the hypervisor. 
[ 851.761873] env[62070]: DEBUG nova.compute.manager [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] [instance: e5deccf6-f967-4e3c-bee0-2e1ad0bb4560] Checking state {{(pid=62070) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 851.762922] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c6552a1-1757-4137-881d-e0dce72207fb {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.786890] env[62070]: DEBUG nova.compute.utils [None req-e0c1f7c9-4467-40ac-bc84-3c816df5b227 tempest-ServersNegativeTestMultiTenantJSON-2128819833 tempest-ServersNegativeTestMultiTenantJSON-2128819833-project-member] Using /dev/sd instead of None {{(pid=62070) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 851.792418] env[62070]: DEBUG nova.compute.manager [None req-e0c1f7c9-4467-40ac-bc84-3c816df5b227 tempest-ServersNegativeTestMultiTenantJSON-2128819833 tempest-ServersNegativeTestMultiTenantJSON-2128819833-project-member] [instance: 4bba7448-69f7-4764-9ae6-eb6585f71515] Allocating IP information in the background. {{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 851.792640] env[62070]: DEBUG nova.network.neutron [None req-e0c1f7c9-4467-40ac-bc84-3c816df5b227 tempest-ServersNegativeTestMultiTenantJSON-2128819833 tempest-ServersNegativeTestMultiTenantJSON-2128819833-project-member] [instance: 4bba7448-69f7-4764-9ae6-eb6585f71515] allocate_for_instance() {{(pid=62070) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 851.807813] env[62070]: DEBUG nova.compute.manager [None req-24d48b36-01e1-4177-8824-76508bde5cc0 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] [instance: 21bcb1a6-833b-48f3-8ee2-0e49c64a104f] Starting instance... {{(pid=62070) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 851.828583] env[62070]: DEBUG oslo_vmware.api [None req-7ef3c4d0-3bb0-4ce7-af88-13da4a0bcc02 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': task-1121852, 'name': ExtendVirtualDisk_Task, 'duration_secs': 1.173354} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 851.829923] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-7ef3c4d0-3bb0-4ce7-af88-13da4a0bcc02 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: c16d175c-0b23-4f72-bdb0-844c6f80fd32] Extended root virtual disk {{(pid=62070) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 851.830384] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0df45b38-36f8-4df8-a7d7-2bd04a549b58 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.837252] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1121855, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 851.863377] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-7ef3c4d0-3bb0-4ce7-af88-13da4a0bcc02 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: c16d175c-0b23-4f72-bdb0-844c6f80fd32] Reconfiguring VM instance instance-00000044 to attach disk [datastore2] c16d175c-0b23-4f72-bdb0-844c6f80fd32/c16d175c-0b23-4f72-bdb0-844c6f80fd32.vmdk or device None with type sparse {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 851.865949] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0fdb5e4c-99cd-49bb-83a3-46acfa75aff0 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.886159] env[62070]: DEBUG nova.compute.manager [req-f9165459-942c-438a-922a-6fd3e8d9ac9a req-3eb5a622-17d0-4660-ae43-1d0b5d07f4a4 service nova] [instance: 62758a38-4819-4d5a-97ed-db6c9ceb97bf] Received event network-changed-c4a1b7aa-611b-422e-9678-70513f52b764 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 851.886357] env[62070]: DEBUG nova.compute.manager [req-f9165459-942c-438a-922a-6fd3e8d9ac9a req-3eb5a622-17d0-4660-ae43-1d0b5d07f4a4 service nova] [instance: 62758a38-4819-4d5a-97ed-db6c9ceb97bf] Refreshing instance network info cache due to event network-changed-c4a1b7aa-611b-422e-9678-70513f52b764. {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 851.886554] env[62070]: DEBUG oslo_concurrency.lockutils [req-f9165459-942c-438a-922a-6fd3e8d9ac9a req-3eb5a622-17d0-4660-ae43-1d0b5d07f4a4 service nova] Acquiring lock "refresh_cache-62758a38-4819-4d5a-97ed-db6c9ceb97bf" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 851.886716] env[62070]: DEBUG oslo_concurrency.lockutils [req-f9165459-942c-438a-922a-6fd3e8d9ac9a req-3eb5a622-17d0-4660-ae43-1d0b5d07f4a4 service nova] Acquired lock "refresh_cache-62758a38-4819-4d5a-97ed-db6c9ceb97bf" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 851.886857] env[62070]: DEBUG nova.network.neutron [req-f9165459-942c-438a-922a-6fd3e8d9ac9a req-3eb5a622-17d0-4660-ae43-1d0b5d07f4a4 service nova] [instance: 62758a38-4819-4d5a-97ed-db6c9ceb97bf] Refreshing network info cache for port c4a1b7aa-611b-422e-9678-70513f52b764 {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 851.889470] env[62070]: DEBUG nova.policy [None req-e0c1f7c9-4467-40ac-bc84-3c816df5b227 tempest-ServersNegativeTestMultiTenantJSON-2128819833 tempest-ServersNegativeTestMultiTenantJSON-2128819833-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'df3a0462a58d471ca489e07786a8a385', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd85661855bb646e1935f013cb2607aec', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62070) authorize /opt/stack/nova/nova/policy.py:201}} [ 851.891813] env[62070]: DEBUG nova.compute.manager [None req-b245da95-b82c-4fb0-92b9-b49d402a2811 
tempest-ServerRescueTestJSONUnderV235-557690179 tempest-ServerRescueTestJSONUnderV235-557690179-project-member] [instance: 559eee5b-0834-4dcf-a436-5e58644c7a3b] Starting instance... {{(pid=62070) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 851.902191] env[62070]: DEBUG oslo_vmware.api [None req-7ef3c4d0-3bb0-4ce7-af88-13da4a0bcc02 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Waiting for the task: (returnval){ [ 851.902191] env[62070]: value = "task-1121857" [ 851.902191] env[62070]: _type = "Task" [ 851.902191] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 851.913561] env[62070]: DEBUG oslo_vmware.api [None req-7ef3c4d0-3bb0-4ce7-af88-13da4a0bcc02 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': task-1121857, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 852.111670] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1121856, 'name': CreateVM_Task} progress is 25%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 852.214544] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1502669-8879-4ed4-b66f-2db0fe929ce9 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.251106] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-7f94bbb7-1fd6-441b-8175-a6f259611f31 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] [instance: 3d22f50a-e1b7-48f9-a044-df64d01dfeb4] Powering off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 852.251352] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-672bde82-2e78-4765-b2a9-8dc31eec2cab {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.263895] env[62070]: DEBUG oslo_vmware.api [None req-7f94bbb7-1fd6-441b-8175-a6f259611f31 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Waiting for the task: (returnval){ [ 852.263895] env[62070]: value = "task-1121858" [ 852.263895] env[62070]: _type = "Task" [ 852.263895] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 852.271378] env[62070]: DEBUG oslo_vmware.api [None req-7f94bbb7-1fd6-441b-8175-a6f259611f31 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Task: {'id': task-1121858, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 852.283374] env[62070]: INFO nova.compute.manager [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] [instance: e5deccf6-f967-4e3c-bee0-2e1ad0bb4560] Took 43.31 seconds to build instance. 
[ 852.291364] env[62070]: DEBUG nova.compute.manager [None req-e0c1f7c9-4467-40ac-bc84-3c816df5b227 tempest-ServersNegativeTestMultiTenantJSON-2128819833 tempest-ServersNegativeTestMultiTenantJSON-2128819833-project-member] [instance: 4bba7448-69f7-4764-9ae6-eb6585f71515] Start building block device mappings for instance. {{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 852.344381] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1121855, 'name': CreateVM_Task} progress is 25%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 852.345194] env[62070]: DEBUG oslo_concurrency.lockutils [None req-24d48b36-01e1-4177-8824-76508bde5cc0 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 852.392856] env[62070]: INFO nova.compute.manager [None req-03acc965-7789-4e44-9beb-b0ccc1f1df38 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] [instance: e74fd58c-cfa8-45c4-8f02-96234b4a9192] Unrescuing [ 852.392976] env[62070]: DEBUG oslo_concurrency.lockutils [None req-03acc965-7789-4e44-9beb-b0ccc1f1df38 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Acquiring lock "refresh_cache-e74fd58c-cfa8-45c4-8f02-96234b4a9192" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 852.393157] env[62070]: DEBUG oslo_concurrency.lockutils [None req-03acc965-7789-4e44-9beb-b0ccc1f1df38 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Acquired lock "refresh_cache-e74fd58c-cfa8-45c4-8f02-96234b4a9192" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 852.393338] env[62070]: DEBUG nova.network.neutron [None req-03acc965-7789-4e44-9beb-b0ccc1f1df38 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] [instance: e74fd58c-cfa8-45c4-8f02-96234b4a9192] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 852.420665] env[62070]: DEBUG oslo_vmware.api [None req-7ef3c4d0-3bb0-4ce7-af88-13da4a0bcc02 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': task-1121857, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 852.426151] env[62070]: DEBUG oslo_concurrency.lockutils [None req-b245da95-b82c-4fb0-92b9-b49d402a2811 tempest-ServerRescueTestJSONUnderV235-557690179 tempest-ServerRescueTestJSONUnderV235-557690179-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 852.622836] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1121856, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 852.775286] env[62070]: DEBUG oslo_vmware.api [None req-7f94bbb7-1fd6-441b-8175-a6f259611f31 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Task: {'id': task-1121858, 'name': PowerOffVM_Task, 'duration_secs': 0.287995} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 852.775614] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-7f94bbb7-1fd6-441b-8175-a6f259611f31 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] [instance: 3d22f50a-e1b7-48f9-a044-df64d01dfeb4] Powered off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 852.782103] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-7f94bbb7-1fd6-441b-8175-a6f259611f31 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] [instance: 3d22f50a-e1b7-48f9-a044-df64d01dfeb4] Reconfiguring VM instance instance-0000003d to detach disk 2001 {{(pid=62070) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 852.786368] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b9053a65-12f2-4ae8-92bc-10078ef2c0a3 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.805480] env[62070]: DEBUG oslo_concurrency.lockutils [None req-21e9fdd7-ac3e-4fb3-a962-ff9dd1a66c7f tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Lock "e5deccf6-f967-4e3c-bee0-2e1ad0bb4560" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 84.741s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 852.806726] env[62070]: DEBUG nova.network.neutron [None req-e0c1f7c9-4467-40ac-bc84-3c816df5b227 tempest-ServersNegativeTestMultiTenantJSON-2128819833 tempest-ServersNegativeTestMultiTenantJSON-2128819833-project-member] [instance: 4bba7448-69f7-4764-9ae6-eb6585f71515] Successfully created port: b1394ea1-e455-496f-9aa1-6eacc606ec0a {{(pid=62070) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 852.822089] env[62070]: DEBUG oslo_vmware.api [None req-7f94bbb7-1fd6-441b-8175-a6f259611f31 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Waiting for the task: (returnval){ [ 852.822089] env[62070]: value = "task-1121859" [ 852.822089] env[62070]: _type = "Task" [ 852.822089] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 852.838863] env[62070]: DEBUG oslo_vmware.api [None req-7f94bbb7-1fd6-441b-8175-a6f259611f31 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Task: {'id': task-1121859, 'name': ReconfigVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 852.841039] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1121855, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 852.863009] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28449624-b4a0-46d8-9974-e630cbc0fd35 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.873081] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31e959e1-9a05-4a37-82d8-9881985121e6 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.921505] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b805cf4-0639-4195-805f-5b43143b71ad {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.934333] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7df19fba-5ac1-4c84-9979-64da0534a8a0 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.939229] env[62070]: DEBUG oslo_vmware.api [None req-7ef3c4d0-3bb0-4ce7-af88-13da4a0bcc02 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': task-1121857, 'name': ReconfigVM_Task, 'duration_secs': 0.584361} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 852.939749] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-7ef3c4d0-3bb0-4ce7-af88-13da4a0bcc02 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: c16d175c-0b23-4f72-bdb0-844c6f80fd32] Reconfigured VM instance instance-00000044 to attach disk [datastore2] c16d175c-0b23-4f72-bdb0-844c6f80fd32/c16d175c-0b23-4f72-bdb0-844c6f80fd32.vmdk or device None with type sparse {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 852.941281] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-fbf05264-5b6a-4884-b2b9-86fec2111470 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.955182] env[62070]: DEBUG nova.compute.provider_tree [None req-58349f23-6bf8-4092-ac97-bf388655a094 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 852.958351] env[62070]: DEBUG oslo_vmware.api [None req-7ef3c4d0-3bb0-4ce7-af88-13da4a0bcc02 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Waiting for the task: (returnval){ [ 852.958351] env[62070]: value = "task-1121860" [ 852.958351] env[62070]: _type = "Task" [ 852.958351] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 852.969076] env[62070]: DEBUG oslo_vmware.api [None req-7ef3c4d0-3bb0-4ce7-af88-13da4a0bcc02 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': task-1121860, 'name': Rename_Task} progress is 5%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 853.090428] env[62070]: DEBUG nova.network.neutron [req-f9165459-942c-438a-922a-6fd3e8d9ac9a req-3eb5a622-17d0-4660-ae43-1d0b5d07f4a4 service nova] [instance: 62758a38-4819-4d5a-97ed-db6c9ceb97bf] Updated VIF entry in instance network info cache for port c4a1b7aa-611b-422e-9678-70513f52b764. {{(pid=62070) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 853.090812] env[62070]: DEBUG nova.network.neutron [req-f9165459-942c-438a-922a-6fd3e8d9ac9a req-3eb5a622-17d0-4660-ae43-1d0b5d07f4a4 service nova] [instance: 62758a38-4819-4d5a-97ed-db6c9ceb97bf] Updating instance_info_cache with network_info: [{"id": "c4a1b7aa-611b-422e-9678-70513f52b764", "address": "fa:16:3e:12:de:d6", "network": {"id": "5ea0fffc-372c-450e-b27b-10959077d58f", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1853458988-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9191f0e6c2ee401abca64c0780e230bf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f3c995e9-7f2f-420c-880a-d60da6e708ad", "external-id": "nsx-vlan-transportzone-166", "segmentation_id": 166, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc4a1b7aa-61", "ovs_interfaceid": "c4a1b7aa-611b-422e-9678-70513f52b764", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 853.115841] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1121856, 'name': CreateVM_Task, 'duration_secs': 1.415334} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 853.116060] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 62758a38-4819-4d5a-97ed-db6c9ceb97bf] Created VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 853.116780] env[62070]: DEBUG oslo_concurrency.lockutils [None req-d1ae98c6-774b-4beb-8cc9-59c38f859b35 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 853.117426] env[62070]: DEBUG oslo_concurrency.lockutils [None req-d1ae98c6-774b-4beb-8cc9-59c38f859b35 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Acquired lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 853.117426] env[62070]: DEBUG oslo_concurrency.lockutils [None req-d1ae98c6-774b-4beb-8cc9-59c38f859b35 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 853.117637] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9d95887a-19d2-40c8-9dd3-fcc2d6bed678 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.126348] env[62070]: DEBUG oslo_vmware.api [None req-d1ae98c6-774b-4beb-8cc9-59c38f859b35 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Waiting for the task: (returnval){ [ 853.126348] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]524afba0-1280-4dc5-2d0a-698586ae1afe" [ 853.126348] env[62070]: _type = "Task" [ 853.126348] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 853.137853] env[62070]: DEBUG oslo_vmware.api [None req-d1ae98c6-774b-4beb-8cc9-59c38f859b35 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]524afba0-1280-4dc5-2d0a-698586ae1afe, 'name': SearchDatastore_Task, 'duration_secs': 0.009859} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 853.137937] env[62070]: DEBUG oslo_concurrency.lockutils [None req-d1ae98c6-774b-4beb-8cc9-59c38f859b35 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Releasing lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 853.138211] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-d1ae98c6-774b-4beb-8cc9-59c38f859b35 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 62758a38-4819-4d5a-97ed-db6c9ceb97bf] Processing image 43ea607c-7ece-4601-9b11-75c6a16aa7dd {{(pid=62070) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 853.138441] env[62070]: DEBUG oslo_concurrency.lockutils [None req-d1ae98c6-774b-4beb-8cc9-59c38f859b35 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 853.138776] env[62070]: DEBUG oslo_concurrency.lockutils [None req-d1ae98c6-774b-4beb-8cc9-59c38f859b35 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Acquired lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 853.138841] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-d1ae98c6-774b-4beb-8cc9-59c38f859b35 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 853.139090] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-cf03ed9f-ab4e-4772-83df-6a7311533c6a {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.149286] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-d1ae98c6-774b-4beb-8cc9-59c38f859b35 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 853.149490] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-d1ae98c6-774b-4beb-8cc9-59c38f859b35 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62070) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 853.150303] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1b786609-67fa-4ead-aed6-78e17bcbe7f0 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.157383] env[62070]: DEBUG oslo_vmware.api [None req-d1ae98c6-774b-4beb-8cc9-59c38f859b35 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Waiting for the task: (returnval){ [ 853.157383] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]527a0d55-4c35-621f-4118-6b7555c7162c" [ 853.157383] env[62070]: _type = "Task" [ 853.157383] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 853.167158] env[62070]: DEBUG oslo_vmware.api [None req-d1ae98c6-774b-4beb-8cc9-59c38f859b35 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]527a0d55-4c35-621f-4118-6b7555c7162c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 853.211033] env[62070]: DEBUG nova.network.neutron [None req-03acc965-7789-4e44-9beb-b0ccc1f1df38 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] [instance: e74fd58c-cfa8-45c4-8f02-96234b4a9192] Updating instance_info_cache with network_info: [{"id": "6444a30f-9c50-4eaf-b562-178b627dc0f1", "address": "fa:16:3e:ec:99:7c", "network": {"id": "08004b49-dbc2-4186-9e28-4268e947e8ee", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-2022236674-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "2ecbd5f22c024de8a6b1c45096cb79a7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69e41c97-4d75-4041-ae71-321e7e9d480b", "external-id": "nsx-vlan-transportzone-483", "segmentation_id": 483, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6444a30f-9c", "ovs_interfaceid": "6444a30f-9c50-4eaf-b562-178b627dc0f1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 853.316985] env[62070]: DEBUG nova.compute.manager [None req-e0c1f7c9-4467-40ac-bc84-3c816df5b227 tempest-ServersNegativeTestMultiTenantJSON-2128819833 tempest-ServersNegativeTestMultiTenantJSON-2128819833-project-member] [instance: 4bba7448-69f7-4764-9ae6-eb6585f71515] Start spawning the instance on the hypervisor. 
{{(pid=62070) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 853.339510] env[62070]: DEBUG oslo_vmware.api [None req-7f94bbb7-1fd6-441b-8175-a6f259611f31 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Task: {'id': task-1121859, 'name': ReconfigVM_Task, 'duration_secs': 0.315684} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 853.339779] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-7f94bbb7-1fd6-441b-8175-a6f259611f31 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] [instance: 3d22f50a-e1b7-48f9-a044-df64d01dfeb4] Reconfigured VM instance instance-0000003d to detach disk 2001 {{(pid=62070) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 853.339988] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-7f94bbb7-1fd6-441b-8175-a6f259611f31 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] [instance: 3d22f50a-e1b7-48f9-a044-df64d01dfeb4] Powering on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 853.343274] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-08f0e676-541e-4baf-a75f-49dc012ff1af {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.350981] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1121855, 'name': CreateVM_Task, 'duration_secs': 2.156841} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 853.352099] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 61ab347d-1342-4f59-8955-10d575993b77] Created VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 853.352099] env[62070]: DEBUG oslo_concurrency.lockutils [None req-ce7278d1-f76f-4623-9cfe-51d678cfa5d5 tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 853.353510] env[62070]: DEBUG oslo_concurrency.lockutils [None req-ce7278d1-f76f-4623-9cfe-51d678cfa5d5 tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] Acquired lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 853.353510] env[62070]: DEBUG oslo_concurrency.lockutils [None req-ce7278d1-f76f-4623-9cfe-51d678cfa5d5 tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 853.353510] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fec79dd8-549c-4fc1-888c-6b7bd6b3e426 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.357186] env[62070]: DEBUG oslo_vmware.api [None 
req-7f94bbb7-1fd6-441b-8175-a6f259611f31 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Waiting for the task: (returnval){ [ 853.357186] env[62070]: value = "task-1121861" [ 853.357186] env[62070]: _type = "Task" [ 853.357186] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 853.359370] env[62070]: DEBUG nova.virt.hardware [None req-e0c1f7c9-4467-40ac-bc84-3c816df5b227 tempest-ServersNegativeTestMultiTenantJSON-2128819833 tempest-ServersNegativeTestMultiTenantJSON-2128819833-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T09:21:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T09:21:17Z,direct_url=,disk_format='vmdk',id=43ea607c-7ece-4601-9b11-75c6a16aa7dd,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9d42cb2bbadf40d6b35f237f71234611',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T09:21:18Z,virtual_size=,visibility=), allow threads: False {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 853.359610] env[62070]: DEBUG nova.virt.hardware [None req-e0c1f7c9-4467-40ac-bc84-3c816df5b227 tempest-ServersNegativeTestMultiTenantJSON-2128819833 tempest-ServersNegativeTestMultiTenantJSON-2128819833-project-member] Flavor limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 853.359773] env[62070]: DEBUG nova.virt.hardware [None req-e0c1f7c9-4467-40ac-bc84-3c816df5b227 tempest-ServersNegativeTestMultiTenantJSON-2128819833 tempest-ServersNegativeTestMultiTenantJSON-2128819833-project-member] Image limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 853.359959] env[62070]: DEBUG nova.virt.hardware [None req-e0c1f7c9-4467-40ac-bc84-3c816df5b227 tempest-ServersNegativeTestMultiTenantJSON-2128819833 tempest-ServersNegativeTestMultiTenantJSON-2128819833-project-member] Flavor pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 853.360144] env[62070]: DEBUG nova.virt.hardware [None req-e0c1f7c9-4467-40ac-bc84-3c816df5b227 tempest-ServersNegativeTestMultiTenantJSON-2128819833 tempest-ServersNegativeTestMultiTenantJSON-2128819833-project-member] Image pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 853.360327] env[62070]: DEBUG nova.virt.hardware [None req-e0c1f7c9-4467-40ac-bc84-3c816df5b227 tempest-ServersNegativeTestMultiTenantJSON-2128819833 tempest-ServersNegativeTestMultiTenantJSON-2128819833-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 853.360565] env[62070]: DEBUG nova.virt.hardware [None req-e0c1f7c9-4467-40ac-bc84-3c816df5b227 tempest-ServersNegativeTestMultiTenantJSON-2128819833 tempest-ServersNegativeTestMultiTenantJSON-2128819833-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum 
VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 853.360815] env[62070]: DEBUG nova.virt.hardware [None req-e0c1f7c9-4467-40ac-bc84-3c816df5b227 tempest-ServersNegativeTestMultiTenantJSON-2128819833 tempest-ServersNegativeTestMultiTenantJSON-2128819833-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 853.361038] env[62070]: DEBUG nova.virt.hardware [None req-e0c1f7c9-4467-40ac-bc84-3c816df5b227 tempest-ServersNegativeTestMultiTenantJSON-2128819833 tempest-ServersNegativeTestMultiTenantJSON-2128819833-project-member] Got 1 possible topologies {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 853.361220] env[62070]: DEBUG nova.virt.hardware [None req-e0c1f7c9-4467-40ac-bc84-3c816df5b227 tempest-ServersNegativeTestMultiTenantJSON-2128819833 tempest-ServersNegativeTestMultiTenantJSON-2128819833-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 853.361426] env[62070]: DEBUG nova.virt.hardware [None req-e0c1f7c9-4467-40ac-bc84-3c816df5b227 tempest-ServersNegativeTestMultiTenantJSON-2128819833 tempest-ServersNegativeTestMultiTenantJSON-2128819833-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 853.363475] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b97abb3b-198c-4788-8916-5fd6abfae33c {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.368025] env[62070]: DEBUG oslo_vmware.api [None req-ce7278d1-f76f-4623-9cfe-51d678cfa5d5 tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] Waiting for the task: (returnval){ [ 853.368025] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]52f9d59a-ff10-7287-6ce5-577a8b72f41b" [ 853.368025] env[62070]: _type = "Task" [ 853.368025] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 853.377248] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98b993a7-b565-40df-a14c-05b56bf6c8c8 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.388492] env[62070]: DEBUG oslo_vmware.api [None req-7f94bbb7-1fd6-441b-8175-a6f259611f31 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Task: {'id': task-1121861, 'name': PowerOnVM_Task} progress is 33%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 853.388789] env[62070]: DEBUG oslo_vmware.api [None req-ce7278d1-f76f-4623-9cfe-51d678cfa5d5 tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52f9d59a-ff10-7287-6ce5-577a8b72f41b, 'name': SearchDatastore_Task, 'duration_secs': 0.010446} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 853.389325] env[62070]: DEBUG oslo_concurrency.lockutils [None req-ce7278d1-f76f-4623-9cfe-51d678cfa5d5 tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] Releasing lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 853.389571] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-ce7278d1-f76f-4623-9cfe-51d678cfa5d5 tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] [instance: 61ab347d-1342-4f59-8955-10d575993b77] Processing image 43ea607c-7ece-4601-9b11-75c6a16aa7dd {{(pid=62070) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 853.389783] env[62070]: DEBUG oslo_concurrency.lockutils [None req-ce7278d1-f76f-4623-9cfe-51d678cfa5d5 tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 853.460535] env[62070]: DEBUG nova.scheduler.client.report [None req-58349f23-6bf8-4092-ac97-bf388655a094 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 853.474346] env[62070]: DEBUG oslo_vmware.api [None req-7ef3c4d0-3bb0-4ce7-af88-13da4a0bcc02 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': task-1121860, 'name': Rename_Task, 'duration_secs': 0.207731} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 853.474650] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-7ef3c4d0-3bb0-4ce7-af88-13da4a0bcc02 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: c16d175c-0b23-4f72-bdb0-844c6f80fd32] Powering on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 853.474919] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-edb1dd68-3e94-4199-82fd-36d2a0b05713 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.484410] env[62070]: DEBUG oslo_vmware.api [None req-7ef3c4d0-3bb0-4ce7-af88-13da4a0bcc02 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Waiting for the task: (returnval){ [ 853.484410] env[62070]: value = "task-1121862" [ 853.484410] env[62070]: _type = "Task" [ 853.484410] env[62070]: } to complete. 
{{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 853.493756] env[62070]: DEBUG oslo_vmware.api [None req-7ef3c4d0-3bb0-4ce7-af88-13da4a0bcc02 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': task-1121862, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 853.594700] env[62070]: DEBUG oslo_concurrency.lockutils [req-f9165459-942c-438a-922a-6fd3e8d9ac9a req-3eb5a622-17d0-4660-ae43-1d0b5d07f4a4 service nova] Releasing lock "refresh_cache-62758a38-4819-4d5a-97ed-db6c9ceb97bf" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 853.629151] env[62070]: DEBUG oslo_concurrency.lockutils [None req-361b1276-74ee-478a-b8ef-42cc811e08e6 tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Acquiring lock "27987ff6-77c9-4876-8b39-dcc20ce4158a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 853.629151] env[62070]: DEBUG oslo_concurrency.lockutils [None req-361b1276-74ee-478a-b8ef-42cc811e08e6 tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Lock "27987ff6-77c9-4876-8b39-dcc20ce4158a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 853.629151] env[62070]: DEBUG oslo_concurrency.lockutils [None req-361b1276-74ee-478a-b8ef-42cc811e08e6 tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Acquiring lock "27987ff6-77c9-4876-8b39-dcc20ce4158a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 853.629566] env[62070]: DEBUG oslo_concurrency.lockutils [None req-361b1276-74ee-478a-b8ef-42cc811e08e6 tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Lock "27987ff6-77c9-4876-8b39-dcc20ce4158a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 853.629566] env[62070]: DEBUG oslo_concurrency.lockutils [None req-361b1276-74ee-478a-b8ef-42cc811e08e6 tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Lock "27987ff6-77c9-4876-8b39-dcc20ce4158a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 853.635181] env[62070]: INFO nova.compute.manager [None req-361b1276-74ee-478a-b8ef-42cc811e08e6 tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] [instance: 27987ff6-77c9-4876-8b39-dcc20ce4158a] Terminating instance [ 853.637617] env[62070]: DEBUG nova.compute.manager [None req-361b1276-74ee-478a-b8ef-42cc811e08e6 tempest-MultipleCreateTestJSON-2024650253 
tempest-MultipleCreateTestJSON-2024650253-project-member] [instance: 27987ff6-77c9-4876-8b39-dcc20ce4158a] Start destroying the instance on the hypervisor. {{(pid=62070) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 853.637617] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-361b1276-74ee-478a-b8ef-42cc811e08e6 tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] [instance: 27987ff6-77c9-4876-8b39-dcc20ce4158a] Destroying instance {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 853.638306] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e4d2bb5-670b-44ac-954d-d9905468c9cc {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.648056] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-361b1276-74ee-478a-b8ef-42cc811e08e6 tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] [instance: 27987ff6-77c9-4876-8b39-dcc20ce4158a] Powering off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 853.648162] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-00c5f172-b1a1-499e-b483-59bb5af5c81a {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.663702] env[62070]: DEBUG oslo_vmware.api [None req-361b1276-74ee-478a-b8ef-42cc811e08e6 tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Waiting for the task: (returnval){ [ 853.663702] env[62070]: value = "task-1121863" [ 853.663702] env[62070]: _type = "Task" [ 853.663702] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 853.672327] env[62070]: DEBUG oslo_vmware.api [None req-d1ae98c6-774b-4beb-8cc9-59c38f859b35 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]527a0d55-4c35-621f-4118-6b7555c7162c, 'name': SearchDatastore_Task, 'duration_secs': 0.009789} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 853.673881] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-af6fff43-c06d-4712-82d2-7377caeefbf0 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.681155] env[62070]: DEBUG oslo_vmware.api [None req-361b1276-74ee-478a-b8ef-42cc811e08e6 tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Task: {'id': task-1121863, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 853.686259] env[62070]: DEBUG oslo_vmware.api [None req-d1ae98c6-774b-4beb-8cc9-59c38f859b35 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Waiting for the task: (returnval){ [ 853.686259] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]52899d87-e6fb-3de4-d733-a5f76b722449" [ 853.686259] env[62070]: _type = "Task" [ 853.686259] env[62070]: } to complete. 
{{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 853.698959] env[62070]: DEBUG oslo_vmware.api [None req-d1ae98c6-774b-4beb-8cc9-59c38f859b35 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52899d87-e6fb-3de4-d733-a5f76b722449, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 853.714207] env[62070]: DEBUG oslo_concurrency.lockutils [None req-03acc965-7789-4e44-9beb-b0ccc1f1df38 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Releasing lock "refresh_cache-e74fd58c-cfa8-45c4-8f02-96234b4a9192" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 853.714988] env[62070]: DEBUG nova.objects.instance [None req-03acc965-7789-4e44-9beb-b0ccc1f1df38 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Lazy-loading 'flavor' on Instance uuid e74fd58c-cfa8-45c4-8f02-96234b4a9192 {{(pid=62070) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 853.716636] env[62070]: DEBUG oslo_concurrency.lockutils [None req-7fb39e9f-8441-46d0-8705-2c950367f499 tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Acquiring lock "e5deccf6-f967-4e3c-bee0-2e1ad0bb4560" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 853.716933] env[62070]: DEBUG oslo_concurrency.lockutils [None req-7fb39e9f-8441-46d0-8705-2c950367f499 tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Lock "e5deccf6-f967-4e3c-bee0-2e1ad0bb4560" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 853.719047] env[62070]: DEBUG oslo_concurrency.lockutils [None req-7fb39e9f-8441-46d0-8705-2c950367f499 tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Acquiring lock "e5deccf6-f967-4e3c-bee0-2e1ad0bb4560-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 853.719047] env[62070]: DEBUG oslo_concurrency.lockutils [None req-7fb39e9f-8441-46d0-8705-2c950367f499 tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Lock "e5deccf6-f967-4e3c-bee0-2e1ad0bb4560-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 853.719047] env[62070]: DEBUG oslo_concurrency.lockutils [None req-7fb39e9f-8441-46d0-8705-2c950367f499 tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Lock "e5deccf6-f967-4e3c-bee0-2e1ad0bb4560-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62070) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 853.719907] env[62070]: INFO nova.compute.manager [None req-7fb39e9f-8441-46d0-8705-2c950367f499 tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] [instance: e5deccf6-f967-4e3c-bee0-2e1ad0bb4560] Terminating instance [ 853.722176] env[62070]: DEBUG nova.compute.manager [None req-7fb39e9f-8441-46d0-8705-2c950367f499 tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] [instance: e5deccf6-f967-4e3c-bee0-2e1ad0bb4560] Start destroying the instance on the hypervisor. {{(pid=62070) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 853.723878] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-7fb39e9f-8441-46d0-8705-2c950367f499 tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] [instance: e5deccf6-f967-4e3c-bee0-2e1ad0bb4560] Destroying instance {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 853.724927] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43eb786f-09b5-457f-ba46-d1c70cfe8260 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.733967] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-7fb39e9f-8441-46d0-8705-2c950367f499 tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] [instance: e5deccf6-f967-4e3c-bee0-2e1ad0bb4560] Powering off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 853.734976] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-679b6958-8589-48cb-9f93-a436d32e2f98 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.748375] env[62070]: DEBUG oslo_vmware.api [None req-7fb39e9f-8441-46d0-8705-2c950367f499 tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Waiting for the task: (returnval){ [ 853.748375] env[62070]: value = "task-1121864" [ 853.748375] env[62070]: _type = "Task" [ 853.748375] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 853.760661] env[62070]: DEBUG oslo_vmware.api [None req-7fb39e9f-8441-46d0-8705-2c950367f499 tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Task: {'id': task-1121864, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 853.876270] env[62070]: DEBUG oslo_vmware.api [None req-7f94bbb7-1fd6-441b-8175-a6f259611f31 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Task: {'id': task-1121861, 'name': PowerOnVM_Task, 'duration_secs': 0.424664} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 853.876810] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-7f94bbb7-1fd6-441b-8175-a6f259611f31 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] [instance: 3d22f50a-e1b7-48f9-a044-df64d01dfeb4] Powered on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 853.877191] env[62070]: DEBUG nova.compute.manager [None req-7f94bbb7-1fd6-441b-8175-a6f259611f31 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] [instance: 3d22f50a-e1b7-48f9-a044-df64d01dfeb4] Checking state {{(pid=62070) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 853.878252] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b86d11b-e008-4ae6-bd81-95c08271e7d7 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.967925] env[62070]: DEBUG oslo_concurrency.lockutils [None req-58349f23-6bf8-4092-ac97-bf388655a094 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.682s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 853.969472] env[62070]: DEBUG nova.compute.manager [None req-58349f23-6bf8-4092-ac97-bf388655a094 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: fb054a32-c1aa-4884-a087-da5ad34cf3c4] Start building networks asynchronously for instance. {{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 853.974282] env[62070]: DEBUG oslo_concurrency.lockutils [None req-732b2ab1-8333-4faa-b641-cbbd113d03d2 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 21.148s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 853.974627] env[62070]: DEBUG nova.objects.instance [None req-732b2ab1-8333-4faa-b641-cbbd113d03d2 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Lazy-loading 'resources' on Instance uuid c3c6e93c-80be-4e71-87fb-2ff8db8d30fe {{(pid=62070) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 853.999982] env[62070]: DEBUG oslo_vmware.api [None req-7ef3c4d0-3bb0-4ce7-af88-13da4a0bcc02 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': task-1121862, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 854.176734] env[62070]: DEBUG oslo_vmware.api [None req-361b1276-74ee-478a-b8ef-42cc811e08e6 tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Task: {'id': task-1121863, 'name': PowerOffVM_Task, 'duration_secs': 0.28265} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 854.177470] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-361b1276-74ee-478a-b8ef-42cc811e08e6 tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] [instance: 27987ff6-77c9-4876-8b39-dcc20ce4158a] Powered off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 854.177769] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-361b1276-74ee-478a-b8ef-42cc811e08e6 tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] [instance: 27987ff6-77c9-4876-8b39-dcc20ce4158a] Unregistering the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 854.178160] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0139c319-6591-42e6-9f53-9c27615287ca {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.200032] env[62070]: DEBUG oslo_vmware.api [None req-d1ae98c6-774b-4beb-8cc9-59c38f859b35 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52899d87-e6fb-3de4-d733-a5f76b722449, 'name': SearchDatastore_Task, 'duration_secs': 0.013515} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 854.200032] env[62070]: DEBUG oslo_concurrency.lockutils [None req-d1ae98c6-774b-4beb-8cc9-59c38f859b35 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Releasing lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 854.200032] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-d1ae98c6-774b-4beb-8cc9-59c38f859b35 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore2] 62758a38-4819-4d5a-97ed-db6c9ceb97bf/62758a38-4819-4d5a-97ed-db6c9ceb97bf.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 854.200032] env[62070]: DEBUG oslo_concurrency.lockutils [None req-ce7278d1-f76f-4623-9cfe-51d678cfa5d5 tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] Acquired lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 854.200032] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-ce7278d1-f76f-4623-9cfe-51d678cfa5d5 tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 854.200032] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f0479a01-ac9f-4393-8bd9-bdb03ba2b46f {{(pid=62070) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.200032] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-af790176-318b-4555-90d6-7c7f562449ae {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.209545] env[62070]: DEBUG oslo_vmware.api [None req-d1ae98c6-774b-4beb-8cc9-59c38f859b35 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Waiting for the task: (returnval){ [ 854.209545] env[62070]: value = "task-1121866" [ 854.209545] env[62070]: _type = "Task" [ 854.209545] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 854.211748] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-ce7278d1-f76f-4623-9cfe-51d678cfa5d5 tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 854.211748] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-ce7278d1-f76f-4623-9cfe-51d678cfa5d5 tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62070) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 854.217783] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a2668e8a-12f9-4d4c-9567-c88d6d28f2e9 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.225053] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92792dff-63e3-4f36-a420-6874e7010726 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.230972] env[62070]: DEBUG oslo_vmware.api [None req-d1ae98c6-774b-4beb-8cc9-59c38f859b35 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Task: {'id': task-1121866, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 854.232504] env[62070]: DEBUG oslo_vmware.api [None req-ce7278d1-f76f-4623-9cfe-51d678cfa5d5 tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] Waiting for the task: (returnval){ [ 854.232504] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]52a027ea-84a6-2a99-4737-01474962b709" [ 854.232504] env[62070]: _type = "Task" [ 854.232504] env[62070]: } to complete. 
{{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 854.263558] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-03acc965-7789-4e44-9beb-b0ccc1f1df38 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] [instance: e74fd58c-cfa8-45c4-8f02-96234b4a9192] Powering off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 854.269487] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0549a7df-5c73-45cc-b56d-420f3f4c74d9 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.271471] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-361b1276-74ee-478a-b8ef-42cc811e08e6 tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] [instance: 27987ff6-77c9-4876-8b39-dcc20ce4158a] Unregistered the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 854.271819] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-361b1276-74ee-478a-b8ef-42cc811e08e6 tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] [instance: 27987ff6-77c9-4876-8b39-dcc20ce4158a] Deleting contents of the VM from datastore datastore2 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 854.272173] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-361b1276-74ee-478a-b8ef-42cc811e08e6 tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Deleting the datastore file [datastore2] 27987ff6-77c9-4876-8b39-dcc20ce4158a {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 854.282441] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e2739e11-bff0-4125-9c43-1994477d6700 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.283149] env[62070]: DEBUG oslo_vmware.api [None req-ce7278d1-f76f-4623-9cfe-51d678cfa5d5 tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52a027ea-84a6-2a99-4737-01474962b709, 'name': SearchDatastore_Task, 'duration_secs': 0.026827} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 854.285647] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f4a0d806-5f14-4ae0-85bd-86eae88e0dbc {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.293145] env[62070]: DEBUG oslo_vmware.api [None req-7fb39e9f-8441-46d0-8705-2c950367f499 tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Task: {'id': task-1121864, 'name': PowerOffVM_Task, 'duration_secs': 0.212709} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 854.296253] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-7fb39e9f-8441-46d0-8705-2c950367f499 tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] [instance: e5deccf6-f967-4e3c-bee0-2e1ad0bb4560] Powered off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 854.296495] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-7fb39e9f-8441-46d0-8705-2c950367f499 tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] [instance: e5deccf6-f967-4e3c-bee0-2e1ad0bb4560] Unregistering the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 854.297305] env[62070]: DEBUG oslo_vmware.api [None req-03acc965-7789-4e44-9beb-b0ccc1f1df38 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Waiting for the task: (returnval){ [ 854.297305] env[62070]: value = "task-1121867" [ 854.297305] env[62070]: _type = "Task" [ 854.297305] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 854.297667] env[62070]: DEBUG oslo_vmware.api [None req-361b1276-74ee-478a-b8ef-42cc811e08e6 tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Waiting for the task: (returnval){ [ 854.297667] env[62070]: value = "task-1121868" [ 854.297667] env[62070]: _type = "Task" [ 854.297667] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 854.297921] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-984154d3-4c1a-4bc5-b00c-fe931e950dbf {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.308718] env[62070]: DEBUG oslo_vmware.api [None req-ce7278d1-f76f-4623-9cfe-51d678cfa5d5 tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] Waiting for the task: (returnval){ [ 854.308718] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]52e964cc-c476-ac3d-1403-5370500ee16f" [ 854.308718] env[62070]: _type = "Task" [ 854.308718] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 854.319444] env[62070]: DEBUG oslo_vmware.api [None req-03acc965-7789-4e44-9beb-b0ccc1f1df38 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Task: {'id': task-1121867, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 854.320301] env[62070]: DEBUG oslo_vmware.api [None req-361b1276-74ee-478a-b8ef-42cc811e08e6 tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Task: {'id': task-1121868, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 854.327708] env[62070]: DEBUG oslo_vmware.api [None req-ce7278d1-f76f-4623-9cfe-51d678cfa5d5 tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52e964cc-c476-ac3d-1403-5370500ee16f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 854.484885] env[62070]: DEBUG nova.compute.utils [None req-58349f23-6bf8-4092-ac97-bf388655a094 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Using /dev/sd instead of None {{(pid=62070) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 854.486926] env[62070]: DEBUG nova.compute.manager [None req-58349f23-6bf8-4092-ac97-bf388655a094 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: fb054a32-c1aa-4884-a087-da5ad34cf3c4] Allocating IP information in the background. {{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 854.487239] env[62070]: DEBUG nova.network.neutron [None req-58349f23-6bf8-4092-ac97-bf388655a094 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: fb054a32-c1aa-4884-a087-da5ad34cf3c4] allocate_for_instance() {{(pid=62070) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 854.500804] env[62070]: DEBUG oslo_vmware.api [None req-7ef3c4d0-3bb0-4ce7-af88-13da4a0bcc02 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': task-1121862, 'name': PowerOnVM_Task, 'duration_secs': 0.529645} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 854.501709] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-7ef3c4d0-3bb0-4ce7-af88-13da4a0bcc02 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: c16d175c-0b23-4f72-bdb0-844c6f80fd32] Powered on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 854.502180] env[62070]: INFO nova.compute.manager [None req-7ef3c4d0-3bb0-4ce7-af88-13da4a0bcc02 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: c16d175c-0b23-4f72-bdb0-844c6f80fd32] Took 12.26 seconds to spawn the instance on the hypervisor. 
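The PowerOnVM_Task, PowerOffVM_Task and CopyVirtualDisk_Task entries above follow the usual oslo.vmware pattern: a vim method call returns a task managed-object reference immediately, and the session then polls it (the recurring "progress is N%" and "completed successfully" lines) until it reaches a terminal state. A minimal sketch of that pattern is below; the vCenter host, credentials and VM lookup are illustrative placeholders, not values taken from this log, and error handling is omitted.

from oslo_vmware import api as vmware_api
from oslo_vmware import vim_util

# Hypothetical connection details; in Nova these come from the [vmware]
# section of nova.conf rather than being hard-coded.
session = vmware_api.VMwareAPISession(
    'vcenter.example.test', 'administrator@vsphere.local', 'secret',
    api_retry_count=10, task_poll_interval=0.5)

# Fetch up to 100 VirtualMachine managed-object references through the
# PropertyCollector (the RetrievePropertiesEx invocations seen above).
result = session.invoke_api(vim_util, 'get_objects',
                            session.vim, 'VirtualMachine', 100)
vm_ref = result.objects[0].obj  # pick an arbitrary VM for this sketch

# PowerOnVM_Task returns a task reference at once; wait_for_task() polls
# it until SUCCESS (or raises on error), which is what produces the
# "progress is N%" / "completed successfully" records in the log.
task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
session.wait_for_task(task)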
[ 854.502281] env[62070]: DEBUG nova.compute.manager [None req-7ef3c4d0-3bb0-4ce7-af88-13da4a0bcc02 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: c16d175c-0b23-4f72-bdb0-844c6f80fd32] Checking state {{(pid=62070) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 854.503078] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10032350-0fb3-4af7-84b0-3052eeefe703 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.585437] env[62070]: DEBUG nova.policy [None req-58349f23-6bf8-4092-ac97-bf388655a094 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '85707399ddf04d03bfb487560df1212e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd079c0ef3ed745fcaf69dc728dca4466', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62070) authorize /opt/stack/nova/nova/policy.py:201}} [ 854.722881] env[62070]: DEBUG oslo_vmware.api [None req-d1ae98c6-774b-4beb-8cc9-59c38f859b35 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Task: {'id': task-1121866, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 854.827737] env[62070]: DEBUG oslo_vmware.api [None req-361b1276-74ee-478a-b8ef-42cc811e08e6 tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Task: {'id': task-1121868, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 854.828368] env[62070]: DEBUG oslo_vmware.api [None req-03acc965-7789-4e44-9beb-b0ccc1f1df38 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Task: {'id': task-1121867, 'name': PowerOffVM_Task, 'duration_secs': 0.237663} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 854.832355] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-03acc965-7789-4e44-9beb-b0ccc1f1df38 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] [instance: e74fd58c-cfa8-45c4-8f02-96234b4a9192] Powered off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 854.843583] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-03acc965-7789-4e44-9beb-b0ccc1f1df38 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] [instance: e74fd58c-cfa8-45c4-8f02-96234b4a9192] Reconfiguring VM instance instance-00000040 to detach disk 2001 {{(pid=62070) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 854.844685] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5256911f-ae9e-4f37-8d29-6866aba332f8 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.879384] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-7fb39e9f-8441-46d0-8705-2c950367f499 tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] [instance: e5deccf6-f967-4e3c-bee0-2e1ad0bb4560] Unregistered the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 854.879744] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-7fb39e9f-8441-46d0-8705-2c950367f499 tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] [instance: e5deccf6-f967-4e3c-bee0-2e1ad0bb4560] Deleting contents of the VM from datastore datastore2 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 854.879977] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-7fb39e9f-8441-46d0-8705-2c950367f499 tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Deleting the datastore file [datastore2] e5deccf6-f967-4e3c-bee0-2e1ad0bb4560 {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 854.880368] env[62070]: DEBUG oslo_vmware.api [None req-ce7278d1-f76f-4623-9cfe-51d678cfa5d5 tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52e964cc-c476-ac3d-1403-5370500ee16f, 'name': SearchDatastore_Task, 'duration_secs': 0.028522} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 854.881394] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b79d3571-2039-4805-8863-fd3a371347cc {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.884146] env[62070]: DEBUG oslo_concurrency.lockutils [None req-ce7278d1-f76f-4623-9cfe-51d678cfa5d5 tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] Releasing lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 854.884632] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-ce7278d1-f76f-4623-9cfe-51d678cfa5d5 tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore2] 61ab347d-1342-4f59-8955-10d575993b77/61ab347d-1342-4f59-8955-10d575993b77.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 854.884857] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-34898c54-9f41-4076-99d1-f94637a734bd {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.891103] env[62070]: DEBUG oslo_vmware.api [None req-03acc965-7789-4e44-9beb-b0ccc1f1df38 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Waiting for the task: (returnval){ [ 854.891103] env[62070]: value = "task-1121870" [ 854.891103] env[62070]: _type = "Task" [ 854.891103] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 854.900500] env[62070]: DEBUG oslo_vmware.api [None req-ce7278d1-f76f-4623-9cfe-51d678cfa5d5 tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] Waiting for the task: (returnval){ [ 854.900500] env[62070]: value = "task-1121872" [ 854.900500] env[62070]: _type = "Task" [ 854.900500] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 854.900840] env[62070]: DEBUG oslo_vmware.api [None req-7fb39e9f-8441-46d0-8705-2c950367f499 tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Waiting for the task: (returnval){ [ 854.900840] env[62070]: value = "task-1121871" [ 854.900840] env[62070]: _type = "Task" [ 854.900840] env[62070]: } to complete. 
{{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 854.929804] env[62070]: DEBUG oslo_concurrency.lockutils [None req-f5579827-8e9e-49d8-9717-4039d0d0bfc5 tempest-ServerAddressesNegativeTestJSON-1593054557 tempest-ServerAddressesNegativeTestJSON-1593054557-project-member] Acquiring lock "d2cfcfac-4f15-4b16-9046-76722ee2e39b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 854.930170] env[62070]: DEBUG oslo_concurrency.lockutils [None req-f5579827-8e9e-49d8-9717-4039d0d0bfc5 tempest-ServerAddressesNegativeTestJSON-1593054557 tempest-ServerAddressesNegativeTestJSON-1593054557-project-member] Lock "d2cfcfac-4f15-4b16-9046-76722ee2e39b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 854.931697] env[62070]: DEBUG oslo_vmware.api [None req-03acc965-7789-4e44-9beb-b0ccc1f1df38 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Task: {'id': task-1121870, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 854.941919] env[62070]: DEBUG oslo_vmware.api [None req-7fb39e9f-8441-46d0-8705-2c950367f499 tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Task: {'id': task-1121871, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 854.945021] env[62070]: DEBUG oslo_vmware.api [None req-ce7278d1-f76f-4623-9cfe-51d678cfa5d5 tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] Task: {'id': task-1121872, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 854.953012] env[62070]: DEBUG nova.network.neutron [None req-e0c1f7c9-4467-40ac-bc84-3c816df5b227 tempest-ServersNegativeTestMultiTenantJSON-2128819833 tempest-ServersNegativeTestMultiTenantJSON-2128819833-project-member] [instance: 4bba7448-69f7-4764-9ae6-eb6585f71515] Successfully updated port: b1394ea1-e455-496f-9aa1-6eacc606ec0a {{(pid=62070) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 854.965564] env[62070]: DEBUG nova.compute.manager [req-7070f51f-b5da-406f-b2cb-b13ce5dc18a8 req-2d6f1e30-fdb7-4d38-9f64-9bf56f206510 service nova] [instance: 4bba7448-69f7-4764-9ae6-eb6585f71515] Received event network-vif-plugged-b1394ea1-e455-496f-9aa1-6eacc606ec0a {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 854.965860] env[62070]: DEBUG oslo_concurrency.lockutils [req-7070f51f-b5da-406f-b2cb-b13ce5dc18a8 req-2d6f1e30-fdb7-4d38-9f64-9bf56f206510 service nova] Acquiring lock "4bba7448-69f7-4764-9ae6-eb6585f71515-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 854.966175] env[62070]: DEBUG oslo_concurrency.lockutils [req-7070f51f-b5da-406f-b2cb-b13ce5dc18a8 req-2d6f1e30-fdb7-4d38-9f64-9bf56f206510 service nova] Lock "4bba7448-69f7-4764-9ae6-eb6585f71515-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 854.966323] env[62070]: DEBUG oslo_concurrency.lockutils [req-7070f51f-b5da-406f-b2cb-b13ce5dc18a8 req-2d6f1e30-fdb7-4d38-9f64-9bf56f206510 service nova] Lock "4bba7448-69f7-4764-9ae6-eb6585f71515-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 854.966622] env[62070]: DEBUG nova.compute.manager [req-7070f51f-b5da-406f-b2cb-b13ce5dc18a8 req-2d6f1e30-fdb7-4d38-9f64-9bf56f206510 service nova] [instance: 4bba7448-69f7-4764-9ae6-eb6585f71515] No waiting events found dispatching network-vif-plugged-b1394ea1-e455-496f-9aa1-6eacc606ec0a {{(pid=62070) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 854.967887] env[62070]: WARNING nova.compute.manager [req-7070f51f-b5da-406f-b2cb-b13ce5dc18a8 req-2d6f1e30-fdb7-4d38-9f64-9bf56f206510 service nova] [instance: 4bba7448-69f7-4764-9ae6-eb6585f71515] Received unexpected event network-vif-plugged-b1394ea1-e455-496f-9aa1-6eacc606ec0a for instance with vm_state building and task_state spawning. [ 854.991158] env[62070]: DEBUG nova.compute.manager [None req-58349f23-6bf8-4092-ac97-bf388655a094 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: fb054a32-c1aa-4884-a087-da5ad34cf3c4] Start building block device mappings for instance. {{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 855.026823] env[62070]: INFO nova.compute.manager [None req-7ef3c4d0-3bb0-4ce7-af88-13da4a0bcc02 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: c16d175c-0b23-4f72-bdb0-844c6f80fd32] Took 44.45 seconds to build instance. 
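The recurring "Acquiring lock ... by ...", "Lock ... acquired ... waited N.NNNs" and "Lock ... released ... held N.NNNs" messages above (for "compute_resources", the per-instance build locks, the "...-events" locks and the image-cache vmdk paths) come from oslo.concurrency's lockutils helpers. A minimal sketch of the two common call styles follows; the lock names and function bodies are hypothetical stand-ins, not Nova's actual implementation.

from oslo_concurrency import lockutils

# Decorator form: serializes every caller on the named in-process lock.
# fair=True hands the lock out in FIFO order, which is consistent with
# the long "waited N.NNNs" values recorded for "compute_resources".
@lockutils.synchronized('compute_resources', fair=True)
def claim_instance(instance_uuid):
    # resource-tracker style bookkeeping would happen here
    return instance_uuid

# Context-manager form: handy for ad-hoc names such as the
# "[datastore2] devstack-image-cache_base/... .vmdk" cache entries.
def copy_cached_image(image_path):
    with lockutils.lock(image_path):
        # copy or validate the cached VMDK while holding the lock
        pass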
[ 855.084493] env[62070]: DEBUG nova.network.neutron [None req-58349f23-6bf8-4092-ac97-bf388655a094 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: fb054a32-c1aa-4884-a087-da5ad34cf3c4] Successfully created port: 32f47285-7ff0-405e-849d-27e73999e359 {{(pid=62070) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 855.089289] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c175a0a0-9b4f-463c-80c5-4bc47b2f19e4 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.100550] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62c610ad-7d21-4ea2-be9b-b2e6fed408c9 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.141089] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7df5030a-0a95-4135-afab-b95c4b915584 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.151084] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-baea0672-0e77-42d7-ad0a-32703d8d2631 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.167278] env[62070]: DEBUG nova.compute.provider_tree [None req-732b2ab1-8333-4faa-b641-cbbd113d03d2 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Updating inventory in ProviderTree for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 855.223598] env[62070]: DEBUG oslo_vmware.api [None req-d1ae98c6-774b-4beb-8cc9-59c38f859b35 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Task: {'id': task-1121866, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.731829} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 855.223891] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-d1ae98c6-774b-4beb-8cc9-59c38f859b35 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore2] 62758a38-4819-4d5a-97ed-db6c9ceb97bf/62758a38-4819-4d5a-97ed-db6c9ceb97bf.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 855.224133] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-d1ae98c6-774b-4beb-8cc9-59c38f859b35 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 62758a38-4819-4d5a-97ed-db6c9ceb97bf] Extending root virtual disk to 1048576 {{(pid=62070) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 855.224409] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-61db8ff9-806c-4ae2-9ad4-e2177d511028 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.234444] env[62070]: DEBUG oslo_vmware.api [None req-d1ae98c6-774b-4beb-8cc9-59c38f859b35 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Waiting for the task: (returnval){ [ 855.234444] env[62070]: value = "task-1121873" [ 855.234444] env[62070]: _type = "Task" [ 855.234444] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 855.246568] env[62070]: DEBUG oslo_vmware.api [None req-d1ae98c6-774b-4beb-8cc9-59c38f859b35 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Task: {'id': task-1121873, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 855.312486] env[62070]: DEBUG oslo_vmware.api [None req-361b1276-74ee-478a-b8ef-42cc811e08e6 tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Task: {'id': task-1121868, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.657023} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 855.312905] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-361b1276-74ee-478a-b8ef-42cc811e08e6 tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Deleted the datastore file {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 855.313027] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-361b1276-74ee-478a-b8ef-42cc811e08e6 tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] [instance: 27987ff6-77c9-4876-8b39-dcc20ce4158a] Deleted contents of the VM from datastore datastore2 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 855.313152] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-361b1276-74ee-478a-b8ef-42cc811e08e6 tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] [instance: 27987ff6-77c9-4876-8b39-dcc20ce4158a] Instance destroyed {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 855.313333] env[62070]: INFO nova.compute.manager [None req-361b1276-74ee-478a-b8ef-42cc811e08e6 tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] [instance: 27987ff6-77c9-4876-8b39-dcc20ce4158a] Took 1.68 seconds to destroy the instance on the hypervisor. [ 855.313648] env[62070]: DEBUG oslo.service.loopingcall [None req-361b1276-74ee-478a-b8ef-42cc811e08e6 tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 855.313995] env[62070]: DEBUG nova.compute.manager [-] [instance: 27987ff6-77c9-4876-8b39-dcc20ce4158a] Deallocating network for instance {{(pid=62070) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 855.313995] env[62070]: DEBUG nova.network.neutron [-] [instance: 27987ff6-77c9-4876-8b39-dcc20ce4158a] deallocate_for_instance() {{(pid=62070) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 855.401773] env[62070]: DEBUG oslo_vmware.api [None req-03acc965-7789-4e44-9beb-b0ccc1f1df38 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Task: {'id': task-1121870, 'name': ReconfigVM_Task, 'duration_secs': 0.285365} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 855.402134] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-03acc965-7789-4e44-9beb-b0ccc1f1df38 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] [instance: e74fd58c-cfa8-45c4-8f02-96234b4a9192] Reconfigured VM instance instance-00000040 to detach disk 2001 {{(pid=62070) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 855.402306] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-03acc965-7789-4e44-9beb-b0ccc1f1df38 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] [instance: e74fd58c-cfa8-45c4-8f02-96234b4a9192] Powering on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 855.402529] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-205d2615-4d70-43b2-b92d-6aebe7a097e1 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.414570] env[62070]: DEBUG oslo_vmware.api [None req-03acc965-7789-4e44-9beb-b0ccc1f1df38 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Waiting for the task: (returnval){ [ 855.414570] env[62070]: value = "task-1121874" [ 855.414570] env[62070]: _type = "Task" [ 855.414570] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 855.424240] env[62070]: DEBUG oslo_vmware.api [None req-ce7278d1-f76f-4623-9cfe-51d678cfa5d5 tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] Task: {'id': task-1121872, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 855.424240] env[62070]: DEBUG oslo_vmware.api [None req-7fb39e9f-8441-46d0-8705-2c950367f499 tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Task: {'id': task-1121871, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.19587} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 855.424830] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-7fb39e9f-8441-46d0-8705-2c950367f499 tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Deleted the datastore file {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 855.425071] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-7fb39e9f-8441-46d0-8705-2c950367f499 tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] [instance: e5deccf6-f967-4e3c-bee0-2e1ad0bb4560] Deleted contents of the VM from datastore datastore2 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 855.425289] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-7fb39e9f-8441-46d0-8705-2c950367f499 tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] [instance: e5deccf6-f967-4e3c-bee0-2e1ad0bb4560] Instance destroyed {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 855.425430] env[62070]: INFO nova.compute.manager [None req-7fb39e9f-8441-46d0-8705-2c950367f499 tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] [instance: e5deccf6-f967-4e3c-bee0-2e1ad0bb4560] Took 1.70 seconds to destroy the instance on the hypervisor. [ 855.425664] env[62070]: DEBUG oslo.service.loopingcall [None req-7fb39e9f-8441-46d0-8705-2c950367f499 tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 855.425854] env[62070]: DEBUG nova.compute.manager [-] [instance: e5deccf6-f967-4e3c-bee0-2e1ad0bb4560] Deallocating network for instance {{(pid=62070) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 855.425965] env[62070]: DEBUG nova.network.neutron [-] [instance: e5deccf6-f967-4e3c-bee0-2e1ad0bb4560] deallocate_for_instance() {{(pid=62070) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 855.432125] env[62070]: DEBUG oslo_vmware.api [None req-03acc965-7789-4e44-9beb-b0ccc1f1df38 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Task: {'id': task-1121874, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 855.435644] env[62070]: DEBUG nova.compute.manager [None req-f5579827-8e9e-49d8-9717-4039d0d0bfc5 tempest-ServerAddressesNegativeTestJSON-1593054557 tempest-ServerAddressesNegativeTestJSON-1593054557-project-member] [instance: d2cfcfac-4f15-4b16-9046-76722ee2e39b] Starting instance... 
{{(pid=62070) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 855.457040] env[62070]: DEBUG oslo_concurrency.lockutils [None req-e0c1f7c9-4467-40ac-bc84-3c816df5b227 tempest-ServersNegativeTestMultiTenantJSON-2128819833 tempest-ServersNegativeTestMultiTenantJSON-2128819833-project-member] Acquiring lock "refresh_cache-4bba7448-69f7-4764-9ae6-eb6585f71515" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 855.458622] env[62070]: DEBUG oslo_concurrency.lockutils [None req-e0c1f7c9-4467-40ac-bc84-3c816df5b227 tempest-ServersNegativeTestMultiTenantJSON-2128819833 tempest-ServersNegativeTestMultiTenantJSON-2128819833-project-member] Acquired lock "refresh_cache-4bba7448-69f7-4764-9ae6-eb6585f71515" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 855.458622] env[62070]: DEBUG nova.network.neutron [None req-e0c1f7c9-4467-40ac-bc84-3c816df5b227 tempest-ServersNegativeTestMultiTenantJSON-2128819833 tempest-ServersNegativeTestMultiTenantJSON-2128819833-project-member] [instance: 4bba7448-69f7-4764-9ae6-eb6585f71515] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 855.529465] env[62070]: DEBUG oslo_concurrency.lockutils [None req-7ef3c4d0-3bb0-4ce7-af88-13da4a0bcc02 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Lock "c16d175c-0b23-4f72-bdb0-844c6f80fd32" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 67.813s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 855.694679] env[62070]: ERROR nova.scheduler.client.report [None req-732b2ab1-8333-4faa-b641-cbbd113d03d2 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [req-a22ad9f4-5fdb-45ee-8119-dc1c350d7fa7] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 21c7c111-1b69-4468-b2c4-5dd96014fbd6. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-a22ad9f4-5fdb-45ee-8119-dc1c350d7fa7"}]} [ 855.712827] env[62070]: DEBUG nova.scheduler.client.report [None req-732b2ab1-8333-4faa-b641-cbbd113d03d2 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Refreshing inventories for resource provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 855.731283] env[62070]: DEBUG nova.scheduler.client.report [None req-732b2ab1-8333-4faa-b641-cbbd113d03d2 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Updating ProviderTree inventory for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 855.731283] env[62070]: DEBUG nova.compute.provider_tree [None req-732b2ab1-8333-4faa-b641-cbbd113d03d2 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Updating inventory in ProviderTree for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 855.745623] env[62070]: DEBUG oslo_vmware.api [None req-d1ae98c6-774b-4beb-8cc9-59c38f859b35 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Task: {'id': task-1121873, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.172906} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 855.745726] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-d1ae98c6-774b-4beb-8cc9-59c38f859b35 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 62758a38-4819-4d5a-97ed-db6c9ceb97bf] Extended root virtual disk {{(pid=62070) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 855.746769] env[62070]: DEBUG nova.scheduler.client.report [None req-732b2ab1-8333-4faa-b641-cbbd113d03d2 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Refreshing aggregate associations for resource provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6, aggregates: None {{(pid=62070) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 855.749293] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80e7ab36-b386-4a4e-b07e-5b5db9b9fdf8 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.778110] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-d1ae98c6-774b-4beb-8cc9-59c38f859b35 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 62758a38-4819-4d5a-97ed-db6c9ceb97bf] Reconfiguring VM instance instance-00000045 to attach disk [datastore2] 62758a38-4819-4d5a-97ed-db6c9ceb97bf/62758a38-4819-4d5a-97ed-db6c9ceb97bf.vmdk or device None with type sparse {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 855.782022] env[62070]: DEBUG nova.scheduler.client.report [None req-732b2ab1-8333-4faa-b641-cbbd113d03d2 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Refreshing trait associations for resource provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6, traits: COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO {{(pid=62070) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 855.782022] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f8cf0835-150d-4912-9d6f-f87ae4a73c9b {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.809568] env[62070]: DEBUG oslo_vmware.api [None req-d1ae98c6-774b-4beb-8cc9-59c38f859b35 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Waiting for the task: (returnval){ [ 855.809568] env[62070]: value = "task-1121875" [ 855.809568] env[62070]: _type = "Task" [ 855.809568] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 855.819328] env[62070]: DEBUG oslo_vmware.api [None req-d1ae98c6-774b-4beb-8cc9-59c38f859b35 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Task: {'id': task-1121875, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 855.919842] env[62070]: DEBUG oslo_vmware.api [None req-ce7278d1-f76f-4623-9cfe-51d678cfa5d5 tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] Task: {'id': task-1121872, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.547473} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 855.923281] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-ce7278d1-f76f-4623-9cfe-51d678cfa5d5 tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore2] 61ab347d-1342-4f59-8955-10d575993b77/61ab347d-1342-4f59-8955-10d575993b77.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 855.923515] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-ce7278d1-f76f-4623-9cfe-51d678cfa5d5 tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] [instance: 61ab347d-1342-4f59-8955-10d575993b77] Extending root virtual disk to 1048576 {{(pid=62070) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 855.923787] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-83f2a6d1-2684-401f-8c67-0e0c2645d2fe {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.938787] env[62070]: DEBUG oslo_vmware.api [None req-ce7278d1-f76f-4623-9cfe-51d678cfa5d5 tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] Waiting for the task: (returnval){ [ 855.938787] env[62070]: value = "task-1121876" [ 855.938787] env[62070]: _type = "Task" [ 855.938787] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 855.939412] env[62070]: DEBUG oslo_vmware.api [None req-03acc965-7789-4e44-9beb-b0ccc1f1df38 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Task: {'id': task-1121874, 'name': PowerOnVM_Task, 'duration_secs': 0.404172} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 855.940816] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-03acc965-7789-4e44-9beb-b0ccc1f1df38 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] [instance: e74fd58c-cfa8-45c4-8f02-96234b4a9192] Powered on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 855.941123] env[62070]: DEBUG nova.compute.manager [None req-03acc965-7789-4e44-9beb-b0ccc1f1df38 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] [instance: e74fd58c-cfa8-45c4-8f02-96234b4a9192] Checking state {{(pid=62070) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 855.953288] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0c3381a-2c27-4e07-bb0f-a7b2af7864b8 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.971665] env[62070]: DEBUG oslo_vmware.api [None req-ce7278d1-f76f-4623-9cfe-51d678cfa5d5 tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] Task: {'id': task-1121876, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 855.976390] env[62070]: DEBUG oslo_concurrency.lockutils [None req-f5579827-8e9e-49d8-9717-4039d0d0bfc5 tempest-ServerAddressesNegativeTestJSON-1593054557 tempest-ServerAddressesNegativeTestJSON-1593054557-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 856.002016] env[62070]: DEBUG nova.compute.manager [None req-58349f23-6bf8-4092-ac97-bf388655a094 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: fb054a32-c1aa-4884-a087-da5ad34cf3c4] Start spawning the instance on the hypervisor. {{(pid=62070) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 856.010949] env[62070]: DEBUG nova.network.neutron [None req-e0c1f7c9-4467-40ac-bc84-3c816df5b227 tempest-ServersNegativeTestMultiTenantJSON-2128819833 tempest-ServersNegativeTestMultiTenantJSON-2128819833-project-member] [instance: 4bba7448-69f7-4764-9ae6-eb6585f71515] Instance cache missing network info. 
{{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 856.038319] env[62070]: DEBUG nova.virt.hardware [None req-58349f23-6bf8-4092-ac97-bf388655a094 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T09:21:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T09:21:17Z,direct_url=,disk_format='vmdk',id=43ea607c-7ece-4601-9b11-75c6a16aa7dd,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9d42cb2bbadf40d6b35f237f71234611',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T09:21:18Z,virtual_size=,visibility=), allow threads: False {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 856.038621] env[62070]: DEBUG nova.virt.hardware [None req-58349f23-6bf8-4092-ac97-bf388655a094 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Flavor limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 856.039123] env[62070]: DEBUG nova.virt.hardware [None req-58349f23-6bf8-4092-ac97-bf388655a094 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Image limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 856.039365] env[62070]: DEBUG nova.virt.hardware [None req-58349f23-6bf8-4092-ac97-bf388655a094 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Flavor pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 856.039634] env[62070]: DEBUG nova.virt.hardware [None req-58349f23-6bf8-4092-ac97-bf388655a094 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Image pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 856.040236] env[62070]: DEBUG nova.virt.hardware [None req-58349f23-6bf8-4092-ac97-bf388655a094 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 856.040913] env[62070]: DEBUG nova.virt.hardware [None req-58349f23-6bf8-4092-ac97-bf388655a094 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 856.041274] env[62070]: DEBUG nova.virt.hardware [None req-58349f23-6bf8-4092-ac97-bf388655a094 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 856.041533] 
env[62070]: DEBUG nova.virt.hardware [None req-58349f23-6bf8-4092-ac97-bf388655a094 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Got 1 possible topologies {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 856.041897] env[62070]: DEBUG nova.virt.hardware [None req-58349f23-6bf8-4092-ac97-bf388655a094 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 856.042032] env[62070]: DEBUG nova.virt.hardware [None req-58349f23-6bf8-4092-ac97-bf388655a094 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 856.043124] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9dc88881-5f35-40d1-922c-c5fcdd37591d {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.057125] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48ef0cd6-39e1-4774-9594-dfae9340f100 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.175126] env[62070]: DEBUG nova.network.neutron [-] [instance: 27987ff6-77c9-4876-8b39-dcc20ce4158a] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 856.219899] env[62070]: DEBUG nova.network.neutron [None req-e0c1f7c9-4467-40ac-bc84-3c816df5b227 tempest-ServersNegativeTestMultiTenantJSON-2128819833 tempest-ServersNegativeTestMultiTenantJSON-2128819833-project-member] [instance: 4bba7448-69f7-4764-9ae6-eb6585f71515] Updating instance_info_cache with network_info: [{"id": "b1394ea1-e455-496f-9aa1-6eacc606ec0a", "address": "fa:16:3e:82:5b:76", "network": {"id": "eb169037-d713-4ffd-8936-dc4c33b4724d", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-358048878-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d85661855bb646e1935f013cb2607aec", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6ab2e9f5-54fd-4cab-9405-ed65e2aaba64", "external-id": "nsx-vlan-transportzone-222", "segmentation_id": 222, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb1394ea1-e4", "ovs_interfaceid": "b1394ea1-e455-496f-9aa1-6eacc606ec0a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 856.304640] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-f6abe677-ca76-408a-bf6b-d1d05aebf704 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.316255] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-452d432d-7ea3-4bf0-a487-c57dc92e741f {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.324716] env[62070]: DEBUG oslo_vmware.api [None req-d1ae98c6-774b-4beb-8cc9-59c38f859b35 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Task: {'id': task-1121875, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 856.351933] env[62070]: DEBUG nova.network.neutron [-] [instance: e5deccf6-f967-4e3c-bee0-2e1ad0bb4560] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 856.353789] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74145059-2270-4b4c-82c7-7c851a43a70f {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.362944] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00f3f72d-8fbf-4907-98e6-9944eaaf9b7f {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.381401] env[62070]: DEBUG nova.compute.provider_tree [None req-732b2ab1-8333-4faa-b641-cbbd113d03d2 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Updating inventory in ProviderTree for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 856.452123] env[62070]: DEBUG oslo_vmware.api [None req-ce7278d1-f76f-4623-9cfe-51d678cfa5d5 tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] Task: {'id': task-1121876, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.119047} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 856.452462] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-ce7278d1-f76f-4623-9cfe-51d678cfa5d5 tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] [instance: 61ab347d-1342-4f59-8955-10d575993b77] Extended root virtual disk {{(pid=62070) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 856.453297] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-734c6088-01c2-4cc6-8289-69f4a1129f57 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.458139] env[62070]: DEBUG oslo_concurrency.lockutils [None req-5ea28d70-9055-4c31-95a2-4fc36eb772ba tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Acquiring lock "c16d175c-0b23-4f72-bdb0-844c6f80fd32" by "nova.compute.manager.ComputeManager.stop_instance.<locals>.do_stop_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 856.458316] env[62070]: DEBUG oslo_concurrency.lockutils [None req-5ea28d70-9055-4c31-95a2-4fc36eb772ba tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Lock "c16d175c-0b23-4f72-bdb0-844c6f80fd32" acquired by "nova.compute.manager.ComputeManager.stop_instance.<locals>.do_stop_instance" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 856.458534] env[62070]: DEBUG nova.compute.manager [None req-5ea28d70-9055-4c31-95a2-4fc36eb772ba tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: c16d175c-0b23-4f72-bdb0-844c6f80fd32] Checking state {{(pid=62070) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 856.459490] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84a6b1e7-2ad0-4927-881f-852aacb258d7 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.482943] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-ce7278d1-f76f-4623-9cfe-51d678cfa5d5 tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] [instance: 61ab347d-1342-4f59-8955-10d575993b77] Reconfiguring VM instance instance-00000046 to attach disk [datastore2] 61ab347d-1342-4f59-8955-10d575993b77/61ab347d-1342-4f59-8955-10d575993b77.vmdk or device None with type sparse {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 856.486178] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-dce2e52a-d60a-49d2-aaad-e16d2b954bce {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.504338] env[62070]: DEBUG nova.compute.manager [None req-5ea28d70-9055-4c31-95a2-4fc36eb772ba tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: c16d175c-0b23-4f72-bdb0-844c6f80fd32] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62070) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3368}} [ 856.504942] env[62070]: DEBUG nova.objects.instance [None 
req-5ea28d70-9055-4c31-95a2-4fc36eb772ba tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Lazy-loading 'flavor' on Instance uuid c16d175c-0b23-4f72-bdb0-844c6f80fd32 {{(pid=62070) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 856.513216] env[62070]: DEBUG oslo_vmware.api [None req-ce7278d1-f76f-4623-9cfe-51d678cfa5d5 tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] Waiting for the task: (returnval){ [ 856.513216] env[62070]: value = "task-1121877" [ 856.513216] env[62070]: _type = "Task" [ 856.513216] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 856.523027] env[62070]: DEBUG oslo_vmware.api [None req-ce7278d1-f76f-4623-9cfe-51d678cfa5d5 tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] Task: {'id': task-1121877, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 856.682612] env[62070]: INFO nova.compute.manager [-] [instance: 27987ff6-77c9-4876-8b39-dcc20ce4158a] Took 1.37 seconds to deallocate network for instance. [ 856.727346] env[62070]: DEBUG oslo_concurrency.lockutils [None req-e0c1f7c9-4467-40ac-bc84-3c816df5b227 tempest-ServersNegativeTestMultiTenantJSON-2128819833 tempest-ServersNegativeTestMultiTenantJSON-2128819833-project-member] Releasing lock "refresh_cache-4bba7448-69f7-4764-9ae6-eb6585f71515" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 856.727823] env[62070]: DEBUG nova.compute.manager [None req-e0c1f7c9-4467-40ac-bc84-3c816df5b227 tempest-ServersNegativeTestMultiTenantJSON-2128819833 tempest-ServersNegativeTestMultiTenantJSON-2128819833-project-member] [instance: 4bba7448-69f7-4764-9ae6-eb6585f71515] Instance network_info: |[{"id": "b1394ea1-e455-496f-9aa1-6eacc606ec0a", "address": "fa:16:3e:82:5b:76", "network": {"id": "eb169037-d713-4ffd-8936-dc4c33b4724d", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-358048878-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d85661855bb646e1935f013cb2607aec", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6ab2e9f5-54fd-4cab-9405-ed65e2aaba64", "external-id": "nsx-vlan-transportzone-222", "segmentation_id": 222, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb1394ea1-e4", "ovs_interfaceid": "b1394ea1-e455-496f-9aa1-6eacc606ec0a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 856.729017] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-e0c1f7c9-4467-40ac-bc84-3c816df5b227 tempest-ServersNegativeTestMultiTenantJSON-2128819833 tempest-ServersNegativeTestMultiTenantJSON-2128819833-project-member] [instance: 4bba7448-69f7-4764-9ae6-eb6585f71515] Instance VIF info 
[{'network_name': 'br-int', 'mac_address': 'fa:16:3e:82:5b:76', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6ab2e9f5-54fd-4cab-9405-ed65e2aaba64', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b1394ea1-e455-496f-9aa1-6eacc606ec0a', 'vif_model': 'vmxnet3'}] {{(pid=62070) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 856.737920] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-e0c1f7c9-4467-40ac-bc84-3c816df5b227 tempest-ServersNegativeTestMultiTenantJSON-2128819833 tempest-ServersNegativeTestMultiTenantJSON-2128819833-project-member] Creating folder: Project (d85661855bb646e1935f013cb2607aec). Parent ref: group-v245319. {{(pid=62070) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 856.739099] env[62070]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-295eddab-840e-4208-a539-277a2fafbc37 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.745283] env[62070]: DEBUG nova.compute.manager [req-1c0c7913-335d-470e-9330-3ff55005c5ba req-6159f0e5-176f-4dcd-9be5-6fc033712191 service nova] [instance: fb054a32-c1aa-4884-a087-da5ad34cf3c4] Received event network-vif-plugged-32f47285-7ff0-405e-849d-27e73999e359 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 856.745582] env[62070]: DEBUG oslo_concurrency.lockutils [req-1c0c7913-335d-470e-9330-3ff55005c5ba req-6159f0e5-176f-4dcd-9be5-6fc033712191 service nova] Acquiring lock "fb054a32-c1aa-4884-a087-da5ad34cf3c4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 856.745842] env[62070]: DEBUG oslo_concurrency.lockutils [req-1c0c7913-335d-470e-9330-3ff55005c5ba req-6159f0e5-176f-4dcd-9be5-6fc033712191 service nova] Lock "fb054a32-c1aa-4884-a087-da5ad34cf3c4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 856.746072] env[62070]: DEBUG oslo_concurrency.lockutils [req-1c0c7913-335d-470e-9330-3ff55005c5ba req-6159f0e5-176f-4dcd-9be5-6fc033712191 service nova] Lock "fb054a32-c1aa-4884-a087-da5ad34cf3c4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 856.746323] env[62070]: DEBUG nova.compute.manager [req-1c0c7913-335d-470e-9330-3ff55005c5ba req-6159f0e5-176f-4dcd-9be5-6fc033712191 service nova] [instance: fb054a32-c1aa-4884-a087-da5ad34cf3c4] No waiting events found dispatching network-vif-plugged-32f47285-7ff0-405e-849d-27e73999e359 {{(pid=62070) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 856.746566] env[62070]: WARNING nova.compute.manager [req-1c0c7913-335d-470e-9330-3ff55005c5ba req-6159f0e5-176f-4dcd-9be5-6fc033712191 service nova] [instance: fb054a32-c1aa-4884-a087-da5ad34cf3c4] Received unexpected event network-vif-plugged-32f47285-7ff0-405e-849d-27e73999e359 for instance with vm_state building and task_state spawning. 
[ 856.751088] env[62070]: INFO nova.virt.vmwareapi.vm_util [None req-e0c1f7c9-4467-40ac-bc84-3c816df5b227 tempest-ServersNegativeTestMultiTenantJSON-2128819833 tempest-ServersNegativeTestMultiTenantJSON-2128819833-project-member] Created folder: Project (d85661855bb646e1935f013cb2607aec) in parent group-v245319. [ 856.751305] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-e0c1f7c9-4467-40ac-bc84-3c816df5b227 tempest-ServersNegativeTestMultiTenantJSON-2128819833 tempest-ServersNegativeTestMultiTenantJSON-2128819833-project-member] Creating folder: Instances. Parent ref: group-v245428. {{(pid=62070) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 856.751577] env[62070]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-35223e34-8c80-4a51-a017-88111a797551 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.764911] env[62070]: INFO nova.virt.vmwareapi.vm_util [None req-e0c1f7c9-4467-40ac-bc84-3c816df5b227 tempest-ServersNegativeTestMultiTenantJSON-2128819833 tempest-ServersNegativeTestMultiTenantJSON-2128819833-project-member] Created folder: Instances in parent group-v245428. [ 856.765369] env[62070]: DEBUG oslo.service.loopingcall [None req-e0c1f7c9-4467-40ac-bc84-3c816df5b227 tempest-ServersNegativeTestMultiTenantJSON-2128819833 tempest-ServersNegativeTestMultiTenantJSON-2128819833-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 856.765694] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4bba7448-69f7-4764-9ae6-eb6585f71515] Creating VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 856.766058] env[62070]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4fc6e6e8-c9a2-41e6-b0dd-aa77306a4655 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.792544] env[62070]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 856.792544] env[62070]: value = "task-1121880" [ 856.792544] env[62070]: _type = "Task" [ 856.792544] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 856.808099] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1121880, 'name': CreateVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 856.827359] env[62070]: DEBUG oslo_vmware.api [None req-d1ae98c6-774b-4beb-8cc9-59c38f859b35 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Task: {'id': task-1121875, 'name': ReconfigVM_Task, 'duration_secs': 0.928062} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 856.827690] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-d1ae98c6-774b-4beb-8cc9-59c38f859b35 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 62758a38-4819-4d5a-97ed-db6c9ceb97bf] Reconfigured VM instance instance-00000045 to attach disk [datastore2] 62758a38-4819-4d5a-97ed-db6c9ceb97bf/62758a38-4819-4d5a-97ed-db6c9ceb97bf.vmdk or device None with type sparse {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 856.828660] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8c0cb4ed-25aa-41c4-aaf5-d6b6f42568ad {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.836190] env[62070]: DEBUG oslo_vmware.api [None req-d1ae98c6-774b-4beb-8cc9-59c38f859b35 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Waiting for the task: (returnval){ [ 856.836190] env[62070]: value = "task-1121881" [ 856.836190] env[62070]: _type = "Task" [ 856.836190] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 856.849032] env[62070]: DEBUG oslo_vmware.api [None req-d1ae98c6-774b-4beb-8cc9-59c38f859b35 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Task: {'id': task-1121881, 'name': Rename_Task} progress is 5%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 856.857883] env[62070]: INFO nova.compute.manager [-] [instance: e5deccf6-f967-4e3c-bee0-2e1ad0bb4560] Took 1.43 seconds to deallocate network for instance. 
[ 856.925443] env[62070]: DEBUG nova.scheduler.client.report [None req-732b2ab1-8333-4faa-b641-cbbd113d03d2 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Updated inventory for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 with generation 94 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 856.925775] env[62070]: DEBUG nova.compute.provider_tree [None req-732b2ab1-8333-4faa-b641-cbbd113d03d2 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Updating resource provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 generation from 94 to 95 during operation: update_inventory {{(pid=62070) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 856.926109] env[62070]: DEBUG nova.compute.provider_tree [None req-732b2ab1-8333-4faa-b641-cbbd113d03d2 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Updating inventory in ProviderTree for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 856.949868] env[62070]: DEBUG nova.network.neutron [None req-58349f23-6bf8-4092-ac97-bf388655a094 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: fb054a32-c1aa-4884-a087-da5ad34cf3c4] Successfully updated port: 32f47285-7ff0-405e-849d-27e73999e359 {{(pid=62070) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 857.010271] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-5ea28d70-9055-4c31-95a2-4fc36eb772ba tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: c16d175c-0b23-4f72-bdb0-844c6f80fd32] Powering off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 857.010675] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a858221b-db5d-4592-9d83-e7fc9979b74e {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.020722] env[62070]: DEBUG oslo_vmware.api [None req-5ea28d70-9055-4c31-95a2-4fc36eb772ba tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Waiting for the task: (returnval){ [ 857.020722] env[62070]: value = "task-1121882" [ 857.020722] env[62070]: _type = "Task" [ 857.020722] env[62070]: } to complete. 
{{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 857.030967] env[62070]: DEBUG oslo_vmware.api [None req-ce7278d1-f76f-4623-9cfe-51d678cfa5d5 tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] Task: {'id': task-1121877, 'name': ReconfigVM_Task, 'duration_secs': 0.424944} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 857.031219] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-ce7278d1-f76f-4623-9cfe-51d678cfa5d5 tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] [instance: 61ab347d-1342-4f59-8955-10d575993b77] Reconfigured VM instance instance-00000046 to attach disk [datastore2] 61ab347d-1342-4f59-8955-10d575993b77/61ab347d-1342-4f59-8955-10d575993b77.vmdk or device None with type sparse {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 857.031882] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4f7b55f7-527f-4f1c-ac84-073774f128c3 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.037873] env[62070]: DEBUG oslo_vmware.api [None req-5ea28d70-9055-4c31-95a2-4fc36eb772ba tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': task-1121882, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 857.041071] env[62070]: DEBUG oslo_vmware.api [None req-ce7278d1-f76f-4623-9cfe-51d678cfa5d5 tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] Waiting for the task: (returnval){ [ 857.041071] env[62070]: value = "task-1121883" [ 857.041071] env[62070]: _type = "Task" [ 857.041071] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 857.052664] env[62070]: DEBUG oslo_vmware.api [None req-ce7278d1-f76f-4623-9cfe-51d678cfa5d5 tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] Task: {'id': task-1121883, 'name': Rename_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 857.100776] env[62070]: DEBUG nova.compute.manager [req-b4ab8774-4043-4acd-8c7c-5ebf56bf25f4 req-556ce0dd-011f-405b-8442-81cc54520e82 service nova] [instance: 4bba7448-69f7-4764-9ae6-eb6585f71515] Received event network-changed-b1394ea1-e455-496f-9aa1-6eacc606ec0a {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 857.100776] env[62070]: DEBUG nova.compute.manager [req-b4ab8774-4043-4acd-8c7c-5ebf56bf25f4 req-556ce0dd-011f-405b-8442-81cc54520e82 service nova] [instance: 4bba7448-69f7-4764-9ae6-eb6585f71515] Refreshing instance network info cache due to event network-changed-b1394ea1-e455-496f-9aa1-6eacc606ec0a. 
{{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 857.100776] env[62070]: DEBUG oslo_concurrency.lockutils [req-b4ab8774-4043-4acd-8c7c-5ebf56bf25f4 req-556ce0dd-011f-405b-8442-81cc54520e82 service nova] Acquiring lock "refresh_cache-4bba7448-69f7-4764-9ae6-eb6585f71515" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 857.101218] env[62070]: DEBUG oslo_concurrency.lockutils [req-b4ab8774-4043-4acd-8c7c-5ebf56bf25f4 req-556ce0dd-011f-405b-8442-81cc54520e82 service nova] Acquired lock "refresh_cache-4bba7448-69f7-4764-9ae6-eb6585f71515" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 857.101218] env[62070]: DEBUG nova.network.neutron [req-b4ab8774-4043-4acd-8c7c-5ebf56bf25f4 req-556ce0dd-011f-405b-8442-81cc54520e82 service nova] [instance: 4bba7448-69f7-4764-9ae6-eb6585f71515] Refreshing network info cache for port b1394ea1-e455-496f-9aa1-6eacc606ec0a {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 857.191175] env[62070]: DEBUG oslo_concurrency.lockutils [None req-361b1276-74ee-478a-b8ef-42cc811e08e6 tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 857.305609] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1121880, 'name': CreateVM_Task, 'duration_secs': 0.383226} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 857.305913] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4bba7448-69f7-4764-9ae6-eb6585f71515] Created VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 857.306919] env[62070]: DEBUG oslo_concurrency.lockutils [None req-e0c1f7c9-4467-40ac-bc84-3c816df5b227 tempest-ServersNegativeTestMultiTenantJSON-2128819833 tempest-ServersNegativeTestMultiTenantJSON-2128819833-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 857.307242] env[62070]: DEBUG oslo_concurrency.lockutils [None req-e0c1f7c9-4467-40ac-bc84-3c816df5b227 tempest-ServersNegativeTestMultiTenantJSON-2128819833 tempest-ServersNegativeTestMultiTenantJSON-2128819833-project-member] Acquired lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 857.307739] env[62070]: DEBUG oslo_concurrency.lockutils [None req-e0c1f7c9-4467-40ac-bc84-3c816df5b227 tempest-ServersNegativeTestMultiTenantJSON-2128819833 tempest-ServersNegativeTestMultiTenantJSON-2128819833-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 857.308126] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d6dbcd6c-0d1a-4543-be79-8c0ea88432a7 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} 
[ 857.314907] env[62070]: DEBUG oslo_vmware.api [None req-e0c1f7c9-4467-40ac-bc84-3c816df5b227 tempest-ServersNegativeTestMultiTenantJSON-2128819833 tempest-ServersNegativeTestMultiTenantJSON-2128819833-project-member] Waiting for the task: (returnval){ [ 857.314907] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]526c4621-ba61-5581-0464-e9a85f62fc13" [ 857.314907] env[62070]: _type = "Task" [ 857.314907] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 857.326666] env[62070]: DEBUG oslo_vmware.api [None req-e0c1f7c9-4467-40ac-bc84-3c816df5b227 tempest-ServersNegativeTestMultiTenantJSON-2128819833 tempest-ServersNegativeTestMultiTenantJSON-2128819833-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]526c4621-ba61-5581-0464-e9a85f62fc13, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 857.345664] env[62070]: DEBUG oslo_vmware.api [None req-d1ae98c6-774b-4beb-8cc9-59c38f859b35 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Task: {'id': task-1121881, 'name': Rename_Task, 'duration_secs': 0.211578} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 857.345938] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-d1ae98c6-774b-4beb-8cc9-59c38f859b35 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 62758a38-4819-4d5a-97ed-db6c9ceb97bf] Powering on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 857.346194] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4e1254b1-c4c7-40a7-90d8-0c7a3dc74ba3 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.352964] env[62070]: DEBUG oslo_vmware.api [None req-d1ae98c6-774b-4beb-8cc9-59c38f859b35 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Waiting for the task: (returnval){ [ 857.352964] env[62070]: value = "task-1121884" [ 857.352964] env[62070]: _type = "Task" [ 857.352964] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 857.361958] env[62070]: DEBUG oslo_vmware.api [None req-d1ae98c6-774b-4beb-8cc9-59c38f859b35 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Task: {'id': task-1121884, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 857.369579] env[62070]: DEBUG oslo_concurrency.lockutils [None req-7fb39e9f-8441-46d0-8705-2c950367f499 tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 857.408878] env[62070]: DEBUG oslo_concurrency.lockutils [None req-70c50811-0af7-45a0-93fe-9c08c68c95af tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Acquiring lock "e74fd58c-cfa8-45c4-8f02-96234b4a9192" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 857.409267] env[62070]: DEBUG oslo_concurrency.lockutils [None req-70c50811-0af7-45a0-93fe-9c08c68c95af tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Lock "e74fd58c-cfa8-45c4-8f02-96234b4a9192" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.001s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 857.409702] env[62070]: DEBUG oslo_concurrency.lockutils [None req-70c50811-0af7-45a0-93fe-9c08c68c95af tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Acquiring lock "e74fd58c-cfa8-45c4-8f02-96234b4a9192-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 857.409702] env[62070]: DEBUG oslo_concurrency.lockutils [None req-70c50811-0af7-45a0-93fe-9c08c68c95af tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Lock "e74fd58c-cfa8-45c4-8f02-96234b4a9192-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 857.409881] env[62070]: DEBUG oslo_concurrency.lockutils [None req-70c50811-0af7-45a0-93fe-9c08c68c95af tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Lock "e74fd58c-cfa8-45c4-8f02-96234b4a9192-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 857.412234] env[62070]: INFO nova.compute.manager [None req-70c50811-0af7-45a0-93fe-9c08c68c95af tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] [instance: e74fd58c-cfa8-45c4-8f02-96234b4a9192] Terminating instance [ 857.414822] env[62070]: DEBUG nova.compute.manager [None req-70c50811-0af7-45a0-93fe-9c08c68c95af tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] [instance: e74fd58c-cfa8-45c4-8f02-96234b4a9192] Start destroying the instance on the hypervisor. 
{{(pid=62070) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 857.415031] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-70c50811-0af7-45a0-93fe-9c08c68c95af tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] [instance: e74fd58c-cfa8-45c4-8f02-96234b4a9192] Destroying instance {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 857.415866] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b63bb2f5-ef3c-4ebb-9b93-277cc1412eac {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.424599] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-70c50811-0af7-45a0-93fe-9c08c68c95af tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] [instance: e74fd58c-cfa8-45c4-8f02-96234b4a9192] Powering off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 857.424844] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4e704e9c-5d77-4c39-b7db-155a13eff055 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.431970] env[62070]: DEBUG oslo_concurrency.lockutils [None req-732b2ab1-8333-4faa-b641-cbbd113d03d2 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 3.458s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 857.435544] env[62070]: DEBUG oslo_concurrency.lockutils [None req-045c7ab7-65c9-449f-b5dd-885011718b78 tempest-ServersTestManualDisk-1129474164 tempest-ServersTestManualDisk-1129474164-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 21.551s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 857.435815] env[62070]: DEBUG nova.objects.instance [None req-045c7ab7-65c9-449f-b5dd-885011718b78 tempest-ServersTestManualDisk-1129474164 tempest-ServersTestManualDisk-1129474164-project-member] Lazy-loading 'resources' on Instance uuid d0914f90-200c-4715-aaab-54beacf339b9 {{(pid=62070) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 857.438252] env[62070]: DEBUG oslo_vmware.api [None req-70c50811-0af7-45a0-93fe-9c08c68c95af tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Waiting for the task: (returnval){ [ 857.438252] env[62070]: value = "task-1121885" [ 857.438252] env[62070]: _type = "Task" [ 857.438252] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 857.447217] env[62070]: DEBUG oslo_vmware.api [None req-70c50811-0af7-45a0-93fe-9c08c68c95af tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Task: {'id': task-1121885, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 857.454920] env[62070]: DEBUG oslo_concurrency.lockutils [None req-58349f23-6bf8-4092-ac97-bf388655a094 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Acquiring lock "refresh_cache-fb054a32-c1aa-4884-a087-da5ad34cf3c4" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 857.454920] env[62070]: DEBUG oslo_concurrency.lockutils [None req-58349f23-6bf8-4092-ac97-bf388655a094 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Acquired lock "refresh_cache-fb054a32-c1aa-4884-a087-da5ad34cf3c4" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 857.454920] env[62070]: DEBUG nova.network.neutron [None req-58349f23-6bf8-4092-ac97-bf388655a094 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: fb054a32-c1aa-4884-a087-da5ad34cf3c4] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 857.462344] env[62070]: INFO nova.scheduler.client.report [None req-732b2ab1-8333-4faa-b641-cbbd113d03d2 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Deleted allocations for instance c3c6e93c-80be-4e71-87fb-2ff8db8d30fe [ 857.534144] env[62070]: DEBUG oslo_vmware.api [None req-5ea28d70-9055-4c31-95a2-4fc36eb772ba tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': task-1121882, 'name': PowerOffVM_Task, 'duration_secs': 0.267963} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 857.534775] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-5ea28d70-9055-4c31-95a2-4fc36eb772ba tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: c16d175c-0b23-4f72-bdb0-844c6f80fd32] Powered off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 857.535662] env[62070]: DEBUG nova.compute.manager [None req-5ea28d70-9055-4c31-95a2-4fc36eb772ba tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: c16d175c-0b23-4f72-bdb0-844c6f80fd32] Checking state {{(pid=62070) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 857.535936] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a9fa0e9-82bf-4312-833a-ee789df18800 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.565147] env[62070]: DEBUG oslo_vmware.api [None req-ce7278d1-f76f-4623-9cfe-51d678cfa5d5 tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] Task: {'id': task-1121883, 'name': Rename_Task, 'duration_secs': 0.190257} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 857.565471] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-ce7278d1-f76f-4623-9cfe-51d678cfa5d5 tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] [instance: 61ab347d-1342-4f59-8955-10d575993b77] Powering on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 857.565746] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-486eb4ac-a65e-4f76-9aa8-b02e3d34d95b {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.574509] env[62070]: DEBUG oslo_vmware.api [None req-ce7278d1-f76f-4623-9cfe-51d678cfa5d5 tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] Waiting for the task: (returnval){ [ 857.574509] env[62070]: value = "task-1121886" [ 857.574509] env[62070]: _type = "Task" [ 857.574509] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 857.585518] env[62070]: DEBUG oslo_vmware.api [None req-ce7278d1-f76f-4623-9cfe-51d678cfa5d5 tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] Task: {'id': task-1121886, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 857.826950] env[62070]: DEBUG oslo_vmware.api [None req-e0c1f7c9-4467-40ac-bc84-3c816df5b227 tempest-ServersNegativeTestMultiTenantJSON-2128819833 tempest-ServersNegativeTestMultiTenantJSON-2128819833-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]526c4621-ba61-5581-0464-e9a85f62fc13, 'name': SearchDatastore_Task, 'duration_secs': 0.033921} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 857.827277] env[62070]: DEBUG oslo_concurrency.lockutils [None req-e0c1f7c9-4467-40ac-bc84-3c816df5b227 tempest-ServersNegativeTestMultiTenantJSON-2128819833 tempest-ServersNegativeTestMultiTenantJSON-2128819833-project-member] Releasing lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 857.827517] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-e0c1f7c9-4467-40ac-bc84-3c816df5b227 tempest-ServersNegativeTestMultiTenantJSON-2128819833 tempest-ServersNegativeTestMultiTenantJSON-2128819833-project-member] [instance: 4bba7448-69f7-4764-9ae6-eb6585f71515] Processing image 43ea607c-7ece-4601-9b11-75c6a16aa7dd {{(pid=62070) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 857.827781] env[62070]: DEBUG oslo_concurrency.lockutils [None req-e0c1f7c9-4467-40ac-bc84-3c816df5b227 tempest-ServersNegativeTestMultiTenantJSON-2128819833 tempest-ServersNegativeTestMultiTenantJSON-2128819833-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 857.827932] env[62070]: DEBUG oslo_concurrency.lockutils [None req-e0c1f7c9-4467-40ac-bc84-3c816df5b227 tempest-ServersNegativeTestMultiTenantJSON-2128819833 tempest-ServersNegativeTestMultiTenantJSON-2128819833-project-member] Acquired lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 857.828145] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-e0c1f7c9-4467-40ac-bc84-3c816df5b227 tempest-ServersNegativeTestMultiTenantJSON-2128819833 tempest-ServersNegativeTestMultiTenantJSON-2128819833-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 857.828408] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-555b2583-ca18-4c69-9186-73bd0b98d2f6 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.840020] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-e0c1f7c9-4467-40ac-bc84-3c816df5b227 tempest-ServersNegativeTestMultiTenantJSON-2128819833 tempest-ServersNegativeTestMultiTenantJSON-2128819833-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 857.840020] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-e0c1f7c9-4467-40ac-bc84-3c816df5b227 tempest-ServersNegativeTestMultiTenantJSON-2128819833 tempest-ServersNegativeTestMultiTenantJSON-2128819833-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62070) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 857.840020] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-62230ecf-aa18-4f3a-aefd-2b7d942af3da {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.842438] env[62070]: DEBUG nova.network.neutron [req-b4ab8774-4043-4acd-8c7c-5ebf56bf25f4 req-556ce0dd-011f-405b-8442-81cc54520e82 service nova] [instance: 4bba7448-69f7-4764-9ae6-eb6585f71515] Updated VIF entry in instance network info cache for port b1394ea1-e455-496f-9aa1-6eacc606ec0a. {{(pid=62070) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 857.842799] env[62070]: DEBUG nova.network.neutron [req-b4ab8774-4043-4acd-8c7c-5ebf56bf25f4 req-556ce0dd-011f-405b-8442-81cc54520e82 service nova] [instance: 4bba7448-69f7-4764-9ae6-eb6585f71515] Updating instance_info_cache with network_info: [{"id": "b1394ea1-e455-496f-9aa1-6eacc606ec0a", "address": "fa:16:3e:82:5b:76", "network": {"id": "eb169037-d713-4ffd-8936-dc4c33b4724d", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-358048878-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d85661855bb646e1935f013cb2607aec", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6ab2e9f5-54fd-4cab-9405-ed65e2aaba64", "external-id": "nsx-vlan-transportzone-222", "segmentation_id": 222, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb1394ea1-e4", "ovs_interfaceid": "b1394ea1-e455-496f-9aa1-6eacc606ec0a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 857.848288] env[62070]: DEBUG oslo_vmware.api [None req-e0c1f7c9-4467-40ac-bc84-3c816df5b227 tempest-ServersNegativeTestMultiTenantJSON-2128819833 tempest-ServersNegativeTestMultiTenantJSON-2128819833-project-member] Waiting for the task: (returnval){ [ 857.848288] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]5220b0ec-0093-c4bb-4e5b-de4e5445347f" [ 857.848288] env[62070]: _type = "Task" [ 857.848288] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 857.856410] env[62070]: DEBUG oslo_vmware.api [None req-e0c1f7c9-4467-40ac-bc84-3c816df5b227 tempest-ServersNegativeTestMultiTenantJSON-2128819833 tempest-ServersNegativeTestMultiTenantJSON-2128819833-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]5220b0ec-0093-c4bb-4e5b-de4e5445347f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 857.865922] env[62070]: DEBUG oslo_vmware.api [None req-d1ae98c6-774b-4beb-8cc9-59c38f859b35 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Task: {'id': task-1121884, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 857.953302] env[62070]: DEBUG oslo_vmware.api [None req-70c50811-0af7-45a0-93fe-9c08c68c95af tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Task: {'id': task-1121885, 'name': PowerOffVM_Task, 'duration_secs': 0.207357} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 857.953616] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-70c50811-0af7-45a0-93fe-9c08c68c95af tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] [instance: e74fd58c-cfa8-45c4-8f02-96234b4a9192] Powered off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 857.953818] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-70c50811-0af7-45a0-93fe-9c08c68c95af tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] [instance: e74fd58c-cfa8-45c4-8f02-96234b4a9192] Unregistering the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 857.954146] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-140d63f5-86b6-49fb-9cb3-26a6a5f5d39b {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.971583] env[62070]: DEBUG oslo_concurrency.lockutils [None req-732b2ab1-8333-4faa-b641-cbbd113d03d2 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Lock "c3c6e93c-80be-4e71-87fb-2ff8db8d30fe" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 28.280s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 857.997457] env[62070]: DEBUG nova.network.neutron [None req-58349f23-6bf8-4092-ac97-bf388655a094 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: fb054a32-c1aa-4884-a087-da5ad34cf3c4] Instance cache missing network info. 
{{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 858.033100] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-70c50811-0af7-45a0-93fe-9c08c68c95af tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] [instance: e74fd58c-cfa8-45c4-8f02-96234b4a9192] Unregistered the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 858.033356] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-70c50811-0af7-45a0-93fe-9c08c68c95af tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] [instance: e74fd58c-cfa8-45c4-8f02-96234b4a9192] Deleting contents of the VM from datastore datastore1 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 858.033552] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-70c50811-0af7-45a0-93fe-9c08c68c95af tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Deleting the datastore file [datastore1] e74fd58c-cfa8-45c4-8f02-96234b4a9192 {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 858.033826] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-77ee4b18-f2c1-44ac-ae90-688e2363c69e {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.041736] env[62070]: DEBUG oslo_vmware.api [None req-70c50811-0af7-45a0-93fe-9c08c68c95af tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Waiting for the task: (returnval){ [ 858.041736] env[62070]: value = "task-1121888" [ 858.041736] env[62070]: _type = "Task" [ 858.041736] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 858.052849] env[62070]: DEBUG oslo_vmware.api [None req-70c50811-0af7-45a0-93fe-9c08c68c95af tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Task: {'id': task-1121888, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 858.056263] env[62070]: DEBUG oslo_concurrency.lockutils [None req-5ea28d70-9055-4c31-95a2-4fc36eb772ba tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Lock "c16d175c-0b23-4f72-bdb0-844c6f80fd32" "released" by "nova.compute.manager.ComputeManager.stop_instance.<locals>.do_stop_instance" :: held 1.598s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 858.081459] env[62070]: DEBUG oslo_concurrency.lockutils [None req-98c8c84f-4cf9-474f-aae7-fd7e5cc6d19c tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Acquiring lock "7dc27fe6-495f-498d-88fe-a99ddc19a21c" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 858.081783] env[62070]: DEBUG oslo_concurrency.lockutils [None req-98c8c84f-4cf9-474f-aae7-fd7e5cc6d19c tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Lock "7dc27fe6-495f-498d-88fe-a99ddc19a21c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 858.095100] env[62070]: DEBUG oslo_vmware.api [None req-ce7278d1-f76f-4623-9cfe-51d678cfa5d5 tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] Task: {'id': task-1121886, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 858.215617] env[62070]: DEBUG nova.network.neutron [None req-58349f23-6bf8-4092-ac97-bf388655a094 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: fb054a32-c1aa-4884-a087-da5ad34cf3c4] Updating instance_info_cache with network_info: [{"id": "32f47285-7ff0-405e-849d-27e73999e359", "address": "fa:16:3e:dd:6e:ec", "network": {"id": "4888f989-958d-49ff-bf5a-06873e4cc624", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-906255456-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d079c0ef3ed745fcaf69dc728dca4466", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7e0240aa-a694-48fc-a0f9-6f2d3e71aa12", "external-id": "nsx-vlan-transportzone-249", "segmentation_id": 249, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap32f47285-7f", "ovs_interfaceid": "32f47285-7ff0-405e-849d-27e73999e359", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 858.346148] env[62070]: DEBUG oslo_concurrency.lockutils [req-b4ab8774-4043-4acd-8c7c-5ebf56bf25f4 req-556ce0dd-011f-405b-8442-81cc54520e82 service nova] Releasing lock "refresh_cache-4bba7448-69f7-4764-9ae6-eb6585f71515" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 858.346428] env[62070]: DEBUG nova.compute.manager [req-b4ab8774-4043-4acd-8c7c-5ebf56bf25f4 req-556ce0dd-011f-405b-8442-81cc54520e82 service nova] [instance: 27987ff6-77c9-4876-8b39-dcc20ce4158a] Received event network-vif-deleted-ec5674a0-9a0f-48f3-ad88-00fe5f326e8c {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 858.346638] env[62070]: DEBUG nova.compute.manager [req-b4ab8774-4043-4acd-8c7c-5ebf56bf25f4 req-556ce0dd-011f-405b-8442-81cc54520e82 service nova] [instance: e5deccf6-f967-4e3c-bee0-2e1ad0bb4560] Received event network-vif-deleted-63044c40-0b6c-4711-9987-e4b6dec9f8b5 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 858.364698] env[62070]: DEBUG oslo_vmware.api [None req-e0c1f7c9-4467-40ac-bc84-3c816df5b227 tempest-ServersNegativeTestMultiTenantJSON-2128819833 tempest-ServersNegativeTestMultiTenantJSON-2128819833-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]5220b0ec-0093-c4bb-4e5b-de4e5445347f, 'name': SearchDatastore_Task, 'duration_secs': 0.015616} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 858.365893] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-66a4a51e-5a1b-48da-a4e4-d1122c97354c {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.373663] env[62070]: DEBUG oslo_vmware.api [None req-d1ae98c6-774b-4beb-8cc9-59c38f859b35 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Task: {'id': task-1121884, 'name': PowerOnVM_Task, 'duration_secs': 0.621956} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 858.374449] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-d1ae98c6-774b-4beb-8cc9-59c38f859b35 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 62758a38-4819-4d5a-97ed-db6c9ceb97bf] Powered on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 858.374724] env[62070]: INFO nova.compute.manager [None req-d1ae98c6-774b-4beb-8cc9-59c38f859b35 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 62758a38-4819-4d5a-97ed-db6c9ceb97bf] Took 12.45 seconds to spawn the instance on the hypervisor. [ 858.374924] env[62070]: DEBUG nova.compute.manager [None req-d1ae98c6-774b-4beb-8cc9-59c38f859b35 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 62758a38-4819-4d5a-97ed-db6c9ceb97bf] Checking state {{(pid=62070) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 858.375724] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3cb8b1a8-b0f6-4105-933e-3f71eb201045 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.379436] env[62070]: DEBUG oslo_vmware.api [None req-e0c1f7c9-4467-40ac-bc84-3c816df5b227 tempest-ServersNegativeTestMultiTenantJSON-2128819833 tempest-ServersNegativeTestMultiTenantJSON-2128819833-project-member] Waiting for the task: (returnval){ [ 858.379436] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]526fb493-7f49-5362-4ceb-dbc1cf5e7e73" [ 858.379436] env[62070]: _type = "Task" [ 858.379436] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 858.388691] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa1af2b2-f266-4777-84dc-445410fbbf62 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.394693] env[62070]: DEBUG oslo_vmware.api [None req-e0c1f7c9-4467-40ac-bc84-3c816df5b227 tempest-ServersNegativeTestMultiTenantJSON-2128819833 tempest-ServersNegativeTestMultiTenantJSON-2128819833-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]526fb493-7f49-5362-4ceb-dbc1cf5e7e73, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 858.400434] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5407151d-3841-4d6b-b336-453bc6e58281 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.433112] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4bbd544-87e9-4fe5-97a2-f485e60f0922 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.442372] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ba55ee2-5569-4fea-9fa9-82a45dfa43a0 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.461329] env[62070]: DEBUG nova.compute.provider_tree [None req-045c7ab7-65c9-449f-b5dd-885011718b78 tempest-ServersTestManualDisk-1129474164 tempest-ServersTestManualDisk-1129474164-project-member] Updating inventory in ProviderTree for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 858.554051] env[62070]: DEBUG oslo_vmware.api [None req-70c50811-0af7-45a0-93fe-9c08c68c95af tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Task: {'id': task-1121888, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.49115} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 858.554317] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-70c50811-0af7-45a0-93fe-9c08c68c95af tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Deleted the datastore file {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 858.554533] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-70c50811-0af7-45a0-93fe-9c08c68c95af tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] [instance: e74fd58c-cfa8-45c4-8f02-96234b4a9192] Deleted contents of the VM from datastore datastore1 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 858.555066] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-70c50811-0af7-45a0-93fe-9c08c68c95af tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] [instance: e74fd58c-cfa8-45c4-8f02-96234b4a9192] Instance destroyed {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 858.555066] env[62070]: INFO nova.compute.manager [None req-70c50811-0af7-45a0-93fe-9c08c68c95af tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] [instance: e74fd58c-cfa8-45c4-8f02-96234b4a9192] Took 1.14 seconds to destroy the instance on the hypervisor. 
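The lock messages in the entries above ("Acquiring lock ... by ...", "acquired ... waited 0.001s", "released ... held 1.598s") come from oslo.concurrency's lockutils, which Nova uses to serialize per-instance operations. A minimal sketch of that pattern, assuming only oslo.concurrency is installed; the lock names are instance UUIDs copied from the log, and the function bodies are placeholders, not Nova's code:

```python
# Hedged sketch of the locking pattern behind the "Acquiring lock" /
# "acquired" / "released ... held N.NNNs" lines, using oslo.concurrency
# directly. Lock names are instance UUIDs from the log; bodies are stubs.
from oslo_concurrency import lockutils

@lockutils.synchronized('7dc27fe6-495f-498d-88fe-a99ddc19a21c')
def locked_do_build_and_run_instance():
    # Runs with the per-instance lock held; lockutils logs how long the
    # caller waited for the lock and how long it was held on release.
    pass

# Equivalent context-manager form:
with lockutils.lock('c16d175c-0b23-4f72-bdb0-844c6f80fd32'):
    pass  # critical section, e.g. stopping or rebuilding the instance
```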
[ 858.555325] env[62070]: DEBUG oslo.service.loopingcall [None req-70c50811-0af7-45a0-93fe-9c08c68c95af tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 858.555528] env[62070]: DEBUG nova.compute.manager [-] [instance: e74fd58c-cfa8-45c4-8f02-96234b4a9192] Deallocating network for instance {{(pid=62070) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 858.555625] env[62070]: DEBUG nova.network.neutron [-] [instance: e74fd58c-cfa8-45c4-8f02-96234b4a9192] deallocate_for_instance() {{(pid=62070) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 858.589191] env[62070]: DEBUG nova.compute.manager [None req-98c8c84f-4cf9-474f-aae7-fd7e5cc6d19c tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] [instance: 7dc27fe6-495f-498d-88fe-a99ddc19a21c] Starting instance... {{(pid=62070) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 858.592156] env[62070]: DEBUG oslo_vmware.api [None req-ce7278d1-f76f-4623-9cfe-51d678cfa5d5 tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] Task: {'id': task-1121886, 'name': PowerOnVM_Task, 'duration_secs': 0.573308} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 858.594440] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-ce7278d1-f76f-4623-9cfe-51d678cfa5d5 tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] [instance: 61ab347d-1342-4f59-8955-10d575993b77] Powered on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 858.594661] env[62070]: INFO nova.compute.manager [None req-ce7278d1-f76f-4623-9cfe-51d678cfa5d5 tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] [instance: 61ab347d-1342-4f59-8955-10d575993b77] Took 7.91 seconds to spawn the instance on the hypervisor. 
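The "Invoking <Method> with opID=...", "Task: {...} progress is N%", and "completed successfully" entries above are the oslo.vmware task lifecycle: invoke_api() starts an asynchronous vCenter task and wait_for_task() polls it (the _poll_task lines) until it finishes. A hedged sketch of that call pattern; the host, credentials, and managed-object id below are placeholders, and a real caller would obtain the VM reference from a PropertyCollector query rather than hard-coding it:

```python
# Illustrative sketch (not Nova's code) of the oslo.vmware pattern that
# produces the "Invoking ...", "Task ... progress", and "completed
# successfully" log lines above.
from oslo_vmware import api, vim_util

# Constructing the session connects to vCenter; values are placeholders.
session = api.VMwareAPISession(
    'vc1.example.test', 'user', 'password',
    api_retry_count=10, task_poll_interval=0.5)

# Hard-coded managed-object id purely for illustration.
vm_ref = vim_util.get_moref('vm-123', 'VirtualMachine')

# Start the PowerOnVM_Task and poll it until it reaches 100%.
task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
session.wait_for_task(task)
```

Nova's vmwareapi driver wraps the same calls in vm_util helpers such as power_on_instance, which is what the "Powered on the VM" entries in this log refer to.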
[ 858.594871] env[62070]: DEBUG nova.compute.manager [None req-ce7278d1-f76f-4623-9cfe-51d678cfa5d5 tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] [instance: 61ab347d-1342-4f59-8955-10d575993b77] Checking state {{(pid=62070) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 858.596293] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98145689-80ba-4436-a905-7122d1a57e0b {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.718201] env[62070]: DEBUG oslo_concurrency.lockutils [None req-58349f23-6bf8-4092-ac97-bf388655a094 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Releasing lock "refresh_cache-fb054a32-c1aa-4884-a087-da5ad34cf3c4" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 858.718520] env[62070]: DEBUG nova.compute.manager [None req-58349f23-6bf8-4092-ac97-bf388655a094 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: fb054a32-c1aa-4884-a087-da5ad34cf3c4] Instance network_info: |[{"id": "32f47285-7ff0-405e-849d-27e73999e359", "address": "fa:16:3e:dd:6e:ec", "network": {"id": "4888f989-958d-49ff-bf5a-06873e4cc624", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-906255456-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d079c0ef3ed745fcaf69dc728dca4466", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7e0240aa-a694-48fc-a0f9-6f2d3e71aa12", "external-id": "nsx-vlan-transportzone-249", "segmentation_id": 249, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap32f47285-7f", "ovs_interfaceid": "32f47285-7ff0-405e-849d-27e73999e359", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 858.718953] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-58349f23-6bf8-4092-ac97-bf388655a094 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: fb054a32-c1aa-4884-a087-da5ad34cf3c4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:dd:6e:ec', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7e0240aa-a694-48fc-a0f9-6f2d3e71aa12', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '32f47285-7ff0-405e-849d-27e73999e359', 'vif_model': 'vmxnet3'}] {{(pid=62070) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 858.727461] env[62070]: DEBUG oslo.service.loopingcall [None req-58349f23-6bf8-4092-ac97-bf388655a094 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 858.728595] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fb054a32-c1aa-4884-a087-da5ad34cf3c4] Creating VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 858.728967] env[62070]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9922df9c-c7a8-4088-9299-8ca44f824185 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.765244] env[62070]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 858.765244] env[62070]: value = "task-1121889" [ 858.765244] env[62070]: _type = "Task" [ 858.765244] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 858.780133] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1121889, 'name': CreateVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 858.896864] env[62070]: DEBUG oslo_vmware.api [None req-e0c1f7c9-4467-40ac-bc84-3c816df5b227 tempest-ServersNegativeTestMultiTenantJSON-2128819833 tempest-ServersNegativeTestMultiTenantJSON-2128819833-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]526fb493-7f49-5362-4ceb-dbc1cf5e7e73, 'name': SearchDatastore_Task, 'duration_secs': 0.023205} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 858.899316] env[62070]: DEBUG oslo_concurrency.lockutils [None req-e0c1f7c9-4467-40ac-bc84-3c816df5b227 tempest-ServersNegativeTestMultiTenantJSON-2128819833 tempest-ServersNegativeTestMultiTenantJSON-2128819833-project-member] Releasing lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 858.899624] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-e0c1f7c9-4467-40ac-bc84-3c816df5b227 tempest-ServersNegativeTestMultiTenantJSON-2128819833 tempest-ServersNegativeTestMultiTenantJSON-2128819833-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore2] 4bba7448-69f7-4764-9ae6-eb6585f71515/4bba7448-69f7-4764-9ae6-eb6585f71515.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 858.900323] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-78d5209a-cdbd-4ded-b57b-b65192a15664 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.902731] env[62070]: INFO nova.compute.manager [None req-d1ae98c6-774b-4beb-8cc9-59c38f859b35 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 62758a38-4819-4d5a-97ed-db6c9ceb97bf] Took 38.52 seconds to build instance. 
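The "Instance VIF info" entry above shows how a single Neutron network_info record (the JSON blob logged a few entries earlier) is reduced to the small dict the VMware driver needs to build the VM's NIC: the NSX logical-switch id becomes an OpaqueNetwork reference and the Neutron port id becomes iface_id. A standalone illustration of that mapping, assuming one VIF dict shaped like the logged network_info entry; the helper name is hypothetical and vif_model is hard-coded to the vmxnet3 value seen in the log:

```python
# Hypothetical helper: derive the "Instance VIF info" dict from one entry
# of the Neutron network_info cache, using the field names shown in the
# log output above.
def vif_info_from_network_info(vif):
    details = vif['details']
    return {
        'network_name': vif['network']['bridge'],           # 'br-int'
        'mac_address': vif['address'],                       # 'fa:16:3e:...'
        'network_ref': {
            'type': 'OpaqueNetwork',
            'network-id': details['nsx-logical-switch-id'],
            'network-type': 'nsx.LogicalSwitch',
            'use-external-id': True,
        },
        'iface_id': vif['id'],        # Neutron port UUID
        'vif_model': 'vmxnet3',       # from image/flavor; fixed here
    }

# Example with the values from the log entry above:
# vif_info_from_network_info(network_info[0]) ->
#   {'network_name': 'br-int', 'mac_address': 'fa:16:3e:dd:6e:ec', ...}
```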
[ 858.910824] env[62070]: DEBUG oslo_vmware.api [None req-e0c1f7c9-4467-40ac-bc84-3c816df5b227 tempest-ServersNegativeTestMultiTenantJSON-2128819833 tempest-ServersNegativeTestMultiTenantJSON-2128819833-project-member] Waiting for the task: (returnval){ [ 858.910824] env[62070]: value = "task-1121890" [ 858.910824] env[62070]: _type = "Task" [ 858.910824] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 858.919232] env[62070]: DEBUG nova.compute.manager [req-7b82741f-02ea-4939-8a70-ee00a2ec8ea5 req-a66ec626-7ce5-4816-ae17-4e66bf3ea19c service nova] [instance: fb054a32-c1aa-4884-a087-da5ad34cf3c4] Received event network-changed-32f47285-7ff0-405e-849d-27e73999e359 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 858.919529] env[62070]: DEBUG nova.compute.manager [req-7b82741f-02ea-4939-8a70-ee00a2ec8ea5 req-a66ec626-7ce5-4816-ae17-4e66bf3ea19c service nova] [instance: fb054a32-c1aa-4884-a087-da5ad34cf3c4] Refreshing instance network info cache due to event network-changed-32f47285-7ff0-405e-849d-27e73999e359. {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 858.919775] env[62070]: DEBUG oslo_concurrency.lockutils [req-7b82741f-02ea-4939-8a70-ee00a2ec8ea5 req-a66ec626-7ce5-4816-ae17-4e66bf3ea19c service nova] Acquiring lock "refresh_cache-fb054a32-c1aa-4884-a087-da5ad34cf3c4" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 858.919936] env[62070]: DEBUG oslo_concurrency.lockutils [req-7b82741f-02ea-4939-8a70-ee00a2ec8ea5 req-a66ec626-7ce5-4816-ae17-4e66bf3ea19c service nova] Acquired lock "refresh_cache-fb054a32-c1aa-4884-a087-da5ad34cf3c4" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 858.920230] env[62070]: DEBUG nova.network.neutron [req-7b82741f-02ea-4939-8a70-ee00a2ec8ea5 req-a66ec626-7ce5-4816-ae17-4e66bf3ea19c service nova] [instance: fb054a32-c1aa-4884-a087-da5ad34cf3c4] Refreshing network info cache for port 32f47285-7ff0-405e-849d-27e73999e359 {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 858.925430] env[62070]: DEBUG oslo_vmware.api [None req-e0c1f7c9-4467-40ac-bc84-3c816df5b227 tempest-ServersNegativeTestMultiTenantJSON-2128819833 tempest-ServersNegativeTestMultiTenantJSON-2128819833-project-member] Task: {'id': task-1121890, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 858.983559] env[62070]: ERROR nova.scheduler.client.report [None req-045c7ab7-65c9-449f-b5dd-885011718b78 tempest-ServersTestManualDisk-1129474164 tempest-ServersTestManualDisk-1129474164-project-member] [req-eb30a05c-02e9-4a9b-9bf1-8b3c46cb909c] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 21c7c111-1b69-4468-b2c4-5dd96014fbd6. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-eb30a05c-02e9-4a9b-9bf1-8b3c46cb909c"}]} [ 859.005272] env[62070]: DEBUG nova.scheduler.client.report [None req-045c7ab7-65c9-449f-b5dd-885011718b78 tempest-ServersTestManualDisk-1129474164 tempest-ServersTestManualDisk-1129474164-project-member] Refreshing inventories for resource provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 859.022402] env[62070]: DEBUG nova.scheduler.client.report [None req-045c7ab7-65c9-449f-b5dd-885011718b78 tempest-ServersTestManualDisk-1129474164 tempest-ServersTestManualDisk-1129474164-project-member] Updating ProviderTree inventory for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 859.022544] env[62070]: DEBUG nova.compute.provider_tree [None req-045c7ab7-65c9-449f-b5dd-885011718b78 tempest-ServersTestManualDisk-1129474164 tempest-ServersTestManualDisk-1129474164-project-member] Updating inventory in ProviderTree for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 859.036547] env[62070]: DEBUG nova.scheduler.client.report [None req-045c7ab7-65c9-449f-b5dd-885011718b78 tempest-ServersTestManualDisk-1129474164 tempest-ServersTestManualDisk-1129474164-project-member] Refreshing aggregate associations for resource provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6, aggregates: None {{(pid=62070) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 859.058120] env[62070]: DEBUG nova.scheduler.client.report [None req-045c7ab7-65c9-449f-b5dd-885011718b78 tempest-ServersTestManualDisk-1129474164 tempest-ServersTestManualDisk-1129474164-project-member] Refreshing trait associations for resource provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6, traits: COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO {{(pid=62070) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 859.117383] env[62070]: DEBUG oslo_concurrency.lockutils [None req-98c8c84f-4cf9-474f-aae7-fd7e5cc6d19c tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 859.119564] env[62070]: INFO 
nova.compute.manager [None req-ce7278d1-f76f-4623-9cfe-51d678cfa5d5 tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] [instance: 61ab347d-1342-4f59-8955-10d575993b77] Took 27.37 seconds to build instance. [ 859.204021] env[62070]: INFO nova.compute.manager [None req-5285cf1a-979a-4750-b882-909847338032 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: c16d175c-0b23-4f72-bdb0-844c6f80fd32] Rebuilding instance [ 859.251536] env[62070]: DEBUG nova.compute.manager [None req-5285cf1a-979a-4750-b882-909847338032 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: c16d175c-0b23-4f72-bdb0-844c6f80fd32] Checking state {{(pid=62070) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 859.252500] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51a8a2ab-d941-4839-84ce-0d07a88f0a72 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.287915] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1121889, 'name': CreateVM_Task} progress is 99%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 859.320917] env[62070]: DEBUG nova.network.neutron [-] [instance: e74fd58c-cfa8-45c4-8f02-96234b4a9192] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 859.405513] env[62070]: DEBUG oslo_concurrency.lockutils [None req-d1ae98c6-774b-4beb-8cc9-59c38f859b35 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Lock "62758a38-4819-4d5a-97ed-db6c9ceb97bf" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 40.033s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 859.429201] env[62070]: DEBUG oslo_vmware.api [None req-e0c1f7c9-4467-40ac-bc84-3c816df5b227 tempest-ServersNegativeTestMultiTenantJSON-2128819833 tempest-ServersNegativeTestMultiTenantJSON-2128819833-project-member] Task: {'id': task-1121890, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 859.542108] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca02594a-95e5-4d3a-bde5-0f4406311e54 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.551217] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-647988ea-fc87-4f05-b76f-6638ea9b33bb {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.590716] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82ba171a-136f-4039-b7b7-6764ea7ea622 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.600141] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21756df7-f9ac-4e3e-b3d6-2050b63cbba4 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.613997] env[62070]: DEBUG nova.compute.provider_tree [None req-045c7ab7-65c9-449f-b5dd-885011718b78 tempest-ServersTestManualDisk-1129474164 tempest-ServersTestManualDisk-1129474164-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 859.620997] env[62070]: DEBUG oslo_concurrency.lockutils [None req-ce7278d1-f76f-4623-9cfe-51d678cfa5d5 tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] Lock "61ab347d-1342-4f59-8955-10d575993b77" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 28.897s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 859.768356] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-5285cf1a-979a-4750-b882-909847338032 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: c16d175c-0b23-4f72-bdb0-844c6f80fd32] Powering off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 859.773111] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-881e7197-efbb-49d0-ac6f-2d89f3a2aefb {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.781857] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1121889, 'name': CreateVM_Task} progress is 99%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 859.783663] env[62070]: DEBUG oslo_vmware.api [None req-5285cf1a-979a-4750-b882-909847338032 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Waiting for the task: (returnval){ [ 859.783663] env[62070]: value = "task-1121891" [ 859.783663] env[62070]: _type = "Task" [ 859.783663] env[62070]: } to complete. 
{{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 859.796400] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-5285cf1a-979a-4750-b882-909847338032 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: c16d175c-0b23-4f72-bdb0-844c6f80fd32] VM already powered off {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1509}} [ 859.796749] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-5285cf1a-979a-4750-b882-909847338032 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: c16d175c-0b23-4f72-bdb0-844c6f80fd32] Destroying instance {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 859.798103] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d429f38c-d1dc-4c58-a3dd-85921e39d629 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.813891] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-5285cf1a-979a-4750-b882-909847338032 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: c16d175c-0b23-4f72-bdb0-844c6f80fd32] Unregistering the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 859.814208] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-82902485-022b-4603-b04a-c25c7f93d241 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.818485] env[62070]: DEBUG nova.network.neutron [req-7b82741f-02ea-4939-8a70-ee00a2ec8ea5 req-a66ec626-7ce5-4816-ae17-4e66bf3ea19c service nova] [instance: fb054a32-c1aa-4884-a087-da5ad34cf3c4] Updated VIF entry in instance network info cache for port 32f47285-7ff0-405e-849d-27e73999e359. 
{{(pid=62070) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 859.818848] env[62070]: DEBUG nova.network.neutron [req-7b82741f-02ea-4939-8a70-ee00a2ec8ea5 req-a66ec626-7ce5-4816-ae17-4e66bf3ea19c service nova] [instance: fb054a32-c1aa-4884-a087-da5ad34cf3c4] Updating instance_info_cache with network_info: [{"id": "32f47285-7ff0-405e-849d-27e73999e359", "address": "fa:16:3e:dd:6e:ec", "network": {"id": "4888f989-958d-49ff-bf5a-06873e4cc624", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-906255456-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d079c0ef3ed745fcaf69dc728dca4466", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7e0240aa-a694-48fc-a0f9-6f2d3e71aa12", "external-id": "nsx-vlan-transportzone-249", "segmentation_id": 249, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap32f47285-7f", "ovs_interfaceid": "32f47285-7ff0-405e-849d-27e73999e359", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 859.821649] env[62070]: INFO nova.compute.manager [None req-3079b572-a07c-4d35-8725-af61f5219860 tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] [instance: 61ab347d-1342-4f59-8955-10d575993b77] Rebuilding instance [ 859.825062] env[62070]: INFO nova.compute.manager [-] [instance: e74fd58c-cfa8-45c4-8f02-96234b4a9192] Took 1.27 seconds to deallocate network for instance. [ 859.877721] env[62070]: DEBUG nova.compute.manager [None req-3079b572-a07c-4d35-8725-af61f5219860 tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] [instance: 61ab347d-1342-4f59-8955-10d575993b77] Checking state {{(pid=62070) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 859.878649] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ab78bc6-00c2-48bd-9f94-e0cb3918bc4d {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.927831] env[62070]: DEBUG oslo_vmware.api [None req-e0c1f7c9-4467-40ac-bc84-3c816df5b227 tempest-ServersNegativeTestMultiTenantJSON-2128819833 tempest-ServersNegativeTestMultiTenantJSON-2128819833-project-member] Task: {'id': task-1121890, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.599711} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 859.928125] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-e0c1f7c9-4467-40ac-bc84-3c816df5b227 tempest-ServersNegativeTestMultiTenantJSON-2128819833 tempest-ServersNegativeTestMultiTenantJSON-2128819833-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore2] 4bba7448-69f7-4764-9ae6-eb6585f71515/4bba7448-69f7-4764-9ae6-eb6585f71515.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 859.928348] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-e0c1f7c9-4467-40ac-bc84-3c816df5b227 tempest-ServersNegativeTestMultiTenantJSON-2128819833 tempest-ServersNegativeTestMultiTenantJSON-2128819833-project-member] [instance: 4bba7448-69f7-4764-9ae6-eb6585f71515] Extending root virtual disk to 1048576 {{(pid=62070) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 859.928607] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8f721e5f-4cf0-4722-8dab-49e0303d52dd {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.937957] env[62070]: DEBUG oslo_vmware.api [None req-e0c1f7c9-4467-40ac-bc84-3c816df5b227 tempest-ServersNegativeTestMultiTenantJSON-2128819833 tempest-ServersNegativeTestMultiTenantJSON-2128819833-project-member] Waiting for the task: (returnval){ [ 859.937957] env[62070]: value = "task-1121893" [ 859.937957] env[62070]: _type = "Task" [ 859.937957] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 859.950443] env[62070]: DEBUG oslo_vmware.api [None req-e0c1f7c9-4467-40ac-bc84-3c816df5b227 tempest-ServersNegativeTestMultiTenantJSON-2128819833 tempest-ServersNegativeTestMultiTenantJSON-2128819833-project-member] Task: {'id': task-1121893, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 859.951897] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-5285cf1a-979a-4750-b882-909847338032 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: c16d175c-0b23-4f72-bdb0-844c6f80fd32] Unregistered the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 859.952142] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-5285cf1a-979a-4750-b882-909847338032 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: c16d175c-0b23-4f72-bdb0-844c6f80fd32] Deleting contents of the VM from datastore datastore2 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 859.952345] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-5285cf1a-979a-4750-b882-909847338032 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Deleting the datastore file [datastore2] c16d175c-0b23-4f72-bdb0-844c6f80fd32 {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 859.952648] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-152cd3b4-b6a9-4a3d-91c4-cd92a7c6a5f1 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.960633] env[62070]: DEBUG oslo_vmware.api [None req-5285cf1a-979a-4750-b882-909847338032 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Waiting for the task: (returnval){ [ 859.960633] env[62070]: value = "task-1121894" [ 859.960633] env[62070]: _type = "Task" [ 859.960633] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 859.970992] env[62070]: DEBUG oslo_vmware.api [None req-5285cf1a-979a-4750-b882-909847338032 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': task-1121894, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 860.116922] env[62070]: DEBUG nova.scheduler.client.report [None req-045c7ab7-65c9-449f-b5dd-885011718b78 tempest-ServersTestManualDisk-1129474164 tempest-ServersTestManualDisk-1129474164-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 860.279024] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1121889, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 860.325048] env[62070]: DEBUG oslo_concurrency.lockutils [req-7b82741f-02ea-4939-8a70-ee00a2ec8ea5 req-a66ec626-7ce5-4816-ae17-4e66bf3ea19c service nova] Releasing lock "refresh_cache-fb054a32-c1aa-4884-a087-da5ad34cf3c4" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 860.331382] env[62070]: DEBUG oslo_concurrency.lockutils [None req-70c50811-0af7-45a0-93fe-9c08c68c95af tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 860.393123] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-3079b572-a07c-4d35-8725-af61f5219860 tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] [instance: 61ab347d-1342-4f59-8955-10d575993b77] Powering off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 860.393458] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e800eb36-aee1-466b-9dac-9eafe907886e {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.402471] env[62070]: DEBUG oslo_vmware.api [None req-3079b572-a07c-4d35-8725-af61f5219860 tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] Waiting for the task: (returnval){ [ 860.402471] env[62070]: value = "task-1121895" [ 860.402471] env[62070]: _type = "Task" [ 860.402471] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 860.413069] env[62070]: DEBUG oslo_concurrency.lockutils [None req-783a44c6-89e7-446a-8701-ffc7ee9343d8 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Acquiring lock "62758a38-4819-4d5a-97ed-db6c9ceb97bf" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 860.413469] env[62070]: DEBUG oslo_concurrency.lockutils [None req-783a44c6-89e7-446a-8701-ffc7ee9343d8 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Lock "62758a38-4819-4d5a-97ed-db6c9ceb97bf" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.001s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 860.413791] env[62070]: INFO nova.compute.manager [None req-783a44c6-89e7-446a-8701-ffc7ee9343d8 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 62758a38-4819-4d5a-97ed-db6c9ceb97bf] Shelving [ 860.415847] env[62070]: DEBUG oslo_vmware.api [None req-3079b572-a07c-4d35-8725-af61f5219860 tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] Task: {'id': task-1121895, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 860.449881] env[62070]: DEBUG oslo_vmware.api [None req-e0c1f7c9-4467-40ac-bc84-3c816df5b227 tempest-ServersNegativeTestMultiTenantJSON-2128819833 tempest-ServersNegativeTestMultiTenantJSON-2128819833-project-member] Task: {'id': task-1121893, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.392674} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 860.450179] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-e0c1f7c9-4467-40ac-bc84-3c816df5b227 tempest-ServersNegativeTestMultiTenantJSON-2128819833 tempest-ServersNegativeTestMultiTenantJSON-2128819833-project-member] [instance: 4bba7448-69f7-4764-9ae6-eb6585f71515] Extended root virtual disk {{(pid=62070) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 860.450981] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f141cfd6-3c58-4543-99d2-5cf49435a32c {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.474550] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-e0c1f7c9-4467-40ac-bc84-3c816df5b227 tempest-ServersNegativeTestMultiTenantJSON-2128819833 tempest-ServersNegativeTestMultiTenantJSON-2128819833-project-member] [instance: 4bba7448-69f7-4764-9ae6-eb6585f71515] Reconfiguring VM instance instance-00000047 to attach disk [datastore2] 4bba7448-69f7-4764-9ae6-eb6585f71515/4bba7448-69f7-4764-9ae6-eb6585f71515.vmdk or device None with type sparse {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 860.478032] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-453a98a4-e83e-46c1-b4ce-7b1f0275d29b {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.499255] env[62070]: DEBUG oslo_vmware.api [None req-5285cf1a-979a-4750-b882-909847338032 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': task-1121894, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 860.500891] env[62070]: DEBUG oslo_vmware.api [None req-e0c1f7c9-4467-40ac-bc84-3c816df5b227 tempest-ServersNegativeTestMultiTenantJSON-2128819833 tempest-ServersNegativeTestMultiTenantJSON-2128819833-project-member] Waiting for the task: (returnval){ [ 860.500891] env[62070]: value = "task-1121896" [ 860.500891] env[62070]: _type = "Task" [ 860.500891] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 860.510070] env[62070]: DEBUG oslo_vmware.api [None req-e0c1f7c9-4467-40ac-bc84-3c816df5b227 tempest-ServersNegativeTestMultiTenantJSON-2128819833 tempest-ServersNegativeTestMultiTenantJSON-2128819833-project-member] Task: {'id': task-1121896, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 860.624079] env[62070]: DEBUG oslo_concurrency.lockutils [None req-045c7ab7-65c9-449f-b5dd-885011718b78 tempest-ServersTestManualDisk-1129474164 tempest-ServersTestManualDisk-1129474164-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 3.187s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 860.625733] env[62070]: DEBUG oslo_concurrency.lockutils [None req-cdabb9ea-ead2-4381-8d7a-de546f5dce91 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 22.519s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 860.627970] env[62070]: INFO nova.compute.claims [None req-cdabb9ea-ead2-4381-8d7a-de546f5dce91 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 58146b84-7589-4f21-bdab-605cee535e55] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 860.650599] env[62070]: INFO nova.scheduler.client.report [None req-045c7ab7-65c9-449f-b5dd-885011718b78 tempest-ServersTestManualDisk-1129474164 tempest-ServersTestManualDisk-1129474164-project-member] Deleted allocations for instance d0914f90-200c-4715-aaab-54beacf339b9 [ 860.780166] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1121889, 'name': CreateVM_Task, 'duration_secs': 1.554947} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 860.780412] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fb054a32-c1aa-4884-a087-da5ad34cf3c4] Created VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 860.782521] env[62070]: DEBUG oslo_concurrency.lockutils [None req-58349f23-6bf8-4092-ac97-bf388655a094 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 860.782521] env[62070]: DEBUG oslo_concurrency.lockutils [None req-58349f23-6bf8-4092-ac97-bf388655a094 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Acquired lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 860.782521] env[62070]: DEBUG oslo_concurrency.lockutils [None req-58349f23-6bf8-4092-ac97-bf388655a094 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 860.782521] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0e2fe919-5163-4f88-90db-83fd43a67001 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.789608] env[62070]: DEBUG oslo_vmware.api [None 
req-58349f23-6bf8-4092-ac97-bf388655a094 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Waiting for the task: (returnval){ [ 860.789608] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]5251b924-bcd5-9909-3620-7682349e73c1" [ 860.789608] env[62070]: _type = "Task" [ 860.789608] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 860.803022] env[62070]: DEBUG oslo_vmware.api [None req-58349f23-6bf8-4092-ac97-bf388655a094 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]5251b924-bcd5-9909-3620-7682349e73c1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 860.913498] env[62070]: DEBUG oslo_vmware.api [None req-3079b572-a07c-4d35-8725-af61f5219860 tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] Task: {'id': task-1121895, 'name': PowerOffVM_Task, 'duration_secs': 0.277597} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 860.913825] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-3079b572-a07c-4d35-8725-af61f5219860 tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] [instance: 61ab347d-1342-4f59-8955-10d575993b77] Powered off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 860.914096] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-3079b572-a07c-4d35-8725-af61f5219860 tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] [instance: 61ab347d-1342-4f59-8955-10d575993b77] Destroying instance {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 860.914998] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a54a377e-d485-475e-b775-2b9a56bfd33b {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.926024] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-3079b572-a07c-4d35-8725-af61f5219860 tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] [instance: 61ab347d-1342-4f59-8955-10d575993b77] Unregistering the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 860.926635] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-783a44c6-89e7-446a-8701-ffc7ee9343d8 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 62758a38-4819-4d5a-97ed-db6c9ceb97bf] Powering off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 860.926873] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d4f7a759-dadb-491b-8531-56b5872af018 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.928626] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-005f3f99-0b77-4e39-86ba-a36dd9befd87 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.938244] env[62070]: 
DEBUG oslo_vmware.api [None req-783a44c6-89e7-446a-8701-ffc7ee9343d8 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Waiting for the task: (returnval){ [ 860.938244] env[62070]: value = "task-1121898" [ 860.938244] env[62070]: _type = "Task" [ 860.938244] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 860.947863] env[62070]: DEBUG oslo_vmware.api [None req-783a44c6-89e7-446a-8701-ffc7ee9343d8 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Task: {'id': task-1121898, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 860.950350] env[62070]: DEBUG nova.compute.manager [req-655e65f0-7354-42b7-91dd-d76af9443769 req-3a231138-1d5a-493b-bc5b-39fb1634666c service nova] [instance: e74fd58c-cfa8-45c4-8f02-96234b4a9192] Received event network-vif-deleted-6444a30f-9c50-4eaf-b562-178b627dc0f1 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 860.978460] env[62070]: DEBUG oslo_vmware.api [None req-5285cf1a-979a-4750-b882-909847338032 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': task-1121894, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.677065} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 860.978817] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-5285cf1a-979a-4750-b882-909847338032 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Deleted the datastore file {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 860.979114] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-5285cf1a-979a-4750-b882-909847338032 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: c16d175c-0b23-4f72-bdb0-844c6f80fd32] Deleted contents of the VM from datastore datastore2 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 860.979361] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-5285cf1a-979a-4750-b882-909847338032 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: c16d175c-0b23-4f72-bdb0-844c6f80fd32] Instance destroyed {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 860.985150] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-3079b572-a07c-4d35-8725-af61f5219860 tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] [instance: 61ab347d-1342-4f59-8955-10d575993b77] Unregistered the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 860.985453] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-3079b572-a07c-4d35-8725-af61f5219860 tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] [instance: 61ab347d-1342-4f59-8955-10d575993b77] Deleting contents of the VM from datastore datastore2 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 860.985558] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-3079b572-a07c-4d35-8725-af61f5219860 tempest-ServersAdmin275Test-1225965599 
tempest-ServersAdmin275Test-1225965599-project-member] Deleting the datastore file [datastore2] 61ab347d-1342-4f59-8955-10d575993b77 {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 860.985862] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d5026614-fa90-433f-b9f5-bcf433375e48 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.994067] env[62070]: DEBUG oslo_vmware.api [None req-3079b572-a07c-4d35-8725-af61f5219860 tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] Waiting for the task: (returnval){ [ 860.994067] env[62070]: value = "task-1121899" [ 860.994067] env[62070]: _type = "Task" [ 860.994067] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 861.003537] env[62070]: DEBUG oslo_vmware.api [None req-3079b572-a07c-4d35-8725-af61f5219860 tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] Task: {'id': task-1121899, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 861.012109] env[62070]: DEBUG oslo_vmware.api [None req-e0c1f7c9-4467-40ac-bc84-3c816df5b227 tempest-ServersNegativeTestMultiTenantJSON-2128819833 tempest-ServersNegativeTestMultiTenantJSON-2128819833-project-member] Task: {'id': task-1121896, 'name': ReconfigVM_Task, 'duration_secs': 0.478791} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 861.012402] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-e0c1f7c9-4467-40ac-bc84-3c816df5b227 tempest-ServersNegativeTestMultiTenantJSON-2128819833 tempest-ServersNegativeTestMultiTenantJSON-2128819833-project-member] [instance: 4bba7448-69f7-4764-9ae6-eb6585f71515] Reconfigured VM instance instance-00000047 to attach disk [datastore2] 4bba7448-69f7-4764-9ae6-eb6585f71515/4bba7448-69f7-4764-9ae6-eb6585f71515.vmdk or device None with type sparse {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 861.013055] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-79e9c18b-9c23-44f2-8fda-d2eb8cd2472c {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.021401] env[62070]: DEBUG oslo_vmware.api [None req-e0c1f7c9-4467-40ac-bc84-3c816df5b227 tempest-ServersNegativeTestMultiTenantJSON-2128819833 tempest-ServersNegativeTestMultiTenantJSON-2128819833-project-member] Waiting for the task: (returnval){ [ 861.021401] env[62070]: value = "task-1121900" [ 861.021401] env[62070]: _type = "Task" [ 861.021401] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 861.033220] env[62070]: DEBUG oslo_vmware.api [None req-e0c1f7c9-4467-40ac-bc84-3c816df5b227 tempest-ServersNegativeTestMultiTenantJSON-2128819833 tempest-ServersNegativeTestMultiTenantJSON-2128819833-project-member] Task: {'id': task-1121900, 'name': Rename_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 861.158973] env[62070]: DEBUG oslo_concurrency.lockutils [None req-045c7ab7-65c9-449f-b5dd-885011718b78 tempest-ServersTestManualDisk-1129474164 tempest-ServersTestManualDisk-1129474164-project-member] Lock "d0914f90-200c-4715-aaab-54beacf339b9" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 28.915s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 861.301053] env[62070]: DEBUG oslo_vmware.api [None req-58349f23-6bf8-4092-ac97-bf388655a094 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]5251b924-bcd5-9909-3620-7682349e73c1, 'name': SearchDatastore_Task, 'duration_secs': 0.011382} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 861.301573] env[62070]: DEBUG oslo_concurrency.lockutils [None req-58349f23-6bf8-4092-ac97-bf388655a094 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Releasing lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 861.301671] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-58349f23-6bf8-4092-ac97-bf388655a094 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: fb054a32-c1aa-4884-a087-da5ad34cf3c4] Processing image 43ea607c-7ece-4601-9b11-75c6a16aa7dd {{(pid=62070) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 861.301885] env[62070]: DEBUG oslo_concurrency.lockutils [None req-58349f23-6bf8-4092-ac97-bf388655a094 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 861.302050] env[62070]: DEBUG oslo_concurrency.lockutils [None req-58349f23-6bf8-4092-ac97-bf388655a094 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Acquired lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 861.302239] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-58349f23-6bf8-4092-ac97-bf388655a094 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 861.303646] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-40959efb-8e90-40d0-81e2-cc94534198dc {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.321630] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-58349f23-6bf8-4092-ac97-bf388655a094 tempest-ServerDiskConfigTestJSON-1824612993 
tempest-ServerDiskConfigTestJSON-1824612993-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 861.321968] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-58349f23-6bf8-4092-ac97-bf388655a094 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62070) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 861.322909] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d9336002-6fc2-45cf-9a7b-bdb8a846bf35 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.329322] env[62070]: DEBUG oslo_vmware.api [None req-58349f23-6bf8-4092-ac97-bf388655a094 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Waiting for the task: (returnval){ [ 861.329322] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]527246f0-7f72-726a-e875-5ec1e6594cb6" [ 861.329322] env[62070]: _type = "Task" [ 861.329322] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 861.338020] env[62070]: DEBUG oslo_vmware.api [None req-58349f23-6bf8-4092-ac97-bf388655a094 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]527246f0-7f72-726a-e875-5ec1e6594cb6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 861.451549] env[62070]: DEBUG oslo_vmware.api [None req-783a44c6-89e7-446a-8701-ffc7ee9343d8 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Task: {'id': task-1121898, 'name': PowerOffVM_Task, 'duration_secs': 0.194722} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 861.451942] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-783a44c6-89e7-446a-8701-ffc7ee9343d8 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 62758a38-4819-4d5a-97ed-db6c9ceb97bf] Powered off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 861.453076] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-122d3f3e-07c0-49d2-b962-bd433f1641c9 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.475212] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80f593ce-4508-4ebf-b47b-1969cbede7cd {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.504800] env[62070]: DEBUG oslo_vmware.api [None req-3079b572-a07c-4d35-8725-af61f5219860 tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] Task: {'id': task-1121899, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.125029} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 861.507404] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-3079b572-a07c-4d35-8725-af61f5219860 tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] Deleted the datastore file {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 861.507404] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-3079b572-a07c-4d35-8725-af61f5219860 tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] [instance: 61ab347d-1342-4f59-8955-10d575993b77] Deleted contents of the VM from datastore datastore2 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 861.507404] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-3079b572-a07c-4d35-8725-af61f5219860 tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] [instance: 61ab347d-1342-4f59-8955-10d575993b77] Instance destroyed {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 861.532328] env[62070]: DEBUG oslo_vmware.api [None req-e0c1f7c9-4467-40ac-bc84-3c816df5b227 tempest-ServersNegativeTestMultiTenantJSON-2128819833 tempest-ServersNegativeTestMultiTenantJSON-2128819833-project-member] Task: {'id': task-1121900, 'name': Rename_Task} progress is 99%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 861.845721] env[62070]: DEBUG oslo_vmware.api [None req-58349f23-6bf8-4092-ac97-bf388655a094 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]527246f0-7f72-726a-e875-5ec1e6594cb6, 'name': SearchDatastore_Task, 'duration_secs': 0.031549} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 861.846554] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5fd4208a-b9ab-4ab0-a2d1-b25b1117a7ca {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.853811] env[62070]: DEBUG oslo_vmware.api [None req-58349f23-6bf8-4092-ac97-bf388655a094 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Waiting for the task: (returnval){ [ 861.853811] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]52647a5f-6661-4f4d-ae8b-21c59631e255" [ 861.853811] env[62070]: _type = "Task" [ 861.853811] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 861.863709] env[62070]: DEBUG oslo_vmware.api [None req-58349f23-6bf8-4092-ac97-bf388655a094 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52647a5f-6661-4f4d-ae8b-21c59631e255, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 861.990506] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-783a44c6-89e7-446a-8701-ffc7ee9343d8 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 62758a38-4819-4d5a-97ed-db6c9ceb97bf] Creating Snapshot of the VM instance {{(pid=62070) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 861.993661] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-5bd51107-9341-4f69-9d5a-e048f332cde3 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.002414] env[62070]: DEBUG oslo_vmware.api [None req-783a44c6-89e7-446a-8701-ffc7ee9343d8 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Waiting for the task: (returnval){ [ 862.002414] env[62070]: value = "task-1121901" [ 862.002414] env[62070]: _type = "Task" [ 862.002414] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 862.022479] env[62070]: DEBUG oslo_vmware.api [None req-783a44c6-89e7-446a-8701-ffc7ee9343d8 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Task: {'id': task-1121901, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 862.025597] env[62070]: DEBUG nova.virt.hardware [None req-5285cf1a-979a-4750-b882-909847338032 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T09:21:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T09:21:17Z,direct_url=,disk_format='vmdk',id=43ea607c-7ece-4601-9b11-75c6a16aa7dd,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9d42cb2bbadf40d6b35f237f71234611',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T09:21:18Z,virtual_size=,visibility=), allow threads: False {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 862.025959] env[62070]: DEBUG nova.virt.hardware [None req-5285cf1a-979a-4750-b882-909847338032 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Flavor limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 862.026036] env[62070]: DEBUG nova.virt.hardware [None req-5285cf1a-979a-4750-b882-909847338032 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Image limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 862.026225] env[62070]: DEBUG nova.virt.hardware [None req-5285cf1a-979a-4750-b882-909847338032 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Flavor pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:388}} [ 862.026376] env[62070]: DEBUG nova.virt.hardware [None req-5285cf1a-979a-4750-b882-909847338032 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Image pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 862.026529] env[62070]: DEBUG nova.virt.hardware [None req-5285cf1a-979a-4750-b882-909847338032 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 862.026776] env[62070]: DEBUG nova.virt.hardware [None req-5285cf1a-979a-4750-b882-909847338032 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 862.027053] env[62070]: DEBUG nova.virt.hardware [None req-5285cf1a-979a-4750-b882-909847338032 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 862.028068] env[62070]: DEBUG nova.virt.hardware [None req-5285cf1a-979a-4750-b882-909847338032 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Got 1 possible topologies {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 862.028068] env[62070]: DEBUG nova.virt.hardware [None req-5285cf1a-979a-4750-b882-909847338032 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 862.028068] env[62070]: DEBUG nova.virt.hardware [None req-5285cf1a-979a-4750-b882-909847338032 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 862.031713] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d26abe26-f70e-4908-99ef-78dc8fab39a6 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.044938] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f3bde6c-d9db-4e6e-9d73-e9af80611f2a {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.048646] env[62070]: DEBUG oslo_vmware.api [None req-e0c1f7c9-4467-40ac-bc84-3c816df5b227 tempest-ServersNegativeTestMultiTenantJSON-2128819833 tempest-ServersNegativeTestMultiTenantJSON-2128819833-project-member] Task: {'id': task-1121900, 'name': Rename_Task} progress is 99%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 862.064797] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-5285cf1a-979a-4750-b882-909847338032 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: c16d175c-0b23-4f72-bdb0-844c6f80fd32] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:8f:c5:5e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'cf5bfbae-a882-4d34-be33-b31e274b3077', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5a04b813-465f-4855-8707-4db273ba30fd', 'vif_model': 'vmxnet3'}] {{(pid=62070) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 862.072068] env[62070]: DEBUG oslo.service.loopingcall [None req-5285cf1a-979a-4750-b882-909847338032 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 862.073257] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c16d175c-0b23-4f72-bdb0-844c6f80fd32] Creating VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 862.074213] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef4cc9e9-dc2d-47ab-a0bb-88beac683639 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.076697] env[62070]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c4c4c9eb-af48-4682-bcb0-755cb32ac536 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.097241] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11b29f4f-ad0b-4ee2-9a41-d113decb917f {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.101930] env[62070]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 862.101930] env[62070]: value = "task-1121902" [ 862.101930] env[62070]: _type = "Task" [ 862.101930] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 862.130678] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d38663e-c893-4e2d-a2b9-9094abd3aebb {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.136309] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1121902, 'name': CreateVM_Task} progress is 10%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 862.142458] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-609414e8-39ee-439c-afcf-3f5ac53b155b {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.158147] env[62070]: DEBUG nova.compute.provider_tree [None req-cdabb9ea-ead2-4381-8d7a-de546f5dce91 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Updating inventory in ProviderTree for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 862.364882] env[62070]: DEBUG oslo_vmware.api [None req-58349f23-6bf8-4092-ac97-bf388655a094 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52647a5f-6661-4f4d-ae8b-21c59631e255, 'name': SearchDatastore_Task, 'duration_secs': 0.01047} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 862.365168] env[62070]: DEBUG oslo_concurrency.lockutils [None req-58349f23-6bf8-4092-ac97-bf388655a094 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Releasing lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 862.365440] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-58349f23-6bf8-4092-ac97-bf388655a094 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore2] fb054a32-c1aa-4884-a087-da5ad34cf3c4/fb054a32-c1aa-4884-a087-da5ad34cf3c4.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 862.365717] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7539fab7-0a26-41fb-b998-bb72efc1aabd {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.377840] env[62070]: DEBUG oslo_vmware.api [None req-58349f23-6bf8-4092-ac97-bf388655a094 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Waiting for the task: (returnval){ [ 862.377840] env[62070]: value = "task-1121903" [ 862.377840] env[62070]: _type = "Task" [ 862.377840] env[62070]: } to complete. 
{{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 862.392059] env[62070]: DEBUG oslo_vmware.api [None req-58349f23-6bf8-4092-ac97-bf388655a094 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Task: {'id': task-1121903, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 862.514139] env[62070]: DEBUG oslo_vmware.api [None req-783a44c6-89e7-446a-8701-ffc7ee9343d8 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Task: {'id': task-1121901, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 862.537483] env[62070]: DEBUG oslo_vmware.api [None req-e0c1f7c9-4467-40ac-bc84-3c816df5b227 tempest-ServersNegativeTestMultiTenantJSON-2128819833 tempest-ServersNegativeTestMultiTenantJSON-2128819833-project-member] Task: {'id': task-1121900, 'name': Rename_Task, 'duration_secs': 1.167025} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 862.538026] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-e0c1f7c9-4467-40ac-bc84-3c816df5b227 tempest-ServersNegativeTestMultiTenantJSON-2128819833 tempest-ServersNegativeTestMultiTenantJSON-2128819833-project-member] [instance: 4bba7448-69f7-4764-9ae6-eb6585f71515] Powering on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 862.538515] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8247178b-03c6-46b2-b130-2915b5c8e23e {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.549500] env[62070]: DEBUG oslo_vmware.api [None req-e0c1f7c9-4467-40ac-bc84-3c816df5b227 tempest-ServersNegativeTestMultiTenantJSON-2128819833 tempest-ServersNegativeTestMultiTenantJSON-2128819833-project-member] Waiting for the task: (returnval){ [ 862.549500] env[62070]: value = "task-1121904" [ 862.549500] env[62070]: _type = "Task" [ 862.549500] env[62070]: } to complete. 
{{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 862.559999] env[62070]: DEBUG nova.virt.hardware [None req-3079b572-a07c-4d35-8725-af61f5219860 tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T09:21:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T09:21:17Z,direct_url=,disk_format='vmdk',id=43ea607c-7ece-4601-9b11-75c6a16aa7dd,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9d42cb2bbadf40d6b35f237f71234611',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T09:21:18Z,virtual_size=,visibility=), allow threads: False {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 862.560556] env[62070]: DEBUG nova.virt.hardware [None req-3079b572-a07c-4d35-8725-af61f5219860 tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] Flavor limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 862.561880] env[62070]: DEBUG nova.virt.hardware [None req-3079b572-a07c-4d35-8725-af61f5219860 tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] Image limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 862.561880] env[62070]: DEBUG nova.virt.hardware [None req-3079b572-a07c-4d35-8725-af61f5219860 tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] Flavor pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 862.561880] env[62070]: DEBUG nova.virt.hardware [None req-3079b572-a07c-4d35-8725-af61f5219860 tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] Image pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 862.561880] env[62070]: DEBUG nova.virt.hardware [None req-3079b572-a07c-4d35-8725-af61f5219860 tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 862.561880] env[62070]: DEBUG nova.virt.hardware [None req-3079b572-a07c-4d35-8725-af61f5219860 tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 862.561880] env[62070]: DEBUG nova.virt.hardware [None req-3079b572-a07c-4d35-8725-af61f5219860 tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 862.562528] env[62070]: DEBUG nova.virt.hardware [None 
req-3079b572-a07c-4d35-8725-af61f5219860 tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] Got 1 possible topologies {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 862.562890] env[62070]: DEBUG nova.virt.hardware [None req-3079b572-a07c-4d35-8725-af61f5219860 tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 862.563272] env[62070]: DEBUG nova.virt.hardware [None req-3079b572-a07c-4d35-8725-af61f5219860 tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 862.564472] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6fc6c7b-6be4-40df-8bd3-5f02ef5a0f5a {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.578609] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-778b7cd8-b44c-4afc-ae3e-540c944ec2cc {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.583207] env[62070]: DEBUG oslo_vmware.api [None req-e0c1f7c9-4467-40ac-bc84-3c816df5b227 tempest-ServersNegativeTestMultiTenantJSON-2128819833 tempest-ServersNegativeTestMultiTenantJSON-2128819833-project-member] Task: {'id': task-1121904, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 862.600422] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-3079b572-a07c-4d35-8725-af61f5219860 tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] [instance: 61ab347d-1342-4f59-8955-10d575993b77] Instance VIF info [] {{(pid=62070) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 862.606369] env[62070]: DEBUG oslo.service.loopingcall [None req-3079b572-a07c-4d35-8725-af61f5219860 tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 862.607308] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 61ab347d-1342-4f59-8955-10d575993b77] Creating VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 862.614023] env[62070]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-be42830f-68fc-4e68-9845-7e82896347ec {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.636258] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1121902, 'name': CreateVM_Task, 'duration_secs': 0.469478} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 862.637703] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c16d175c-0b23-4f72-bdb0-844c6f80fd32] Created VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 862.637923] env[62070]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 862.637923] env[62070]: value = "task-1121905" [ 862.637923] env[62070]: _type = "Task" [ 862.637923] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 862.638673] env[62070]: DEBUG oslo_concurrency.lockutils [None req-5285cf1a-979a-4750-b882-909847338032 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 862.638844] env[62070]: DEBUG oslo_concurrency.lockutils [None req-5285cf1a-979a-4750-b882-909847338032 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Acquired lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 862.639261] env[62070]: DEBUG oslo_concurrency.lockutils [None req-5285cf1a-979a-4750-b882-909847338032 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 862.639572] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-66f82990-1bad-4b05-8e74-a2b017ad6eb2 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.654026] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1121905, 'name': CreateVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 862.654026] env[62070]: DEBUG oslo_vmware.api [None req-5285cf1a-979a-4750-b882-909847338032 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Waiting for the task: (returnval){ [ 862.654026] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]5213d48a-b84e-935a-eb0e-b77f7cf8b91d" [ 862.654026] env[62070]: _type = "Task" [ 862.654026] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 862.663737] env[62070]: DEBUG oslo_vmware.api [None req-5285cf1a-979a-4750-b882-909847338032 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]5213d48a-b84e-935a-eb0e-b77f7cf8b91d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 862.708767] env[62070]: DEBUG nova.scheduler.client.report [None req-cdabb9ea-ead2-4381-8d7a-de546f5dce91 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Updated inventory for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 with generation 96 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 862.709523] env[62070]: DEBUG nova.compute.provider_tree [None req-cdabb9ea-ead2-4381-8d7a-de546f5dce91 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Updating resource provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 generation from 96 to 97 during operation: update_inventory {{(pid=62070) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 862.709523] env[62070]: DEBUG nova.compute.provider_tree [None req-cdabb9ea-ead2-4381-8d7a-de546f5dce91 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Updating inventory in ProviderTree for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 862.888726] env[62070]: DEBUG oslo_vmware.api [None req-58349f23-6bf8-4092-ac97-bf388655a094 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Task: {'id': task-1121903, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 863.014250] env[62070]: DEBUG oslo_vmware.api [None req-783a44c6-89e7-446a-8701-ffc7ee9343d8 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Task: {'id': task-1121901, 'name': CreateSnapshot_Task, 'duration_secs': 0.876975} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 863.014645] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-783a44c6-89e7-446a-8701-ffc7ee9343d8 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 62758a38-4819-4d5a-97ed-db6c9ceb97bf] Created Snapshot of the VM instance {{(pid=62070) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 863.015366] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1089fd4b-244b-4126-bde1-f25dca35a06b {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.068752] env[62070]: DEBUG oslo_vmware.api [None req-e0c1f7c9-4467-40ac-bc84-3c816df5b227 tempest-ServersNegativeTestMultiTenantJSON-2128819833 tempest-ServersNegativeTestMultiTenantJSON-2128819833-project-member] Task: {'id': task-1121904, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 863.150810] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1121905, 'name': CreateVM_Task} progress is 99%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 863.163847] env[62070]: DEBUG oslo_vmware.api [None req-5285cf1a-979a-4750-b882-909847338032 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]5213d48a-b84e-935a-eb0e-b77f7cf8b91d, 'name': SearchDatastore_Task, 'duration_secs': 0.063901} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 863.164846] env[62070]: DEBUG oslo_concurrency.lockutils [None req-5285cf1a-979a-4750-b882-909847338032 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Releasing lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 863.165154] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-5285cf1a-979a-4750-b882-909847338032 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: c16d175c-0b23-4f72-bdb0-844c6f80fd32] Processing image 43ea607c-7ece-4601-9b11-75c6a16aa7dd {{(pid=62070) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 863.165411] env[62070]: DEBUG oslo_concurrency.lockutils [None req-5285cf1a-979a-4750-b882-909847338032 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 863.165565] env[62070]: DEBUG oslo_concurrency.lockutils [None req-5285cf1a-979a-4750-b882-909847338032 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Acquired lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 863.165793] env[62070]: 
DEBUG nova.virt.vmwareapi.ds_util [None req-5285cf1a-979a-4750-b882-909847338032 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 863.166100] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d7380f40-e35d-4a32-8a23-55e97d600349 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.191786] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-5285cf1a-979a-4750-b882-909847338032 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 863.191786] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-5285cf1a-979a-4750-b882-909847338032 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62070) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 863.191786] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2b47367d-3ef9-4c4d-935a-3900f689e844 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.199112] env[62070]: DEBUG oslo_vmware.api [None req-5285cf1a-979a-4750-b882-909847338032 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Waiting for the task: (returnval){ [ 863.199112] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]521efc28-8956-c934-5ce4-2dfae163a2bc" [ 863.199112] env[62070]: _type = "Task" [ 863.199112] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 863.207320] env[62070]: DEBUG oslo_vmware.api [None req-5285cf1a-979a-4750-b882-909847338032 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]521efc28-8956-c934-5ce4-2dfae163a2bc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 863.219472] env[62070]: DEBUG oslo_concurrency.lockutils [None req-cdabb9ea-ead2-4381-8d7a-de546f5dce91 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.594s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 863.220081] env[62070]: DEBUG nova.compute.manager [None req-cdabb9ea-ead2-4381-8d7a-de546f5dce91 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 58146b84-7589-4f21-bdab-605cee535e55] Start building networks asynchronously for instance. 
{{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 863.222741] env[62070]: DEBUG oslo_concurrency.lockutils [None req-3861d588-4806-4a38-9554-f7a6f721db6e tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 20.900s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 863.222978] env[62070]: DEBUG nova.objects.instance [None req-3861d588-4806-4a38-9554-f7a6f721db6e tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Lazy-loading 'resources' on Instance uuid f75ed36e-16c8-4a6b-bd39-eb4057ef0691 {{(pid=62070) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 863.392018] env[62070]: DEBUG oslo_vmware.api [None req-58349f23-6bf8-4092-ac97-bf388655a094 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Task: {'id': task-1121903, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.517784} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 863.392285] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-58349f23-6bf8-4092-ac97-bf388655a094 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore2] fb054a32-c1aa-4884-a087-da5ad34cf3c4/fb054a32-c1aa-4884-a087-da5ad34cf3c4.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 863.392503] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-58349f23-6bf8-4092-ac97-bf388655a094 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: fb054a32-c1aa-4884-a087-da5ad34cf3c4] Extending root virtual disk to 1048576 {{(pid=62070) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 863.392763] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e6fdd922-b708-46dd-b016-375973f717c6 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.401068] env[62070]: DEBUG oslo_vmware.api [None req-58349f23-6bf8-4092-ac97-bf388655a094 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Waiting for the task: (returnval){ [ 863.401068] env[62070]: value = "task-1121906" [ 863.401068] env[62070]: _type = "Task" [ 863.401068] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 863.411221] env[62070]: DEBUG oslo_vmware.api [None req-58349f23-6bf8-4092-ac97-bf388655a094 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Task: {'id': task-1121906, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 863.540053] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-783a44c6-89e7-446a-8701-ffc7ee9343d8 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 62758a38-4819-4d5a-97ed-db6c9ceb97bf] Creating linked-clone VM from snapshot {{(pid=62070) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 863.540907] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-34d3a6bc-2206-402d-9e75-49d109cecab8 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.563769] env[62070]: DEBUG oslo_vmware.api [None req-e0c1f7c9-4467-40ac-bc84-3c816df5b227 tempest-ServersNegativeTestMultiTenantJSON-2128819833 tempest-ServersNegativeTestMultiTenantJSON-2128819833-project-member] Task: {'id': task-1121904, 'name': PowerOnVM_Task, 'duration_secs': 0.802447} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 863.565617] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-e0c1f7c9-4467-40ac-bc84-3c816df5b227 tempest-ServersNegativeTestMultiTenantJSON-2128819833 tempest-ServersNegativeTestMultiTenantJSON-2128819833-project-member] [instance: 4bba7448-69f7-4764-9ae6-eb6585f71515] Powered on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 863.566027] env[62070]: INFO nova.compute.manager [None req-e0c1f7c9-4467-40ac-bc84-3c816df5b227 tempest-ServersNegativeTestMultiTenantJSON-2128819833 tempest-ServersNegativeTestMultiTenantJSON-2128819833-project-member] [instance: 4bba7448-69f7-4764-9ae6-eb6585f71515] Took 10.25 seconds to spawn the instance on the hypervisor. [ 863.566378] env[62070]: DEBUG nova.compute.manager [None req-e0c1f7c9-4467-40ac-bc84-3c816df5b227 tempest-ServersNegativeTestMultiTenantJSON-2128819833 tempest-ServersNegativeTestMultiTenantJSON-2128819833-project-member] [instance: 4bba7448-69f7-4764-9ae6-eb6585f71515] Checking state {{(pid=62070) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 863.566909] env[62070]: DEBUG oslo_vmware.api [None req-783a44c6-89e7-446a-8701-ffc7ee9343d8 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Waiting for the task: (returnval){ [ 863.566909] env[62070]: value = "task-1121907" [ 863.566909] env[62070]: _type = "Task" [ 863.566909] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 863.567944] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9f38ba4-6c51-4d48-9d67-4fde1a02e77e {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.579020] env[62070]: DEBUG oslo_vmware.api [None req-783a44c6-89e7-446a-8701-ffc7ee9343d8 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Task: {'id': task-1121907, 'name': CloneVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 863.652589] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1121905, 'name': CreateVM_Task, 'duration_secs': 0.667762} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 863.652589] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 61ab347d-1342-4f59-8955-10d575993b77] Created VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 863.657459] env[62070]: DEBUG oslo_concurrency.lockutils [None req-3079b572-a07c-4d35-8725-af61f5219860 tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 863.657459] env[62070]: DEBUG oslo_concurrency.lockutils [None req-3079b572-a07c-4d35-8725-af61f5219860 tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] Acquired lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 863.657459] env[62070]: DEBUG oslo_concurrency.lockutils [None req-3079b572-a07c-4d35-8725-af61f5219860 tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 863.657459] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3850e075-dede-4063-b914-5d3f2ea40518 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.664040] env[62070]: DEBUG oslo_vmware.api [None req-3079b572-a07c-4d35-8725-af61f5219860 tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] Waiting for the task: (returnval){ [ 863.664040] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]528d3ae5-12b8-084b-688b-efe3fd60d3ef" [ 863.664040] env[62070]: _type = "Task" [ 863.664040] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 863.672162] env[62070]: DEBUG oslo_vmware.api [None req-3079b572-a07c-4d35-8725-af61f5219860 tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]528d3ae5-12b8-084b-688b-efe3fd60d3ef, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 863.713738] env[62070]: DEBUG oslo_vmware.api [None req-5285cf1a-979a-4750-b882-909847338032 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]521efc28-8956-c934-5ce4-2dfae163a2bc, 'name': SearchDatastore_Task, 'duration_secs': 0.018662} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 863.714590] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e60ccccb-9f5a-4a6c-83ef-051e05c4fc83 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.721742] env[62070]: DEBUG oslo_vmware.api [None req-5285cf1a-979a-4750-b882-909847338032 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Waiting for the task: (returnval){ [ 863.721742] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]52deee23-6549-c646-2bff-fc7b72d7f43c" [ 863.721742] env[62070]: _type = "Task" [ 863.721742] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 863.727431] env[62070]: DEBUG nova.compute.utils [None req-cdabb9ea-ead2-4381-8d7a-de546f5dce91 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Using /dev/sd instead of None {{(pid=62070) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 863.728809] env[62070]: DEBUG nova.compute.manager [None req-cdabb9ea-ead2-4381-8d7a-de546f5dce91 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 58146b84-7589-4f21-bdab-605cee535e55] Allocating IP information in the background. {{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 863.729110] env[62070]: DEBUG nova.network.neutron [None req-cdabb9ea-ead2-4381-8d7a-de546f5dce91 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 58146b84-7589-4f21-bdab-605cee535e55] allocate_for_instance() {{(pid=62070) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 863.740768] env[62070]: DEBUG oslo_vmware.api [None req-5285cf1a-979a-4750-b882-909847338032 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52deee23-6549-c646-2bff-fc7b72d7f43c, 'name': SearchDatastore_Task, 'duration_secs': 0.011199} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 863.741336] env[62070]: DEBUG oslo_concurrency.lockutils [None req-5285cf1a-979a-4750-b882-909847338032 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Releasing lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 863.741742] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-5285cf1a-979a-4750-b882-909847338032 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore2] c16d175c-0b23-4f72-bdb0-844c6f80fd32/c16d175c-0b23-4f72-bdb0-844c6f80fd32.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 863.742184] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-58b59646-f057-4357-8ffd-b4fa4206da6e {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.751917] env[62070]: DEBUG oslo_vmware.api [None req-5285cf1a-979a-4750-b882-909847338032 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Waiting for the task: (returnval){ [ 863.751917] env[62070]: value = "task-1121908" [ 863.751917] env[62070]: _type = "Task" [ 863.751917] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 863.763556] env[62070]: DEBUG oslo_vmware.api [None req-5285cf1a-979a-4750-b882-909847338032 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': task-1121908, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 863.808913] env[62070]: DEBUG nova.policy [None req-cdabb9ea-ead2-4381-8d7a-de546f5dce91 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '093d4b68ffd04d4d951f5be91bfc76e8', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'eac8e5edc8f14fff89aba7c8cb6cac5d', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62070) authorize /opt/stack/nova/nova/policy.py:201}} [ 863.911346] env[62070]: DEBUG oslo_vmware.api [None req-58349f23-6bf8-4092-ac97-bf388655a094 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Task: {'id': task-1121906, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071683} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 863.917021] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-58349f23-6bf8-4092-ac97-bf388655a094 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: fb054a32-c1aa-4884-a087-da5ad34cf3c4] Extended root virtual disk {{(pid=62070) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 863.917021] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f39accc4-bfae-4462-8159-c8271cd82ef5 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.940205] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-58349f23-6bf8-4092-ac97-bf388655a094 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: fb054a32-c1aa-4884-a087-da5ad34cf3c4] Reconfiguring VM instance instance-00000048 to attach disk [datastore2] fb054a32-c1aa-4884-a087-da5ad34cf3c4/fb054a32-c1aa-4884-a087-da5ad34cf3c4.vmdk or device None with type sparse {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 863.943448] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3eadd617-96a4-46de-870d-3788f44ddfa9 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.967449] env[62070]: DEBUG oslo_vmware.api [None req-58349f23-6bf8-4092-ac97-bf388655a094 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Waiting for the task: (returnval){ [ 863.967449] env[62070]: value = "task-1121909" [ 863.967449] env[62070]: _type = "Task" [ 863.967449] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 863.978255] env[62070]: DEBUG oslo_vmware.api [None req-58349f23-6bf8-4092-ac97-bf388655a094 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Task: {'id': task-1121909, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 864.090525] env[62070]: DEBUG oslo_vmware.api [None req-783a44c6-89e7-446a-8701-ffc7ee9343d8 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Task: {'id': task-1121907, 'name': CloneVM_Task} progress is 94%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 864.095105] env[62070]: INFO nova.compute.manager [None req-e0c1f7c9-4467-40ac-bc84-3c816df5b227 tempest-ServersNegativeTestMultiTenantJSON-2128819833 tempest-ServersNegativeTestMultiTenantJSON-2128819833-project-member] [instance: 4bba7448-69f7-4764-9ae6-eb6585f71515] Took 32.15 seconds to build instance. [ 864.175340] env[62070]: DEBUG oslo_vmware.api [None req-3079b572-a07c-4d35-8725-af61f5219860 tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]528d3ae5-12b8-084b-688b-efe3fd60d3ef, 'name': SearchDatastore_Task, 'duration_secs': 0.011805} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 864.175671] env[62070]: DEBUG oslo_concurrency.lockutils [None req-3079b572-a07c-4d35-8725-af61f5219860 tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] Releasing lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 864.175978] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-3079b572-a07c-4d35-8725-af61f5219860 tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] [instance: 61ab347d-1342-4f59-8955-10d575993b77] Processing image 43ea607c-7ece-4601-9b11-75c6a16aa7dd {{(pid=62070) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 864.176765] env[62070]: DEBUG oslo_concurrency.lockutils [None req-3079b572-a07c-4d35-8725-af61f5219860 tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 864.176949] env[62070]: DEBUG oslo_concurrency.lockutils [None req-3079b572-a07c-4d35-8725-af61f5219860 tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] Acquired lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 864.177168] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-3079b572-a07c-4d35-8725-af61f5219860 tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 864.177479] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-cdfa9212-d920-4525-94b4-28e90091d6d7 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.197417] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-3079b572-a07c-4d35-8725-af61f5219860 tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 864.197695] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-3079b572-a07c-4d35-8725-af61f5219860 tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62070) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 864.198578] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5843dda2-e28c-464f-9ada-631d3c663834 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.209569] env[62070]: DEBUG oslo_vmware.api [None req-3079b572-a07c-4d35-8725-af61f5219860 tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] Waiting for the task: (returnval){ [ 864.209569] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]52a44d58-7665-ca54-676e-7f4d22774a09" [ 864.209569] env[62070]: _type = "Task" [ 864.209569] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 864.225267] env[62070]: DEBUG oslo_vmware.api [None req-3079b572-a07c-4d35-8725-af61f5219860 tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52a44d58-7665-ca54-676e-7f4d22774a09, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 864.236147] env[62070]: DEBUG nova.compute.manager [None req-cdabb9ea-ead2-4381-8d7a-de546f5dce91 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 58146b84-7589-4f21-bdab-605cee535e55] Start building block device mappings for instance. {{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 864.251311] env[62070]: DEBUG nova.network.neutron [None req-cdabb9ea-ead2-4381-8d7a-de546f5dce91 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 58146b84-7589-4f21-bdab-605cee535e55] Successfully created port: f1b26c6a-d4b8-49c8-b247-27a2e9e76076 {{(pid=62070) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 864.267820] env[62070]: DEBUG oslo_vmware.api [None req-5285cf1a-979a-4750-b882-909847338032 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': task-1121908, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 864.295807] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00e72db2-8204-4812-8bc3-c2857a731c86 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.308010] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42c90890-7d94-41bf-a8f8-da8879b8fd02 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.343389] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf762f3f-430f-4ad8-9240-a307b8d4fba4 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.353595] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa1d2012-3252-458c-a780-49c2d2a9f9b5 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.368896] env[62070]: DEBUG nova.compute.provider_tree [None req-3861d588-4806-4a38-9554-f7a6f721db6e tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Updating inventory in ProviderTree for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 864.479489] env[62070]: DEBUG oslo_vmware.api [None req-58349f23-6bf8-4092-ac97-bf388655a094 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Task: {'id': task-1121909, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 864.582796] env[62070]: DEBUG oslo_vmware.api [None req-783a44c6-89e7-446a-8701-ffc7ee9343d8 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Task: {'id': task-1121907, 'name': CloneVM_Task} progress is 94%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 864.603587] env[62070]: DEBUG oslo_concurrency.lockutils [None req-e0c1f7c9-4467-40ac-bc84-3c816df5b227 tempest-ServersNegativeTestMultiTenantJSON-2128819833 tempest-ServersNegativeTestMultiTenantJSON-2128819833-project-member] Lock "4bba7448-69f7-4764-9ae6-eb6585f71515" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 33.670s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 864.725472] env[62070]: DEBUG oslo_vmware.api [None req-3079b572-a07c-4d35-8725-af61f5219860 tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52a44d58-7665-ca54-676e-7f4d22774a09, 'name': SearchDatastore_Task, 'duration_secs': 0.057607} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 864.725998] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-00981e46-f199-4608-b823-79e272df7894 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.733226] env[62070]: DEBUG oslo_vmware.api [None req-3079b572-a07c-4d35-8725-af61f5219860 tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] Waiting for the task: (returnval){ [ 864.733226] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]52b3a766-ef57-a055-b6f5-79801ed2931a" [ 864.733226] env[62070]: _type = "Task" [ 864.733226] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 864.747178] env[62070]: DEBUG oslo_vmware.api [None req-3079b572-a07c-4d35-8725-af61f5219860 tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52b3a766-ef57-a055-b6f5-79801ed2931a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 864.763260] env[62070]: DEBUG oslo_vmware.api [None req-5285cf1a-979a-4750-b882-909847338032 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': task-1121908, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.619186} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 864.763541] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-5285cf1a-979a-4750-b882-909847338032 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore2] c16d175c-0b23-4f72-bdb0-844c6f80fd32/c16d175c-0b23-4f72-bdb0-844c6f80fd32.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 864.763922] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-5285cf1a-979a-4750-b882-909847338032 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: c16d175c-0b23-4f72-bdb0-844c6f80fd32] Extending root virtual disk to 1048576 {{(pid=62070) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 864.764228] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4b6e397a-730f-4e0b-a7e0-7564d1dba10a {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.776610] env[62070]: DEBUG oslo_vmware.api [None req-5285cf1a-979a-4750-b882-909847338032 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Waiting for the task: (returnval){ [ 864.776610] env[62070]: value = "task-1121910" [ 864.776610] env[62070]: _type = "Task" [ 864.776610] env[62070]: } to complete. 
{{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 864.786020] env[62070]: DEBUG oslo_vmware.api [None req-5285cf1a-979a-4750-b882-909847338032 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': task-1121910, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 864.915021] env[62070]: DEBUG nova.scheduler.client.report [None req-3861d588-4806-4a38-9554-f7a6f721db6e tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Updated inventory for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 with generation 97 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 864.915021] env[62070]: DEBUG nova.compute.provider_tree [None req-3861d588-4806-4a38-9554-f7a6f721db6e tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Updating resource provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 generation from 97 to 98 during operation: update_inventory {{(pid=62070) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 864.915021] env[62070]: DEBUG nova.compute.provider_tree [None req-3861d588-4806-4a38-9554-f7a6f721db6e tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Updating inventory in ProviderTree for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 864.980840] env[62070]: DEBUG oslo_vmware.api [None req-58349f23-6bf8-4092-ac97-bf388655a094 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Task: {'id': task-1121909, 'name': ReconfigVM_Task, 'duration_secs': 0.674531} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 864.981570] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-58349f23-6bf8-4092-ac97-bf388655a094 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: fb054a32-c1aa-4884-a087-da5ad34cf3c4] Reconfigured VM instance instance-00000048 to attach disk [datastore2] fb054a32-c1aa-4884-a087-da5ad34cf3c4/fb054a32-c1aa-4884-a087-da5ad34cf3c4.vmdk or device None with type sparse {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 864.982400] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5c3a4070-5154-43ef-b71c-93373ad689a0 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.990644] env[62070]: DEBUG oslo_vmware.api [None req-58349f23-6bf8-4092-ac97-bf388655a094 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Waiting for the task: (returnval){ [ 864.990644] env[62070]: value = "task-1121911" [ 864.990644] env[62070]: _type = "Task" [ 864.990644] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 865.002980] env[62070]: DEBUG oslo_vmware.api [None req-58349f23-6bf8-4092-ac97-bf388655a094 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Task: {'id': task-1121911, 'name': Rename_Task} progress is 5%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 865.083536] env[62070]: DEBUG oslo_vmware.api [None req-783a44c6-89e7-446a-8701-ffc7ee9343d8 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Task: {'id': task-1121907, 'name': CloneVM_Task} progress is 94%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 865.245016] env[62070]: DEBUG oslo_vmware.api [None req-3079b572-a07c-4d35-8725-af61f5219860 tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52b3a766-ef57-a055-b6f5-79801ed2931a, 'name': SearchDatastore_Task, 'duration_secs': 0.022894} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 865.247371] env[62070]: DEBUG oslo_concurrency.lockutils [None req-3079b572-a07c-4d35-8725-af61f5219860 tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] Releasing lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 865.247371] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-3079b572-a07c-4d35-8725-af61f5219860 tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore2] 61ab347d-1342-4f59-8955-10d575993b77/61ab347d-1342-4f59-8955-10d575993b77.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 865.247371] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e7aad908-1ef5-4d28-803d-75a05d0b6e64 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.249534] env[62070]: DEBUG nova.compute.manager [None req-cdabb9ea-ead2-4381-8d7a-de546f5dce91 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 58146b84-7589-4f21-bdab-605cee535e55] Start spawning the instance on the hypervisor. {{(pid=62070) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 865.262956] env[62070]: DEBUG oslo_vmware.api [None req-3079b572-a07c-4d35-8725-af61f5219860 tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] Waiting for the task: (returnval){ [ 865.262956] env[62070]: value = "task-1121912" [ 865.262956] env[62070]: _type = "Task" [ 865.262956] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 865.281108] env[62070]: DEBUG oslo_vmware.api [None req-3079b572-a07c-4d35-8725-af61f5219860 tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] Task: {'id': task-1121912, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 865.283402] env[62070]: DEBUG nova.virt.hardware [None req-cdabb9ea-ead2-4381-8d7a-de546f5dce91 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T09:21:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='c6b22aeee7557c7dd5d8cdd2ccbd233c',container_format='bare',created_at=2024-10-03T09:27:42Z,direct_url=,disk_format='vmdk',id=f721b0bd-3ff9-47e3-950c-4df5b98cd172,min_disk=1,min_ram=0,name='tempest-test-snap-1291218742',owner='eac8e5edc8f14fff89aba7c8cb6cac5d',properties=ImageMetaProps,protected=,size=21334016,status='active',tags=,updated_at=2024-10-03T09:27:58Z,virtual_size=,visibility=), allow threads: False {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 865.283722] env[62070]: DEBUG nova.virt.hardware [None req-cdabb9ea-ead2-4381-8d7a-de546f5dce91 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Flavor limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 865.284135] env[62070]: DEBUG nova.virt.hardware [None req-cdabb9ea-ead2-4381-8d7a-de546f5dce91 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Image limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 865.284135] env[62070]: DEBUG nova.virt.hardware [None req-cdabb9ea-ead2-4381-8d7a-de546f5dce91 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Flavor pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 865.284340] env[62070]: DEBUG nova.virt.hardware [None req-cdabb9ea-ead2-4381-8d7a-de546f5dce91 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Image pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 865.284611] env[62070]: DEBUG nova.virt.hardware [None req-cdabb9ea-ead2-4381-8d7a-de546f5dce91 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 865.284900] env[62070]: DEBUG nova.virt.hardware [None req-cdabb9ea-ead2-4381-8d7a-de546f5dce91 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 865.285116] env[62070]: DEBUG nova.virt.hardware [None req-cdabb9ea-ead2-4381-8d7a-de546f5dce91 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 865.285352] env[62070]: DEBUG nova.virt.hardware [None req-cdabb9ea-ead2-4381-8d7a-de546f5dce91 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] 
Got 1 possible topologies {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 865.285913] env[62070]: DEBUG nova.virt.hardware [None req-cdabb9ea-ead2-4381-8d7a-de546f5dce91 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 865.286145] env[62070]: DEBUG nova.virt.hardware [None req-cdabb9ea-ead2-4381-8d7a-de546f5dce91 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 865.291769] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d770085a-5046-41fa-9dc7-15851bf74b4d {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.301712] env[62070]: DEBUG oslo_vmware.api [None req-5285cf1a-979a-4750-b882-909847338032 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': task-1121910, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.203686} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 865.303986] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-5285cf1a-979a-4750-b882-909847338032 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: c16d175c-0b23-4f72-bdb0-844c6f80fd32] Extended root virtual disk {{(pid=62070) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 865.304821] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70f91935-818c-420e-a1a9-f6a6b36c1d86 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.308598] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72c95900-9a1b-4a9a-baa9-189439c3c147 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.347444] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-5285cf1a-979a-4750-b882-909847338032 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: c16d175c-0b23-4f72-bdb0-844c6f80fd32] Reconfiguring VM instance instance-00000044 to attach disk [datastore2] c16d175c-0b23-4f72-bdb0-844c6f80fd32/c16d175c-0b23-4f72-bdb0-844c6f80fd32.vmdk or device None with type sparse {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 865.347803] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-13e9b7aa-cd5a-41cd-98e1-2a32108ff3a1 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.370084] env[62070]: DEBUG oslo_vmware.api [None req-5285cf1a-979a-4750-b882-909847338032 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Waiting for the task: (returnval){ [ 865.370084] env[62070]: value = "task-1121913" [ 865.370084] env[62070]: _type = "Task" [ 865.370084] env[62070]: } to complete. 
{{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 865.379329] env[62070]: DEBUG oslo_vmware.api [None req-5285cf1a-979a-4750-b882-909847338032 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': task-1121913, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 865.420047] env[62070]: DEBUG oslo_concurrency.lockutils [None req-3861d588-4806-4a38-9554-f7a6f721db6e tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.197s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 865.422620] env[62070]: DEBUG oslo_concurrency.lockutils [None req-bffd3cab-e369-40e8-ad65-4e451f071c3d tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 19.959s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 865.422905] env[62070]: DEBUG nova.objects.instance [None req-bffd3cab-e369-40e8-ad65-4e451f071c3d tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Lazy-loading 'resources' on Instance uuid 242cf24f-f9e0-49ca-9b3e-a2b88b3cdcdc {{(pid=62070) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 865.449938] env[62070]: INFO nova.scheduler.client.report [None req-3861d588-4806-4a38-9554-f7a6f721db6e tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Deleted allocations for instance f75ed36e-16c8-4a6b-bd39-eb4057ef0691 [ 865.504271] env[62070]: DEBUG oslo_vmware.api [None req-58349f23-6bf8-4092-ac97-bf388655a094 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Task: {'id': task-1121911, 'name': Rename_Task, 'duration_secs': 0.473058} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 865.504490] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-58349f23-6bf8-4092-ac97-bf388655a094 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: fb054a32-c1aa-4884-a087-da5ad34cf3c4] Powering on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 865.504792] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a5b1c121-59ef-4572-a997-08e3df641ff1 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.514008] env[62070]: DEBUG oslo_vmware.api [None req-58349f23-6bf8-4092-ac97-bf388655a094 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Waiting for the task: (returnval){ [ 865.514008] env[62070]: value = "task-1121914" [ 865.514008] env[62070]: _type = "Task" [ 865.514008] env[62070]: } to complete. 
{{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 865.522874] env[62070]: DEBUG oslo_vmware.api [None req-58349f23-6bf8-4092-ac97-bf388655a094 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Task: {'id': task-1121914, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 865.584187] env[62070]: DEBUG oslo_vmware.api [None req-783a44c6-89e7-446a-8701-ffc7ee9343d8 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Task: {'id': task-1121907, 'name': CloneVM_Task} progress is 100%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 865.769245] env[62070]: DEBUG oslo_vmware.api [None req-3079b572-a07c-4d35-8725-af61f5219860 tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] Task: {'id': task-1121912, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 865.773663] env[62070]: DEBUG nova.compute.manager [req-851ce0c3-9bbd-4a50-a3ce-942416391695 req-21593b4c-e69f-4c13-ba19-f3d3d004a71c service nova] [instance: 58146b84-7589-4f21-bdab-605cee535e55] Received event network-vif-plugged-f1b26c6a-d4b8-49c8-b247-27a2e9e76076 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 865.773663] env[62070]: DEBUG oslo_concurrency.lockutils [req-851ce0c3-9bbd-4a50-a3ce-942416391695 req-21593b4c-e69f-4c13-ba19-f3d3d004a71c service nova] Acquiring lock "58146b84-7589-4f21-bdab-605cee535e55-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 865.773663] env[62070]: DEBUG oslo_concurrency.lockutils [req-851ce0c3-9bbd-4a50-a3ce-942416391695 req-21593b4c-e69f-4c13-ba19-f3d3d004a71c service nova] Lock "58146b84-7589-4f21-bdab-605cee535e55-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 865.773663] env[62070]: DEBUG oslo_concurrency.lockutils [req-851ce0c3-9bbd-4a50-a3ce-942416391695 req-21593b4c-e69f-4c13-ba19-f3d3d004a71c service nova] Lock "58146b84-7589-4f21-bdab-605cee535e55-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 865.773663] env[62070]: DEBUG nova.compute.manager [req-851ce0c3-9bbd-4a50-a3ce-942416391695 req-21593b4c-e69f-4c13-ba19-f3d3d004a71c service nova] [instance: 58146b84-7589-4f21-bdab-605cee535e55] No waiting events found dispatching network-vif-plugged-f1b26c6a-d4b8-49c8-b247-27a2e9e76076 {{(pid=62070) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 865.774239] env[62070]: WARNING nova.compute.manager [req-851ce0c3-9bbd-4a50-a3ce-942416391695 req-21593b4c-e69f-4c13-ba19-f3d3d004a71c service nova] [instance: 58146b84-7589-4f21-bdab-605cee535e55] Received unexpected event network-vif-plugged-f1b26c6a-d4b8-49c8-b247-27a2e9e76076 for instance with vm_state building and task_state spawning. 
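The pattern repeated throughout the entries above -- acquire a named lock on the cached image, check the datastore, release the lock, then submit a vCenter task (CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, ...) and poll it until it reports "completed successfully" -- can be summarized with the sketch below. This is a simplified stand-in, not Nova's actual code: `search_datastore`, `submit_copy`, and `get_task_progress` are hypothetical helpers standing in for the oslo.vmware session calls, and only `lockutils.lock` from oslo.concurrency is a real API (it is what produces the "acquired"/"released" DEBUG lines seen here).

```python
# Sketch of the lock-then-poll pattern visible in the log above.
# Assumptions: search_datastore(), submit_copy() and get_task_progress() are
# hypothetical stand-ins for the real oslo.vmware session calls;
# lockutils.lock is the real oslo.concurrency context manager.
import time

from oslo_concurrency import lockutils


def wait_for(task_id, get_task_progress, poll_interval=0.5, timeout=300):
    """Poll a task until it completes, mirroring the 'progress is N%' lines."""
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        progress = get_task_progress(task_id)   # e.g. 0, 51, 94, 100
        print(f"Task {task_id} progress is {progress}%")
        if progress >= 100:
            print(f"Task {task_id} completed successfully")
            return
        time.sleep(poll_interval)
    raise TimeoutError(f"Task {task_id} did not complete in {timeout}s")


def fetch_cached_image_then_copy(cache_vmdk, instance_vmdk,
                                 search_datastore, submit_copy,
                                 get_task_progress):
    # Hold the per-image lock only while checking the image cache, as the
    # 'Acquiring/Acquired/Releasing lock "[datastore2] devstack-image-cache_base/...vmdk"'
    # lines do.
    with lockutils.lock(cache_vmdk):
        search_datastore(cache_vmdk)             # SearchDatastore_Task in the log
    # After the lock is released, the copy is submitted and polled lock-free.
    task_id = submit_copy(cache_vmdk, instance_vmdk)   # CopyVirtualDisk_Task
    wait_for(task_id, get_task_progress)
```

The point of the per-image lock is presumably to keep concurrent instance builds from racing on the same shared cache entry, while the long-running copy is polled without holding the lock so the other builds interleaved in this log (task-1121908, task-1121909, task-1121912, ...) can proceed in parallel.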
[ 865.869235] env[62070]: DEBUG nova.network.neutron [None req-cdabb9ea-ead2-4381-8d7a-de546f5dce91 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 58146b84-7589-4f21-bdab-605cee535e55] Successfully updated port: f1b26c6a-d4b8-49c8-b247-27a2e9e76076 {{(pid=62070) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 865.885455] env[62070]: DEBUG oslo_vmware.api [None req-5285cf1a-979a-4750-b882-909847338032 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': task-1121913, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 865.961340] env[62070]: DEBUG oslo_concurrency.lockutils [None req-3861d588-4806-4a38-9554-f7a6f721db6e tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Lock "f75ed36e-16c8-4a6b-bd39-eb4057ef0691" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 27.029s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 866.026558] env[62070]: DEBUG oslo_vmware.api [None req-58349f23-6bf8-4092-ac97-bf388655a094 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Task: {'id': task-1121914, 'name': PowerOnVM_Task} progress is 78%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.086322] env[62070]: DEBUG oslo_vmware.api [None req-783a44c6-89e7-446a-8701-ffc7ee9343d8 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Task: {'id': task-1121907, 'name': CloneVM_Task, 'duration_secs': 2.086748} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 866.087068] env[62070]: INFO nova.virt.vmwareapi.vmops [None req-783a44c6-89e7-446a-8701-ffc7ee9343d8 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 62758a38-4819-4d5a-97ed-db6c9ceb97bf] Created linked-clone VM from snapshot [ 866.088026] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d96e9979-fb79-4f85-8146-6c469c96a568 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.098156] env[62070]: DEBUG nova.virt.vmwareapi.images [None req-783a44c6-89e7-446a-8701-ffc7ee9343d8 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 62758a38-4819-4d5a-97ed-db6c9ceb97bf] Uploading image 68e7a958-3cd7-4176-af06-dab64f015559 {{(pid=62070) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:442}} [ 866.107897] env[62070]: DEBUG oslo_concurrency.lockutils [None req-ee096c9d-cf7f-41b7-a4e4-dadeeb65a307 tempest-ServersNegativeTestMultiTenantJSON-2128819833 tempest-ServersNegativeTestMultiTenantJSON-2128819833-project-member] Acquiring lock "4bba7448-69f7-4764-9ae6-eb6585f71515" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 866.108200] env[62070]: DEBUG oslo_concurrency.lockutils [None req-ee096c9d-cf7f-41b7-a4e4-dadeeb65a307 tempest-ServersNegativeTestMultiTenantJSON-2128819833 tempest-ServersNegativeTestMultiTenantJSON-2128819833-project-member] Lock "4bba7448-69f7-4764-9ae6-eb6585f71515" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 866.108454] env[62070]: DEBUG oslo_concurrency.lockutils [None req-ee096c9d-cf7f-41b7-a4e4-dadeeb65a307 tempest-ServersNegativeTestMultiTenantJSON-2128819833 tempest-ServersNegativeTestMultiTenantJSON-2128819833-project-member] Acquiring lock "4bba7448-69f7-4764-9ae6-eb6585f71515-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 866.108679] env[62070]: DEBUG oslo_concurrency.lockutils [None req-ee096c9d-cf7f-41b7-a4e4-dadeeb65a307 tempest-ServersNegativeTestMultiTenantJSON-2128819833 tempest-ServersNegativeTestMultiTenantJSON-2128819833-project-member] Lock "4bba7448-69f7-4764-9ae6-eb6585f71515-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 866.108888] env[62070]: DEBUG oslo_concurrency.lockutils [None req-ee096c9d-cf7f-41b7-a4e4-dadeeb65a307 tempest-ServersNegativeTestMultiTenantJSON-2128819833 tempest-ServersNegativeTestMultiTenantJSON-2128819833-project-member] Lock "4bba7448-69f7-4764-9ae6-eb6585f71515-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 866.111012] env[62070]: INFO nova.compute.manager [None 
req-ee096c9d-cf7f-41b7-a4e4-dadeeb65a307 tempest-ServersNegativeTestMultiTenantJSON-2128819833 tempest-ServersNegativeTestMultiTenantJSON-2128819833-project-member] [instance: 4bba7448-69f7-4764-9ae6-eb6585f71515] Terminating instance [ 866.113566] env[62070]: DEBUG nova.compute.manager [None req-ee096c9d-cf7f-41b7-a4e4-dadeeb65a307 tempest-ServersNegativeTestMultiTenantJSON-2128819833 tempest-ServersNegativeTestMultiTenantJSON-2128819833-project-member] [instance: 4bba7448-69f7-4764-9ae6-eb6585f71515] Start destroying the instance on the hypervisor. {{(pid=62070) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 866.113906] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-ee096c9d-cf7f-41b7-a4e4-dadeeb65a307 tempest-ServersNegativeTestMultiTenantJSON-2128819833 tempest-ServersNegativeTestMultiTenantJSON-2128819833-project-member] [instance: 4bba7448-69f7-4764-9ae6-eb6585f71515] Destroying instance {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 866.114791] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05bdc2f8-84e8-4d16-b386-8fb26bdc76da {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.127236] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-ee096c9d-cf7f-41b7-a4e4-dadeeb65a307 tempest-ServersNegativeTestMultiTenantJSON-2128819833 tempest-ServersNegativeTestMultiTenantJSON-2128819833-project-member] [instance: 4bba7448-69f7-4764-9ae6-eb6585f71515] Powering off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 866.127506] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9b59fba1-328f-41e8-a746-645e6baf3c1c {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.135258] env[62070]: DEBUG oslo_vmware.rw_handles [None req-783a44c6-89e7-446a-8701-ffc7ee9343d8 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 866.135258] env[62070]: value = "vm-245435" [ 866.135258] env[62070]: _type = "VirtualMachine" [ 866.135258] env[62070]: }. {{(pid=62070) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 866.136894] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-34c01050-eb59-4d0c-afc9-82beda3fe263 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.139095] env[62070]: DEBUG oslo_vmware.api [None req-ee096c9d-cf7f-41b7-a4e4-dadeeb65a307 tempest-ServersNegativeTestMultiTenantJSON-2128819833 tempest-ServersNegativeTestMultiTenantJSON-2128819833-project-member] Waiting for the task: (returnval){ [ 866.139095] env[62070]: value = "task-1121915" [ 866.139095] env[62070]: _type = "Task" [ 866.139095] env[62070]: } to complete. 
{{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 866.143972] env[62070]: DEBUG oslo_vmware.rw_handles [None req-783a44c6-89e7-446a-8701-ffc7ee9343d8 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Lease: (returnval){ [ 866.143972] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]52ca3608-4f1f-a51b-93a8-11dc1ffde505" [ 866.143972] env[62070]: _type = "HttpNfcLease" [ 866.143972] env[62070]: } obtained for exporting VM: (result){ [ 866.143972] env[62070]: value = "vm-245435" [ 866.143972] env[62070]: _type = "VirtualMachine" [ 866.143972] env[62070]: }. {{(pid=62070) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 866.144394] env[62070]: DEBUG oslo_vmware.api [None req-783a44c6-89e7-446a-8701-ffc7ee9343d8 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Waiting for the lease: (returnval){ [ 866.144394] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]52ca3608-4f1f-a51b-93a8-11dc1ffde505" [ 866.144394] env[62070]: _type = "HttpNfcLease" [ 866.144394] env[62070]: } to be ready. {{(pid=62070) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 866.154299] env[62070]: DEBUG oslo_vmware.api [None req-ee096c9d-cf7f-41b7-a4e4-dadeeb65a307 tempest-ServersNegativeTestMultiTenantJSON-2128819833 tempest-ServersNegativeTestMultiTenantJSON-2128819833-project-member] Task: {'id': task-1121915, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.157681] env[62070]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 866.157681] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]52ca3608-4f1f-a51b-93a8-11dc1ffde505" [ 866.157681] env[62070]: _type = "HttpNfcLease" [ 866.157681] env[62070]: } is initializing. {{(pid=62070) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 866.272834] env[62070]: DEBUG oslo_vmware.api [None req-3079b572-a07c-4d35-8725-af61f5219860 tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] Task: {'id': task-1121912, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.532909} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 866.277059] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-3079b572-a07c-4d35-8725-af61f5219860 tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore2] 61ab347d-1342-4f59-8955-10d575993b77/61ab347d-1342-4f59-8955-10d575993b77.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 866.277059] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-3079b572-a07c-4d35-8725-af61f5219860 tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] [instance: 61ab347d-1342-4f59-8955-10d575993b77] Extending root virtual disk to 1048576 {{(pid=62070) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 866.277059] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e1339f5f-2eab-4e0a-a8c9-6a7c18a8ccba {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.285025] env[62070]: DEBUG oslo_vmware.api [None req-3079b572-a07c-4d35-8725-af61f5219860 tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] Waiting for the task: (returnval){ [ 866.285025] env[62070]: value = "task-1121917" [ 866.285025] env[62070]: _type = "Task" [ 866.285025] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 866.295202] env[62070]: DEBUG oslo_vmware.api [None req-3079b572-a07c-4d35-8725-af61f5219860 tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] Task: {'id': task-1121917, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.318926] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-259e8797-039f-4087-9582-8db0a25f4780 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.326012] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0c26b1b-7e7e-4f45-b47a-61974359e45e {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.367477] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11e0cb32-0165-4a93-bc4b-9c78b2999c0e {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.374536] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-696eece6-9a2d-4c60-9078-ce8e8ce98bf5 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.379831] env[62070]: DEBUG oslo_concurrency.lockutils [None req-cdabb9ea-ead2-4381-8d7a-de546f5dce91 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Acquiring lock "refresh_cache-58146b84-7589-4f21-bdab-605cee535e55" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 866.379831] env[62070]: DEBUG oslo_concurrency.lockutils [None req-cdabb9ea-ead2-4381-8d7a-de546f5dce91 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Acquired lock "refresh_cache-58146b84-7589-4f21-bdab-605cee535e55" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 866.380021] env[62070]: DEBUG nova.network.neutron [None req-cdabb9ea-ead2-4381-8d7a-de546f5dce91 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 58146b84-7589-4f21-bdab-605cee535e55] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 866.395297] env[62070]: DEBUG nova.compute.provider_tree [None req-bffd3cab-e369-40e8-ad65-4e451f071c3d tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 866.400205] env[62070]: DEBUG oslo_vmware.api [None req-5285cf1a-979a-4750-b882-909847338032 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': task-1121913, 'name': ReconfigVM_Task, 'duration_secs': 0.650381} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 866.400677] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-5285cf1a-979a-4750-b882-909847338032 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: c16d175c-0b23-4f72-bdb0-844c6f80fd32] Reconfigured VM instance instance-00000044 to attach disk [datastore2] c16d175c-0b23-4f72-bdb0-844c6f80fd32/c16d175c-0b23-4f72-bdb0-844c6f80fd32.vmdk or device None with type sparse {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 866.402313] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-59b3af94-18b7-40c8-a143-63a881927431 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.408035] env[62070]: DEBUG oslo_vmware.api [None req-5285cf1a-979a-4750-b882-909847338032 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Waiting for the task: (returnval){ [ 866.408035] env[62070]: value = "task-1121918" [ 866.408035] env[62070]: _type = "Task" [ 866.408035] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 866.417454] env[62070]: DEBUG oslo_vmware.api [None req-5285cf1a-979a-4750-b882-909847338032 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': task-1121918, 'name': Rename_Task} progress is 5%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.525353] env[62070]: DEBUG oslo_vmware.api [None req-58349f23-6bf8-4092-ac97-bf388655a094 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Task: {'id': task-1121914, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.652024] env[62070]: DEBUG oslo_vmware.api [None req-ee096c9d-cf7f-41b7-a4e4-dadeeb65a307 tempest-ServersNegativeTestMultiTenantJSON-2128819833 tempest-ServersNegativeTestMultiTenantJSON-2128819833-project-member] Task: {'id': task-1121915, 'name': PowerOffVM_Task, 'duration_secs': 0.402105} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 866.653145] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-ee096c9d-cf7f-41b7-a4e4-dadeeb65a307 tempest-ServersNegativeTestMultiTenantJSON-2128819833 tempest-ServersNegativeTestMultiTenantJSON-2128819833-project-member] [instance: 4bba7448-69f7-4764-9ae6-eb6585f71515] Powered off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 866.653519] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-ee096c9d-cf7f-41b7-a4e4-dadeeb65a307 tempest-ServersNegativeTestMultiTenantJSON-2128819833 tempest-ServersNegativeTestMultiTenantJSON-2128819833-project-member] [instance: 4bba7448-69f7-4764-9ae6-eb6585f71515] Unregistering the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 866.654204] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-55cc3cb5-be88-49cd-88af-f2ee66107415 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.661723] env[62070]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 866.661723] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]52ca3608-4f1f-a51b-93a8-11dc1ffde505" [ 866.661723] env[62070]: _type = "HttpNfcLease" [ 866.661723] env[62070]: } is ready. {{(pid=62070) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 866.661723] env[62070]: DEBUG oslo_vmware.rw_handles [None req-783a44c6-89e7-446a-8701-ffc7ee9343d8 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 866.661723] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]52ca3608-4f1f-a51b-93a8-11dc1ffde505" [ 866.661723] env[62070]: _type = "HttpNfcLease" [ 866.661723] env[62070]: }. {{(pid=62070) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 866.662570] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee99ffdf-5eec-4a9a-9d06-d28494205c4e {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.672402] env[62070]: DEBUG oslo_vmware.rw_handles [None req-783a44c6-89e7-446a-8701-ffc7ee9343d8 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/527a9a4b-6bfe-3418-1417-5fe48295fe06/disk-0.vmdk from lease info. {{(pid=62070) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 866.672810] env[62070]: DEBUG oslo_vmware.rw_handles [None req-783a44c6-89e7-446a-8701-ffc7ee9343d8 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/527a9a4b-6bfe-3418-1417-5fe48295fe06/disk-0.vmdk for reading. {{(pid=62070) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 866.793121] env[62070]: DEBUG oslo_vmware.api [None req-3079b572-a07c-4d35-8725-af61f5219860 tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] Task: {'id': task-1121917, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.806282] env[62070]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-f02abddf-d164-41fd-a13e-9340f27cceeb {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.902400] env[62070]: DEBUG nova.scheduler.client.report [None req-bffd3cab-e369-40e8-ad65-4e451f071c3d tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 866.921762] env[62070]: DEBUG oslo_vmware.api [None req-5285cf1a-979a-4750-b882-909847338032 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': task-1121918, 'name': Rename_Task, 'duration_secs': 0.23141} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 866.923250] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-5285cf1a-979a-4750-b882-909847338032 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: c16d175c-0b23-4f72-bdb0-844c6f80fd32] Powering on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 866.923250] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9893804f-c315-43c7-a125-1d54e6f06605 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.926149] env[62070]: DEBUG nova.network.neutron [None req-cdabb9ea-ead2-4381-8d7a-de546f5dce91 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 58146b84-7589-4f21-bdab-605cee535e55] Instance cache missing network info. 
{{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 866.930537] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-ee096c9d-cf7f-41b7-a4e4-dadeeb65a307 tempest-ServersNegativeTestMultiTenantJSON-2128819833 tempest-ServersNegativeTestMultiTenantJSON-2128819833-project-member] [instance: 4bba7448-69f7-4764-9ae6-eb6585f71515] Unregistered the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 866.930747] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-ee096c9d-cf7f-41b7-a4e4-dadeeb65a307 tempest-ServersNegativeTestMultiTenantJSON-2128819833 tempest-ServersNegativeTestMultiTenantJSON-2128819833-project-member] [instance: 4bba7448-69f7-4764-9ae6-eb6585f71515] Deleting contents of the VM from datastore datastore2 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 866.930932] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-ee096c9d-cf7f-41b7-a4e4-dadeeb65a307 tempest-ServersNegativeTestMultiTenantJSON-2128819833 tempest-ServersNegativeTestMultiTenantJSON-2128819833-project-member] Deleting the datastore file [datastore2] 4bba7448-69f7-4764-9ae6-eb6585f71515 {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 866.931446] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e553873c-ef4d-4b16-be95-f56cfbe82857 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.935042] env[62070]: DEBUG oslo_vmware.api [None req-5285cf1a-979a-4750-b882-909847338032 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Waiting for the task: (returnval){ [ 866.935042] env[62070]: value = "task-1121920" [ 866.935042] env[62070]: _type = "Task" [ 866.935042] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 866.939747] env[62070]: DEBUG oslo_vmware.api [None req-ee096c9d-cf7f-41b7-a4e4-dadeeb65a307 tempest-ServersNegativeTestMultiTenantJSON-2128819833 tempest-ServersNegativeTestMultiTenantJSON-2128819833-project-member] Waiting for the task: (returnval){ [ 866.939747] env[62070]: value = "task-1121921" [ 866.939747] env[62070]: _type = "Task" [ 866.939747] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 866.946773] env[62070]: DEBUG oslo_vmware.api [None req-5285cf1a-979a-4750-b882-909847338032 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': task-1121920, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.954358] env[62070]: DEBUG oslo_vmware.api [None req-ee096c9d-cf7f-41b7-a4e4-dadeeb65a307 tempest-ServersNegativeTestMultiTenantJSON-2128819833 tempest-ServersNegativeTestMultiTenantJSON-2128819833-project-member] Task: {'id': task-1121921, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 867.026220] env[62070]: DEBUG oslo_vmware.api [None req-58349f23-6bf8-4092-ac97-bf388655a094 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Task: {'id': task-1121914, 'name': PowerOnVM_Task, 'duration_secs': 1.09555} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 867.028476] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-58349f23-6bf8-4092-ac97-bf388655a094 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: fb054a32-c1aa-4884-a087-da5ad34cf3c4] Powered on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 867.028702] env[62070]: INFO nova.compute.manager [None req-58349f23-6bf8-4092-ac97-bf388655a094 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: fb054a32-c1aa-4884-a087-da5ad34cf3c4] Took 11.03 seconds to spawn the instance on the hypervisor. [ 867.028893] env[62070]: DEBUG nova.compute.manager [None req-58349f23-6bf8-4092-ac97-bf388655a094 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: fb054a32-c1aa-4884-a087-da5ad34cf3c4] Checking state {{(pid=62070) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 867.030183] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f7b2082-8b1b-4756-acfd-e564207416af {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.094065] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6adc601d-ffcb-4677-99aa-bd0495d3c046 tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Acquiring lock "2368b649-f931-454c-92cc-971df4155d90" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 867.095120] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6adc601d-ffcb-4677-99aa-bd0495d3c046 tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Lock "2368b649-f931-454c-92cc-971df4155d90" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 867.095564] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6adc601d-ffcb-4677-99aa-bd0495d3c046 tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Acquiring lock "2368b649-f931-454c-92cc-971df4155d90-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 867.095937] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6adc601d-ffcb-4677-99aa-bd0495d3c046 tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Lock "2368b649-f931-454c-92cc-971df4155d90-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s 
{{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 867.096657] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6adc601d-ffcb-4677-99aa-bd0495d3c046 tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Lock "2368b649-f931-454c-92cc-971df4155d90-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 867.100397] env[62070]: INFO nova.compute.manager [None req-6adc601d-ffcb-4677-99aa-bd0495d3c046 tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] [instance: 2368b649-f931-454c-92cc-971df4155d90] Terminating instance [ 867.103518] env[62070]: DEBUG nova.compute.manager [None req-6adc601d-ffcb-4677-99aa-bd0495d3c046 tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] [instance: 2368b649-f931-454c-92cc-971df4155d90] Start destroying the instance on the hypervisor. {{(pid=62070) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 867.104199] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-6adc601d-ffcb-4677-99aa-bd0495d3c046 tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] [instance: 2368b649-f931-454c-92cc-971df4155d90] Destroying instance {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 867.105319] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-539b2636-7395-4be1-ac4e-6c0de215926c {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.118616] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-6adc601d-ffcb-4677-99aa-bd0495d3c046 tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] [instance: 2368b649-f931-454c-92cc-971df4155d90] Powering off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 867.118616] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1b2f79e6-6c22-4f13-8057-2c08a6ee8b57 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.129858] env[62070]: DEBUG oslo_vmware.api [None req-6adc601d-ffcb-4677-99aa-bd0495d3c046 tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Waiting for the task: (returnval){ [ 867.129858] env[62070]: value = "task-1121922" [ 867.129858] env[62070]: _type = "Task" [ 867.129858] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 867.144371] env[62070]: DEBUG oslo_vmware.api [None req-6adc601d-ffcb-4677-99aa-bd0495d3c046 tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Task: {'id': task-1121922, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 867.206589] env[62070]: DEBUG nova.network.neutron [None req-cdabb9ea-ead2-4381-8d7a-de546f5dce91 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 58146b84-7589-4f21-bdab-605cee535e55] Updating instance_info_cache with network_info: [{"id": "f1b26c6a-d4b8-49c8-b247-27a2e9e76076", "address": "fa:16:3e:56:f2:fd", "network": {"id": "5f4568f0-c3e8-497f-b7d6-8d92db2f4066", "bridge": "br-int", "label": "tempest-ImagesTestJSON-24352632-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "eac8e5edc8f14fff89aba7c8cb6cac5d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "54495d8d-2696-4f65-b925-e567abdc205f", "external-id": "nsx-vlan-transportzone-220", "segmentation_id": 220, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf1b26c6a-d4", "ovs_interfaceid": "f1b26c6a-d4b8-49c8-b247-27a2e9e76076", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 867.296251] env[62070]: DEBUG oslo_vmware.api [None req-3079b572-a07c-4d35-8725-af61f5219860 tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] Task: {'id': task-1121917, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.555684} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 867.296251] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-3079b572-a07c-4d35-8725-af61f5219860 tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] [instance: 61ab347d-1342-4f59-8955-10d575993b77] Extended root virtual disk {{(pid=62070) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 867.297182] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-361ea553-c7f2-4e9c-b065-b23e75805c26 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.322214] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-3079b572-a07c-4d35-8725-af61f5219860 tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] [instance: 61ab347d-1342-4f59-8955-10d575993b77] Reconfiguring VM instance instance-00000046 to attach disk [datastore2] 61ab347d-1342-4f59-8955-10d575993b77/61ab347d-1342-4f59-8955-10d575993b77.vmdk or device None with type sparse {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 867.322610] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-510fe8bd-8583-4037-b923-011b5955d8ca {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.344536] env[62070]: DEBUG oslo_vmware.api [None req-3079b572-a07c-4d35-8725-af61f5219860 tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] Waiting for the task: (returnval){ [ 867.344536] env[62070]: value = "task-1121923" [ 867.344536] env[62070]: _type = "Task" [ 867.344536] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 867.353801] env[62070]: DEBUG oslo_vmware.api [None req-3079b572-a07c-4d35-8725-af61f5219860 tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] Task: {'id': task-1121923, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 867.407804] env[62070]: DEBUG oslo_concurrency.lockutils [None req-bffd3cab-e369-40e8-ad65-4e451f071c3d tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.985s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 867.411646] env[62070]: DEBUG oslo_concurrency.lockutils [None req-759bf497-5e25-4b21-809d-f1952bc85c78 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 20.465s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 867.412057] env[62070]: DEBUG nova.objects.instance [None req-759bf497-5e25-4b21-809d-f1952bc85c78 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Lazy-loading 'resources' on Instance uuid 65fe3720-95cb-4620-b1c7-eae9e3bc3943 {{(pid=62070) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 867.445230] env[62070]: INFO nova.scheduler.client.report [None req-bffd3cab-e369-40e8-ad65-4e451f071c3d tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Deleted allocations for instance 242cf24f-f9e0-49ca-9b3e-a2b88b3cdcdc [ 867.461423] env[62070]: DEBUG oslo_vmware.api [None req-5285cf1a-979a-4750-b882-909847338032 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': task-1121920, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 867.465125] env[62070]: DEBUG oslo_vmware.api [None req-ee096c9d-cf7f-41b7-a4e4-dadeeb65a307 tempest-ServersNegativeTestMultiTenantJSON-2128819833 tempest-ServersNegativeTestMultiTenantJSON-2128819833-project-member] Task: {'id': task-1121921, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.142408} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 867.466042] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-ee096c9d-cf7f-41b7-a4e4-dadeeb65a307 tempest-ServersNegativeTestMultiTenantJSON-2128819833 tempest-ServersNegativeTestMultiTenantJSON-2128819833-project-member] Deleted the datastore file {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 867.466042] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-ee096c9d-cf7f-41b7-a4e4-dadeeb65a307 tempest-ServersNegativeTestMultiTenantJSON-2128819833 tempest-ServersNegativeTestMultiTenantJSON-2128819833-project-member] [instance: 4bba7448-69f7-4764-9ae6-eb6585f71515] Deleted contents of the VM from datastore datastore2 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 867.466271] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-ee096c9d-cf7f-41b7-a4e4-dadeeb65a307 tempest-ServersNegativeTestMultiTenantJSON-2128819833 tempest-ServersNegativeTestMultiTenantJSON-2128819833-project-member] [instance: 4bba7448-69f7-4764-9ae6-eb6585f71515] Instance destroyed {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 867.466381] env[62070]: INFO nova.compute.manager [None req-ee096c9d-cf7f-41b7-a4e4-dadeeb65a307 tempest-ServersNegativeTestMultiTenantJSON-2128819833 tempest-ServersNegativeTestMultiTenantJSON-2128819833-project-member] [instance: 4bba7448-69f7-4764-9ae6-eb6585f71515] Took 1.35 seconds to destroy the instance on the hypervisor. [ 867.466674] env[62070]: DEBUG oslo.service.loopingcall [None req-ee096c9d-cf7f-41b7-a4e4-dadeeb65a307 tempest-ServersNegativeTestMultiTenantJSON-2128819833 tempest-ServersNegativeTestMultiTenantJSON-2128819833-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 867.466961] env[62070]: DEBUG nova.compute.manager [-] [instance: 4bba7448-69f7-4764-9ae6-eb6585f71515] Deallocating network for instance {{(pid=62070) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 867.467290] env[62070]: DEBUG nova.network.neutron [-] [instance: 4bba7448-69f7-4764-9ae6-eb6585f71515] deallocate_for_instance() {{(pid=62070) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 867.559775] env[62070]: INFO nova.compute.manager [None req-58349f23-6bf8-4092-ac97-bf388655a094 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: fb054a32-c1aa-4884-a087-da5ad34cf3c4] Took 35.18 seconds to build instance. [ 867.640798] env[62070]: DEBUG oslo_vmware.api [None req-6adc601d-ffcb-4677-99aa-bd0495d3c046 tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Task: {'id': task-1121922, 'name': PowerOffVM_Task, 'duration_secs': 0.245696} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 867.641136] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-6adc601d-ffcb-4677-99aa-bd0495d3c046 tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] [instance: 2368b649-f931-454c-92cc-971df4155d90] Powered off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 867.641340] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-6adc601d-ffcb-4677-99aa-bd0495d3c046 tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] [instance: 2368b649-f931-454c-92cc-971df4155d90] Unregistering the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 867.641705] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9301e7f6-112e-4b16-a07c-14e62dcf41bd {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.710063] env[62070]: DEBUG oslo_concurrency.lockutils [None req-cdabb9ea-ead2-4381-8d7a-de546f5dce91 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Releasing lock "refresh_cache-58146b84-7589-4f21-bdab-605cee535e55" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 867.710480] env[62070]: DEBUG nova.compute.manager [None req-cdabb9ea-ead2-4381-8d7a-de546f5dce91 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 58146b84-7589-4f21-bdab-605cee535e55] Instance network_info: |[{"id": "f1b26c6a-d4b8-49c8-b247-27a2e9e76076", "address": "fa:16:3e:56:f2:fd", "network": {"id": "5f4568f0-c3e8-497f-b7d6-8d92db2f4066", "bridge": "br-int", "label": "tempest-ImagesTestJSON-24352632-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "eac8e5edc8f14fff89aba7c8cb6cac5d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "54495d8d-2696-4f65-b925-e567abdc205f", "external-id": "nsx-vlan-transportzone-220", "segmentation_id": 220, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf1b26c6a-d4", "ovs_interfaceid": "f1b26c6a-d4b8-49c8-b247-27a2e9e76076", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 867.712167] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-cdabb9ea-ead2-4381-8d7a-de546f5dce91 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 58146b84-7589-4f21-bdab-605cee535e55] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:56:f2:fd', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '54495d8d-2696-4f65-b925-e567abdc205f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f1b26c6a-d4b8-49c8-b247-27a2e9e76076', 'vif_model': 'vmxnet3'}] {{(pid=62070) 
build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 867.721173] env[62070]: DEBUG oslo.service.loopingcall [None req-cdabb9ea-ead2-4381-8d7a-de546f5dce91 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 867.721449] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-6adc601d-ffcb-4677-99aa-bd0495d3c046 tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] [instance: 2368b649-f931-454c-92cc-971df4155d90] Unregistered the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 867.721781] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-6adc601d-ffcb-4677-99aa-bd0495d3c046 tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] [instance: 2368b649-f931-454c-92cc-971df4155d90] Deleting contents of the VM from datastore datastore2 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 867.722100] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-6adc601d-ffcb-4677-99aa-bd0495d3c046 tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Deleting the datastore file [datastore2] 2368b649-f931-454c-92cc-971df4155d90 {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 867.723034] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 58146b84-7589-4f21-bdab-605cee535e55] Creating VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 867.723857] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ad7b576f-57d3-4f67-b074-286e880202db {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.726054] env[62070]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-27d5e7c7-0dd6-4973-a9ce-b64d22e7572f {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.750846] env[62070]: DEBUG oslo_vmware.api [None req-6adc601d-ffcb-4677-99aa-bd0495d3c046 tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Waiting for the task: (returnval){ [ 867.750846] env[62070]: value = "task-1121925" [ 867.750846] env[62070]: _type = "Task" [ 867.750846] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 867.751204] env[62070]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 867.751204] env[62070]: value = "task-1121926" [ 867.751204] env[62070]: _type = "Task" [ 867.751204] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 867.766054] env[62070]: DEBUG oslo_vmware.api [None req-6adc601d-ffcb-4677-99aa-bd0495d3c046 tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Task: {'id': task-1121925, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 867.766406] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1121926, 'name': CreateVM_Task} progress is 5%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 867.858607] env[62070]: DEBUG oslo_vmware.api [None req-3079b572-a07c-4d35-8725-af61f5219860 tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] Task: {'id': task-1121923, 'name': ReconfigVM_Task, 'duration_secs': 0.50341} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 867.858968] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-3079b572-a07c-4d35-8725-af61f5219860 tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] [instance: 61ab347d-1342-4f59-8955-10d575993b77] Reconfigured VM instance instance-00000046 to attach disk [datastore2] 61ab347d-1342-4f59-8955-10d575993b77/61ab347d-1342-4f59-8955-10d575993b77.vmdk or device None with type sparse {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 867.859682] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-77aaf452-597c-42fa-ad36-5a6003388afd {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.866051] env[62070]: DEBUG oslo_vmware.api [None req-3079b572-a07c-4d35-8725-af61f5219860 tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] Waiting for the task: (returnval){ [ 867.866051] env[62070]: value = "task-1121927" [ 867.866051] env[62070]: _type = "Task" [ 867.866051] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 867.876984] env[62070]: DEBUG oslo_vmware.api [None req-3079b572-a07c-4d35-8725-af61f5219860 tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] Task: {'id': task-1121927, 'name': Rename_Task} progress is 5%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 867.964205] env[62070]: DEBUG oslo_concurrency.lockutils [None req-bffd3cab-e369-40e8-ad65-4e451f071c3d tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Lock "242cf24f-f9e0-49ca-9b3e-a2b88b3cdcdc" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 25.597s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 867.964205] env[62070]: DEBUG oslo_vmware.api [None req-5285cf1a-979a-4750-b882-909847338032 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': task-1121920, 'name': PowerOnVM_Task, 'duration_secs': 0.609575} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 867.964205] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-5285cf1a-979a-4750-b882-909847338032 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: c16d175c-0b23-4f72-bdb0-844c6f80fd32] Powered on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 867.966344] env[62070]: DEBUG nova.compute.manager [None req-5285cf1a-979a-4750-b882-909847338032 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: c16d175c-0b23-4f72-bdb0-844c6f80fd32] Checking state {{(pid=62070) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 867.976921] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b1dba50-dbc1-4894-b40f-7010d5231d84 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.002996] env[62070]: DEBUG nova.compute.manager [req-f505519d-5b88-46d6-8602-062f7eeb922f req-e9319bac-69b7-48ca-a113-6eeec4e1e90a service nova] [instance: 58146b84-7589-4f21-bdab-605cee535e55] Received event network-changed-f1b26c6a-d4b8-49c8-b247-27a2e9e76076 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 868.003478] env[62070]: DEBUG nova.compute.manager [req-f505519d-5b88-46d6-8602-062f7eeb922f req-e9319bac-69b7-48ca-a113-6eeec4e1e90a service nova] [instance: 58146b84-7589-4f21-bdab-605cee535e55] Refreshing instance network info cache due to event network-changed-f1b26c6a-d4b8-49c8-b247-27a2e9e76076. {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 868.003938] env[62070]: DEBUG oslo_concurrency.lockutils [req-f505519d-5b88-46d6-8602-062f7eeb922f req-e9319bac-69b7-48ca-a113-6eeec4e1e90a service nova] Acquiring lock "refresh_cache-58146b84-7589-4f21-bdab-605cee535e55" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 868.004316] env[62070]: DEBUG oslo_concurrency.lockutils [req-f505519d-5b88-46d6-8602-062f7eeb922f req-e9319bac-69b7-48ca-a113-6eeec4e1e90a service nova] Acquired lock "refresh_cache-58146b84-7589-4f21-bdab-605cee535e55" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 868.004666] env[62070]: DEBUG nova.network.neutron [req-f505519d-5b88-46d6-8602-062f7eeb922f req-e9319bac-69b7-48ca-a113-6eeec4e1e90a service nova] [instance: 58146b84-7589-4f21-bdab-605cee535e55] Refreshing network info cache for port f1b26c6a-d4b8-49c8-b247-27a2e9e76076 {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 868.067711] env[62070]: DEBUG oslo_concurrency.lockutils [None req-58349f23-6bf8-4092-ac97-bf388655a094 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Lock "fb054a32-c1aa-4884-a087-da5ad34cf3c4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 36.704s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 868.862469] env[62070]: DEBUG nova.network.neutron [-] [instance: 4bba7448-69f7-4764-9ae6-eb6585f71515] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info 
/opt/stack/nova/nova/network/neutron.py:116}} [ 868.864893] env[62070]: INFO nova.compute.manager [None req-5285cf1a-979a-4750-b882-909847338032 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: c16d175c-0b23-4f72-bdb0-844c6f80fd32] bringing vm to original state: 'stopped' [ 868.891643] env[62070]: DEBUG oslo_concurrency.lockutils [None req-e9c130d3-ff3e-4741-9c51-ba262ee7aeaa tempest-ServerPasswordTestJSON-1580526241 tempest-ServerPasswordTestJSON-1580526241-project-member] Acquiring lock "e51d0146-502a-4ace-856e-b0dbcc11edea" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 868.892879] env[62070]: DEBUG oslo_concurrency.lockutils [None req-e9c130d3-ff3e-4741-9c51-ba262ee7aeaa tempest-ServerPasswordTestJSON-1580526241 tempest-ServerPasswordTestJSON-1580526241-project-member] Lock "e51d0146-502a-4ace-856e-b0dbcc11edea" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 868.896284] env[62070]: DEBUG oslo_concurrency.lockutils [None req-c5084f72-5a75-4757-ac99-35f7f4d596f6 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Acquiring lock "4a5f644a-1670-4c6b-a762-f87f1ee4cce5" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 868.896681] env[62070]: DEBUG oslo_concurrency.lockutils [None req-c5084f72-5a75-4757-ac99-35f7f4d596f6 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Lock "4a5f644a-1670-4c6b-a762-f87f1ee4cce5" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 868.916010] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1121926, 'name': CreateVM_Task, 'duration_secs': 0.368595} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 868.916356] env[62070]: DEBUG oslo_vmware.api [None req-6adc601d-ffcb-4677-99aa-bd0495d3c046 tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Task: {'id': task-1121925, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.229277} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 868.917177] env[62070]: DEBUG oslo_vmware.api [None req-3079b572-a07c-4d35-8725-af61f5219860 tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] Task: {'id': task-1121927, 'name': Rename_Task, 'duration_secs': 0.167724} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 868.918814] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 58146b84-7589-4f21-bdab-605cee535e55] Created VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 868.918814] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-6adc601d-ffcb-4677-99aa-bd0495d3c046 tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Deleted the datastore file {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 868.919181] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-6adc601d-ffcb-4677-99aa-bd0495d3c046 tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] [instance: 2368b649-f931-454c-92cc-971df4155d90] Deleted contents of the VM from datastore datastore2 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 868.919441] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-6adc601d-ffcb-4677-99aa-bd0495d3c046 tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] [instance: 2368b649-f931-454c-92cc-971df4155d90] Instance destroyed {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 868.919670] env[62070]: INFO nova.compute.manager [None req-6adc601d-ffcb-4677-99aa-bd0495d3c046 tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] [instance: 2368b649-f931-454c-92cc-971df4155d90] Took 1.82 seconds to destroy the instance on the hypervisor. [ 868.920212] env[62070]: DEBUG oslo.service.loopingcall [None req-6adc601d-ffcb-4677-99aa-bd0495d3c046 tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 868.920424] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-3079b572-a07c-4d35-8725-af61f5219860 tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] [instance: 61ab347d-1342-4f59-8955-10d575993b77] Powering on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 868.924934] env[62070]: DEBUG oslo_concurrency.lockutils [None req-cdabb9ea-ead2-4381-8d7a-de546f5dce91 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f721b0bd-3ff9-47e3-950c-4df5b98cd172" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 868.925492] env[62070]: DEBUG oslo_concurrency.lockutils [None req-cdabb9ea-ead2-4381-8d7a-de546f5dce91 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f721b0bd-3ff9-47e3-950c-4df5b98cd172" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 868.926123] env[62070]: DEBUG oslo_concurrency.lockutils [None req-cdabb9ea-ead2-4381-8d7a-de546f5dce91 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f721b0bd-3ff9-47e3-950c-4df5b98cd172" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 868.926555] env[62070]: DEBUG nova.compute.manager [-] [instance: 2368b649-f931-454c-92cc-971df4155d90] Deallocating network for instance {{(pid=62070) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 868.926669] env[62070]: DEBUG nova.network.neutron [-] [instance: 2368b649-f931-454c-92cc-971df4155d90] deallocate_for_instance() {{(pid=62070) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 868.928336] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-38ed2611-73e0-4814-9a7b-689dad5501ee {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.930402] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f3561cd5-dac1-4d37-b7f2-00e0796bc053 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.935991] env[62070]: DEBUG oslo_vmware.api [None req-cdabb9ea-ead2-4381-8d7a-de546f5dce91 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Waiting for the task: (returnval){ [ 868.935991] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]52cdce72-ad7a-cbd8-78fb-2faa974d0016" [ 868.935991] env[62070]: _type = "Task" [ 868.935991] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 868.940846] env[62070]: DEBUG oslo_vmware.api [None req-3079b572-a07c-4d35-8725-af61f5219860 tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] Waiting for the task: (returnval){ [ 868.940846] env[62070]: value = "task-1121928" [ 868.940846] env[62070]: _type = "Task" [ 868.940846] env[62070]: } to complete. 
{{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 868.951840] env[62070]: DEBUG oslo_vmware.api [None req-cdabb9ea-ead2-4381-8d7a-de546f5dce91 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52cdce72-ad7a-cbd8-78fb-2faa974d0016, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.954930] env[62070]: DEBUG oslo_vmware.api [None req-3079b572-a07c-4d35-8725-af61f5219860 tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] Task: {'id': task-1121928, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.210280] env[62070]: DEBUG nova.network.neutron [req-f505519d-5b88-46d6-8602-062f7eeb922f req-e9319bac-69b7-48ca-a113-6eeec4e1e90a service nova] [instance: 58146b84-7589-4f21-bdab-605cee535e55] Updated VIF entry in instance network info cache for port f1b26c6a-d4b8-49c8-b247-27a2e9e76076. {{(pid=62070) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 869.210717] env[62070]: DEBUG nova.network.neutron [req-f505519d-5b88-46d6-8602-062f7eeb922f req-e9319bac-69b7-48ca-a113-6eeec4e1e90a service nova] [instance: 58146b84-7589-4f21-bdab-605cee535e55] Updating instance_info_cache with network_info: [{"id": "f1b26c6a-d4b8-49c8-b247-27a2e9e76076", "address": "fa:16:3e:56:f2:fd", "network": {"id": "5f4568f0-c3e8-497f-b7d6-8d92db2f4066", "bridge": "br-int", "label": "tempest-ImagesTestJSON-24352632-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "eac8e5edc8f14fff89aba7c8cb6cac5d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "54495d8d-2696-4f65-b925-e567abdc205f", "external-id": "nsx-vlan-transportzone-220", "segmentation_id": 220, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf1b26c6a-d4", "ovs_interfaceid": "f1b26c6a-d4b8-49c8-b247-27a2e9e76076", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 869.239209] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed22df67-e563-4d64-adc3-05ba7cd38767 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.247561] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b275f44-1e1f-49ae-b31d-69abd689fd35 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.282216] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b29a6b5-7758-4529-8ded-32bbbbf0d8f1 {{(pid=62070) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.289944] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b6edd35-3008-4614-a273-4e0c2647341f {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.304421] env[62070]: DEBUG nova.compute.provider_tree [None req-759bf497-5e25-4b21-809d-f1952bc85c78 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 869.371980] env[62070]: INFO nova.compute.manager [-] [instance: 4bba7448-69f7-4764-9ae6-eb6585f71515] Took 1.90 seconds to deallocate network for instance. [ 869.392023] env[62070]: DEBUG oslo_concurrency.lockutils [None req-3a0a24c1-b1ea-482f-9c7a-723bd1e98ca4 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Acquiring lock "359ae9f2-a907-459e-99b9-3e043d5d015f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 869.392825] env[62070]: DEBUG oslo_concurrency.lockutils [None req-3a0a24c1-b1ea-482f-9c7a-723bd1e98ca4 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Lock "359ae9f2-a907-459e-99b9-3e043d5d015f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 869.393181] env[62070]: DEBUG oslo_concurrency.lockutils [None req-3a0a24c1-b1ea-482f-9c7a-723bd1e98ca4 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Acquiring lock "359ae9f2-a907-459e-99b9-3e043d5d015f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 869.393459] env[62070]: DEBUG oslo_concurrency.lockutils [None req-3a0a24c1-b1ea-482f-9c7a-723bd1e98ca4 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Lock "359ae9f2-a907-459e-99b9-3e043d5d015f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 869.393609] env[62070]: DEBUG oslo_concurrency.lockutils [None req-3a0a24c1-b1ea-482f-9c7a-723bd1e98ca4 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Lock "359ae9f2-a907-459e-99b9-3e043d5d015f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 869.396639] env[62070]: DEBUG nova.compute.manager [None req-e9c130d3-ff3e-4741-9c51-ba262ee7aeaa tempest-ServerPasswordTestJSON-1580526241 tempest-ServerPasswordTestJSON-1580526241-project-member] [instance: e51d0146-502a-4ace-856e-b0dbcc11edea] Starting instance... 
{{(pid=62070) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 869.404022] env[62070]: INFO nova.compute.manager [None req-3a0a24c1-b1ea-482f-9c7a-723bd1e98ca4 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] [instance: 359ae9f2-a907-459e-99b9-3e043d5d015f] Terminating instance [ 869.404413] env[62070]: DEBUG nova.compute.manager [None req-3a0a24c1-b1ea-482f-9c7a-723bd1e98ca4 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] [instance: 359ae9f2-a907-459e-99b9-3e043d5d015f] Start destroying the instance on the hypervisor. {{(pid=62070) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 869.404725] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-3a0a24c1-b1ea-482f-9c7a-723bd1e98ca4 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] [instance: 359ae9f2-a907-459e-99b9-3e043d5d015f] Destroying instance {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 869.406162] env[62070]: DEBUG nova.compute.utils [None req-c5084f72-5a75-4757-ac99-35f7f4d596f6 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Using /dev/sd instead of None {{(pid=62070) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 869.408547] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b79180f8-768e-40ed-b80b-0d86faedde22 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.417445] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-3a0a24c1-b1ea-482f-9c7a-723bd1e98ca4 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] [instance: 359ae9f2-a907-459e-99b9-3e043d5d015f] Powering off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 869.417968] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-39ea89c4-98e9-4b66-8aeb-09a943e10319 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.426460] env[62070]: DEBUG oslo_vmware.api [None req-3a0a24c1-b1ea-482f-9c7a-723bd1e98ca4 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Waiting for the task: (returnval){ [ 869.426460] env[62070]: value = "task-1121929" [ 869.426460] env[62070]: _type = "Task" [ 869.426460] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 869.437321] env[62070]: DEBUG oslo_vmware.api [None req-3a0a24c1-b1ea-482f-9c7a-723bd1e98ca4 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Task: {'id': task-1121929, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.449631] env[62070]: DEBUG oslo_concurrency.lockutils [None req-cdabb9ea-ead2-4381-8d7a-de546f5dce91 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f721b0bd-3ff9-47e3-950c-4df5b98cd172" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 869.449911] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-cdabb9ea-ead2-4381-8d7a-de546f5dce91 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 58146b84-7589-4f21-bdab-605cee535e55] Processing image f721b0bd-3ff9-47e3-950c-4df5b98cd172 {{(pid=62070) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 869.450178] env[62070]: DEBUG oslo_concurrency.lockutils [None req-cdabb9ea-ead2-4381-8d7a-de546f5dce91 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f721b0bd-3ff9-47e3-950c-4df5b98cd172/f721b0bd-3ff9-47e3-950c-4df5b98cd172.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 869.450337] env[62070]: DEBUG oslo_concurrency.lockutils [None req-cdabb9ea-ead2-4381-8d7a-de546f5dce91 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f721b0bd-3ff9-47e3-950c-4df5b98cd172/f721b0bd-3ff9-47e3-950c-4df5b98cd172.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 869.450541] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-cdabb9ea-ead2-4381-8d7a-de546f5dce91 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 869.451200] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-aa3b844f-8cf9-432a-bc3a-a4c0147ec3bf {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.458689] env[62070]: DEBUG oslo_vmware.api [None req-3079b572-a07c-4d35-8725-af61f5219860 tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] Task: {'id': task-1121928, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.461155] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-cdabb9ea-ead2-4381-8d7a-de546f5dce91 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 869.461349] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-cdabb9ea-ead2-4381-8d7a-de546f5dce91 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62070) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 869.462084] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ea209f6e-a66c-486c-b29f-bdb4da440810 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.467324] env[62070]: DEBUG oslo_vmware.api [None req-cdabb9ea-ead2-4381-8d7a-de546f5dce91 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Waiting for the task: (returnval){ [ 869.467324] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]52a56cdc-445f-e2e0-e120-2bfe085e844f" [ 869.467324] env[62070]: _type = "Task" [ 869.467324] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 869.475905] env[62070]: DEBUG oslo_vmware.api [None req-cdabb9ea-ead2-4381-8d7a-de546f5dce91 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52a56cdc-445f-e2e0-e120-2bfe085e844f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.718760] env[62070]: DEBUG oslo_concurrency.lockutils [req-f505519d-5b88-46d6-8602-062f7eeb922f req-e9319bac-69b7-48ca-a113-6eeec4e1e90a service nova] Releasing lock "refresh_cache-58146b84-7589-4f21-bdab-605cee535e55" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 869.785706] env[62070]: DEBUG nova.network.neutron [-] [instance: 2368b649-f931-454c-92cc-971df4155d90] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 869.807582] env[62070]: DEBUG nova.scheduler.client.report [None req-759bf497-5e25-4b21-809d-f1952bc85c78 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 869.846064] env[62070]: DEBUG nova.compute.manager [req-688836c4-d64a-42bc-955c-805cf5bca172 req-2848aee2-305f-4e19-9e99-8bfd8aa3bea2 service nova] [instance: 2368b649-f931-454c-92cc-971df4155d90] Received event network-vif-deleted-903ef3d7-deb3-4330-800a-6c593704799d {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 869.877444] env[62070]: DEBUG oslo_concurrency.lockutils [None req-5285cf1a-979a-4750-b882-909847338032 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Acquiring lock "c16d175c-0b23-4f72-bdb0-844c6f80fd32" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 869.877902] env[62070]: DEBUG oslo_concurrency.lockutils [None req-5285cf1a-979a-4750-b882-909847338032 
tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Lock "c16d175c-0b23-4f72-bdb0-844c6f80fd32" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 869.877969] env[62070]: DEBUG nova.compute.manager [None req-5285cf1a-979a-4750-b882-909847338032 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: c16d175c-0b23-4f72-bdb0-844c6f80fd32] Checking state {{(pid=62070) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 869.879117] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7344fb0-3525-496a-ac70-4f13fc89a1ef {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.885395] env[62070]: DEBUG oslo_concurrency.lockutils [None req-ee096c9d-cf7f-41b7-a4e4-dadeeb65a307 tempest-ServersNegativeTestMultiTenantJSON-2128819833 tempest-ServersNegativeTestMultiTenantJSON-2128819833-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 869.889703] env[62070]: DEBUG nova.compute.manager [None req-5285cf1a-979a-4750-b882-909847338032 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: c16d175c-0b23-4f72-bdb0-844c6f80fd32] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62070) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3368}} [ 869.891797] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-5285cf1a-979a-4750-b882-909847338032 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: c16d175c-0b23-4f72-bdb0-844c6f80fd32] Powering off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 869.893968] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-86b27e31-cc4f-4568-b2e5-57066e11416c {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.899715] env[62070]: DEBUG oslo_vmware.api [None req-5285cf1a-979a-4750-b882-909847338032 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Waiting for the task: (returnval){ [ 869.899715] env[62070]: value = "task-1121930" [ 869.899715] env[62070]: _type = "Task" [ 869.899715] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 869.910552] env[62070]: DEBUG oslo_vmware.api [None req-5285cf1a-979a-4750-b882-909847338032 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': task-1121930, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.913598] env[62070]: DEBUG oslo_concurrency.lockutils [None req-c5084f72-5a75-4757-ac99-35f7f4d596f6 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Lock "4a5f644a-1670-4c6b-a762-f87f1ee4cce5" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.017s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 869.925032] env[62070]: DEBUG oslo_concurrency.lockutils [None req-e9c130d3-ff3e-4741-9c51-ba262ee7aeaa tempest-ServerPasswordTestJSON-1580526241 tempest-ServerPasswordTestJSON-1580526241-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 869.935160] env[62070]: DEBUG oslo_vmware.api [None req-3a0a24c1-b1ea-482f-9c7a-723bd1e98ca4 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Task: {'id': task-1121929, 'name': PowerOffVM_Task, 'duration_secs': 0.306775} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 869.935429] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-3a0a24c1-b1ea-482f-9c7a-723bd1e98ca4 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] [instance: 359ae9f2-a907-459e-99b9-3e043d5d015f] Powered off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 869.935600] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-3a0a24c1-b1ea-482f-9c7a-723bd1e98ca4 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] [instance: 359ae9f2-a907-459e-99b9-3e043d5d015f] Unregistering the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 869.935855] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-60a4270a-81ec-4ad4-bc07-e19f118de734 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.955216] env[62070]: DEBUG oslo_vmware.api [None req-3079b572-a07c-4d35-8725-af61f5219860 tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] Task: {'id': task-1121928, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.977916] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-cdabb9ea-ead2-4381-8d7a-de546f5dce91 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 58146b84-7589-4f21-bdab-605cee535e55] Preparing fetch location {{(pid=62070) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 869.978289] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-cdabb9ea-ead2-4381-8d7a-de546f5dce91 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 58146b84-7589-4f21-bdab-605cee535e55] Fetch image to [datastore2] OSTACK_IMG_c43a14dc-6d83-4b27-acc5-a841457df904/OSTACK_IMG_c43a14dc-6d83-4b27-acc5-a841457df904.vmdk {{(pid=62070) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 869.978565] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-cdabb9ea-ead2-4381-8d7a-de546f5dce91 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 58146b84-7589-4f21-bdab-605cee535e55] Downloading stream optimized image f721b0bd-3ff9-47e3-950c-4df5b98cd172 to [datastore2] OSTACK_IMG_c43a14dc-6d83-4b27-acc5-a841457df904/OSTACK_IMG_c43a14dc-6d83-4b27-acc5-a841457df904.vmdk on the data store datastore2 as vApp {{(pid=62070) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 869.978816] env[62070]: DEBUG nova.virt.vmwareapi.images [None req-cdabb9ea-ead2-4381-8d7a-de546f5dce91 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 58146b84-7589-4f21-bdab-605cee535e55] Downloading image file data f721b0bd-3ff9-47e3-950c-4df5b98cd172 to the ESX as VM named 'OSTACK_IMG_c43a14dc-6d83-4b27-acc5-a841457df904' {{(pid=62070) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 870.028821] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-3a0a24c1-b1ea-482f-9c7a-723bd1e98ca4 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] [instance: 359ae9f2-a907-459e-99b9-3e043d5d015f] Unregistered the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 870.029073] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-3a0a24c1-b1ea-482f-9c7a-723bd1e98ca4 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] [instance: 359ae9f2-a907-459e-99b9-3e043d5d015f] Deleting contents of the VM from datastore datastore2 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 870.029263] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-3a0a24c1-b1ea-482f-9c7a-723bd1e98ca4 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Deleting the datastore file [datastore2] 359ae9f2-a907-459e-99b9-3e043d5d015f {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 870.029528] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c16f4ac8-f3ba-4160-b432-bb71a5739e1b {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.035644] env[62070]: DEBUG oslo_vmware.api [None req-3a0a24c1-b1ea-482f-9c7a-723bd1e98ca4 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Waiting for the task: (returnval){ [ 
870.035644] env[62070]: value = "task-1121932" [ 870.035644] env[62070]: _type = "Task" [ 870.035644] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 870.043814] env[62070]: DEBUG oslo_vmware.api [None req-3a0a24c1-b1ea-482f-9c7a-723bd1e98ca4 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Task: {'id': task-1121932, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.059985] env[62070]: DEBUG oslo_vmware.rw_handles [None req-cdabb9ea-ead2-4381-8d7a-de546f5dce91 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 870.059985] env[62070]: value = "resgroup-9" [ 870.059985] env[62070]: _type = "ResourcePool" [ 870.059985] env[62070]: }. {{(pid=62070) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 870.060344] env[62070]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-e4465c47-0e09-4d0d-860b-f4d1de9af876 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.080682] env[62070]: DEBUG oslo_vmware.rw_handles [None req-cdabb9ea-ead2-4381-8d7a-de546f5dce91 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Lease: (returnval){ [ 870.080682] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]52a98a2a-b058-cb93-2fc4-e6388d9a0caf" [ 870.080682] env[62070]: _type = "HttpNfcLease" [ 870.080682] env[62070]: } obtained for vApp import into resource pool (val){ [ 870.080682] env[62070]: value = "resgroup-9" [ 870.080682] env[62070]: _type = "ResourcePool" [ 870.080682] env[62070]: }. {{(pid=62070) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 870.080960] env[62070]: DEBUG oslo_vmware.api [None req-cdabb9ea-ead2-4381-8d7a-de546f5dce91 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Waiting for the lease: (returnval){ [ 870.080960] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]52a98a2a-b058-cb93-2fc4-e6388d9a0caf" [ 870.080960] env[62070]: _type = "HttpNfcLease" [ 870.080960] env[62070]: } to be ready. {{(pid=62070) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 870.087366] env[62070]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 870.087366] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]52a98a2a-b058-cb93-2fc4-e6388d9a0caf" [ 870.087366] env[62070]: _type = "HttpNfcLease" [ 870.087366] env[62070]: } is initializing. {{(pid=62070) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 870.288615] env[62070]: INFO nova.compute.manager [-] [instance: 2368b649-f931-454c-92cc-971df4155d90] Took 1.36 seconds to deallocate network for instance. 
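A minimal sketch of the invoke-then-poll pattern that the PowerOffVM_Task, UnregisterVM and DeleteDatastoreFile_Task entries above trace: a SOAP method is invoked through the session, the returned task reference is handed to wait_for_task, and the poller emits the "progress is N%" / "completed successfully" lines seen here. The connected `session` and the managed-object reference `vm_ref` are assumed placeholders, not values taken from this log, and the function is an illustrative sketch rather than the Nova code that produced these entries.

    from oslo_vmware import api

    def power_off_vm(session: api.VMwareAPISession, vm_ref) -> None:
        # Issue VirtualMachine.PowerOffVM_Task; the call returns a Task moref.
        task = session.invoke_api(session.vim, "PowerOffVM_Task", vm_ref)
        # Poll the task until it succeeds (raises on error); this is the loop
        # that logs the periodic "progress is N%" lines in the trace above.
        session.wait_for_task(task)
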
[ 870.312994] env[62070]: DEBUG oslo_concurrency.lockutils [None req-759bf497-5e25-4b21-809d-f1952bc85c78 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.901s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 870.316854] env[62070]: DEBUG oslo_concurrency.lockutils [None req-7045d499-6270-457a-a3fe-e34d5ae00193 tempest-ServerGroupTestJSON-951679134 tempest-ServerGroupTestJSON-951679134-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.862s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 870.319152] env[62070]: INFO nova.compute.claims [None req-7045d499-6270-457a-a3fe-e34d5ae00193 tempest-ServerGroupTestJSON-951679134 tempest-ServerGroupTestJSON-951679134-project-member] [instance: 71c98ac8-4149-448b-bf0c-3bfdcc8f50ef] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 870.338379] env[62070]: INFO nova.scheduler.client.report [None req-759bf497-5e25-4b21-809d-f1952bc85c78 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Deleted allocations for instance 65fe3720-95cb-4620-b1c7-eae9e3bc3943 [ 870.381581] env[62070]: DEBUG nova.compute.manager [req-8a60d764-4926-4d48-a284-672eb9692ec8 req-860a7af8-863a-4e8c-a3a0-5173c7eea18f service nova] [instance: 4bba7448-69f7-4764-9ae6-eb6585f71515] Received event network-vif-deleted-b1394ea1-e455-496f-9aa1-6eacc606ec0a {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 870.411106] env[62070]: DEBUG oslo_vmware.api [None req-5285cf1a-979a-4750-b882-909847338032 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': task-1121930, 'name': PowerOffVM_Task, 'duration_secs': 0.300076} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 870.411106] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-5285cf1a-979a-4750-b882-909847338032 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: c16d175c-0b23-4f72-bdb0-844c6f80fd32] Powered off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 870.411106] env[62070]: DEBUG nova.compute.manager [None req-5285cf1a-979a-4750-b882-909847338032 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: c16d175c-0b23-4f72-bdb0-844c6f80fd32] Checking state {{(pid=62070) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 870.412733] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f965f2d-bdec-41bd-975e-25de516274e5 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.458155] env[62070]: DEBUG oslo_vmware.api [None req-3079b572-a07c-4d35-8725-af61f5219860 tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] Task: {'id': task-1121928, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.550237] env[62070]: DEBUG oslo_vmware.api [None req-3a0a24c1-b1ea-482f-9c7a-723bd1e98ca4 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Task: {'id': task-1121932, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.265461} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 870.550521] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-3a0a24c1-b1ea-482f-9c7a-723bd1e98ca4 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Deleted the datastore file {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 870.550736] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-3a0a24c1-b1ea-482f-9c7a-723bd1e98ca4 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] [instance: 359ae9f2-a907-459e-99b9-3e043d5d015f] Deleted contents of the VM from datastore datastore2 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 870.550934] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-3a0a24c1-b1ea-482f-9c7a-723bd1e98ca4 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] [instance: 359ae9f2-a907-459e-99b9-3e043d5d015f] Instance destroyed {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 870.551158] env[62070]: INFO nova.compute.manager [None req-3a0a24c1-b1ea-482f-9c7a-723bd1e98ca4 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] [instance: 359ae9f2-a907-459e-99b9-3e043d5d015f] Took 1.15 seconds to destroy the instance on the hypervisor. [ 870.551420] env[62070]: DEBUG oslo.service.loopingcall [None req-3a0a24c1-b1ea-482f-9c7a-723bd1e98ca4 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 870.551624] env[62070]: DEBUG nova.compute.manager [-] [instance: 359ae9f2-a907-459e-99b9-3e043d5d015f] Deallocating network for instance {{(pid=62070) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 870.551719] env[62070]: DEBUG nova.network.neutron [-] [instance: 359ae9f2-a907-459e-99b9-3e043d5d015f] deallocate_for_instance() {{(pid=62070) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 870.592208] env[62070]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 870.592208] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]52a98a2a-b058-cb93-2fc4-e6388d9a0caf" [ 870.592208] env[62070]: _type = "HttpNfcLease" [ 870.592208] env[62070]: } is initializing. 
{{(pid=62070) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 870.676919] env[62070]: DEBUG oslo_concurrency.lockutils [None req-c5084f72-5a75-4757-ac99-35f7f4d596f6 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Acquiring lock "4a5f644a-1670-4c6b-a762-f87f1ee4cce5" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 870.677243] env[62070]: DEBUG oslo_concurrency.lockutils [None req-c5084f72-5a75-4757-ac99-35f7f4d596f6 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Lock "4a5f644a-1670-4c6b-a762-f87f1ee4cce5" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 870.677508] env[62070]: INFO nova.compute.manager [None req-c5084f72-5a75-4757-ac99-35f7f4d596f6 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] [instance: 4a5f644a-1670-4c6b-a762-f87f1ee4cce5] Attaching volume 0a76875f-e286-469c-8be6-887850e6ce36 to /dev/sdb [ 870.711339] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42e6177e-7c7d-40aa-b7a6-1f06805ad5df {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.720444] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0712fac1-c5ee-4b81-ab3c-15a2a6910386 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.736514] env[62070]: DEBUG nova.virt.block_device [None req-c5084f72-5a75-4757-ac99-35f7f4d596f6 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] [instance: 4a5f644a-1670-4c6b-a762-f87f1ee4cce5] Updating existing volume attachment record: 5740da1f-4910-4f1a-885c-bb0b705dec06 {{(pid=62070) _volume_attach /opt/stack/nova/nova/virt/block_device.py:679}} [ 870.795864] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6adc601d-ffcb-4677-99aa-bd0495d3c046 tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 870.849031] env[62070]: DEBUG oslo_concurrency.lockutils [None req-759bf497-5e25-4b21-809d-f1952bc85c78 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Lock "65fe3720-95cb-4620-b1c7-eae9e3bc3943" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 28.205s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 870.926183] env[62070]: DEBUG oslo_concurrency.lockutils [None req-5285cf1a-979a-4750-b882-909847338032 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Lock "c16d175c-0b23-4f72-bdb0-844c6f80fd32" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.048s {{(pid=62070) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 870.957126] env[62070]: DEBUG oslo_vmware.api [None req-3079b572-a07c-4d35-8725-af61f5219860 tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] Task: {'id': task-1121928, 'name': PowerOnVM_Task, 'duration_secs': 1.602622} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 870.957424] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-3079b572-a07c-4d35-8725-af61f5219860 tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] [instance: 61ab347d-1342-4f59-8955-10d575993b77] Powered on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 870.958034] env[62070]: DEBUG nova.compute.manager [None req-3079b572-a07c-4d35-8725-af61f5219860 tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] [instance: 61ab347d-1342-4f59-8955-10d575993b77] Checking state {{(pid=62070) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 870.958487] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81b84c2c-03d4-4b44-8317-1d17a0f6de84 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.089385] env[62070]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 871.089385] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]52a98a2a-b058-cb93-2fc4-e6388d9a0caf" [ 871.089385] env[62070]: _type = "HttpNfcLease" [ 871.089385] env[62070]: } is ready. {{(pid=62070) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 871.089681] env[62070]: DEBUG oslo_vmware.rw_handles [None req-cdabb9ea-ead2-4381-8d7a-de546f5dce91 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 871.089681] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]52a98a2a-b058-cb93-2fc4-e6388d9a0caf" [ 871.089681] env[62070]: _type = "HttpNfcLease" [ 871.089681] env[62070]: }. {{(pid=62070) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 871.090432] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c4161bd-adc8-496f-b4b6-d368405c7d63 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.098255] env[62070]: DEBUG oslo_vmware.rw_handles [None req-cdabb9ea-ead2-4381-8d7a-de546f5dce91 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/523c07b5-65ab-5a9b-da2b-c3cc783431ed/disk-0.vmdk from lease info. {{(pid=62070) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 871.098255] env[62070]: DEBUG oslo_vmware.rw_handles [None req-cdabb9ea-ead2-4381-8d7a-de546f5dce91 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Creating HTTP connection to write to file with size = 21334016 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/523c07b5-65ab-5a9b-da2b-c3cc783431ed/disk-0.vmdk. 
{{(pid=62070) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 871.167228] env[62070]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-1994cc62-9550-41f9-884c-2df4389ecc80 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.312098] env[62070]: DEBUG nova.network.neutron [-] [instance: 359ae9f2-a907-459e-99b9-3e043d5d015f] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 871.437910] env[62070]: DEBUG oslo_concurrency.lockutils [None req-5285cf1a-979a-4750-b882-909847338032 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 871.477581] env[62070]: DEBUG oslo_concurrency.lockutils [None req-3079b572-a07c-4d35-8725-af61f5219860 tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 871.751074] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0bf9236-7020-45f3-9f55-ee6117ee2350 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.763832] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06f6ad6c-1e3e-4913-92ac-6e26f75f33eb {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.804565] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b3e707f-bdd1-445f-88b1-955ae29c6d14 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.815674] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d50d5f15-b0b0-4c67-844b-771d44d151de {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.822612] env[62070]: INFO nova.compute.manager [-] [instance: 359ae9f2-a907-459e-99b9-3e043d5d015f] Took 1.27 seconds to deallocate network for instance. 
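A compact sketch of the HttpNfcLease lifecycle that the vApp-import entries above and below walk through: ImportVApp on the resource pool returns a lease, the lease is polled until it is ready, its info carries the datastore-side VMDK URL that the stream-optimized image bytes are written to, progress is reported with HttpNfcLeaseProgress, and HttpNfcLeaseComplete releases the lease. The `import_spec`, the moref arguments and the `upload` callable are assumed placeholders; this follows the oslo.vmware calls named in the trace rather than reproducing Nova's exact implementation.

    from oslo_vmware import api, vim_util

    def import_stream_optimized(session: api.VMwareAPISession, rp_ref,
                                folder_ref, import_spec, upload) -> None:
        # ResourcePool.ImportVApp hands back an HttpNfcLease for the new VM.
        lease = session.invoke_api(session.vim, "ImportVApp", rp_ref,
                                   spec=import_spec, folder=folder_ref)
        # Blocks until the lease reports the "ready" state seen in the log.
        session.wait_for_lease_ready(lease)
        # The lease info lists the NFC device URLs, e.g. .../disk-0.vmdk.
        info = session.invoke_api(vim_util, "get_object_property",
                                  session.vim, lease, "info")
        upload(info.deviceUrl[0].url)  # stream the image data to the host
        # Report completion and release the lease so vCenter finalizes the VM.
        session.invoke_api(session.vim, "HttpNfcLeaseProgress", lease, 100)
        session.invoke_api(session.vim, "HttpNfcLeaseComplete", lease)
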
[ 871.837344] env[62070]: DEBUG nova.compute.provider_tree [None req-7045d499-6270-457a-a3fe-e34d5ae00193 tempest-ServerGroupTestJSON-951679134 tempest-ServerGroupTestJSON-951679134-project-member] Updating inventory in ProviderTree for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 872.008550] env[62070]: DEBUG nova.compute.manager [req-f7e37fcd-30e1-4863-8028-7b5f96a5013b req-1439f9e2-38c9-4d68-bed4-37c6eb170036 service nova] [instance: 359ae9f2-a907-459e-99b9-3e043d5d015f] Received event network-vif-deleted-98c76f66-e1d2-463c-b1a1-7d539909e2a9 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 872.053801] env[62070]: DEBUG oslo_vmware.rw_handles [None req-cdabb9ea-ead2-4381-8d7a-de546f5dce91 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Completed reading data from the image iterator. {{(pid=62070) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 872.054155] env[62070]: DEBUG oslo_vmware.rw_handles [None req-cdabb9ea-ead2-4381-8d7a-de546f5dce91 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/523c07b5-65ab-5a9b-da2b-c3cc783431ed/disk-0.vmdk. {{(pid=62070) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 872.055158] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb9cfb0b-5101-42fa-aba6-f9efb76072f6 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.063652] env[62070]: DEBUG oslo_vmware.rw_handles [None req-cdabb9ea-ead2-4381-8d7a-de546f5dce91 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/523c07b5-65ab-5a9b-da2b-c3cc783431ed/disk-0.vmdk is in state: ready. {{(pid=62070) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 872.063984] env[62070]: DEBUG oslo_vmware.rw_handles [None req-cdabb9ea-ead2-4381-8d7a-de546f5dce91 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Releasing lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/523c07b5-65ab-5a9b-da2b-c3cc783431ed/disk-0.vmdk. 
{{(pid=62070) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 872.065510] env[62070]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-cb1ab51a-b2c2-49e7-acba-5724033d6029 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.104634] env[62070]: DEBUG nova.compute.manager [None req-95a98976-70b0-4bbd-a6bf-2286c5bf4562 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: fb054a32-c1aa-4884-a087-da5ad34cf3c4] Stashing vm_state: active {{(pid=62070) _prep_resize /opt/stack/nova/nova/compute/manager.py:5624}} [ 872.242479] env[62070]: DEBUG oslo_vmware.rw_handles [None req-cdabb9ea-ead2-4381-8d7a-de546f5dce91 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Closed VMDK write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/523c07b5-65ab-5a9b-da2b-c3cc783431ed/disk-0.vmdk. {{(pid=62070) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 872.242706] env[62070]: INFO nova.virt.vmwareapi.images [None req-cdabb9ea-ead2-4381-8d7a-de546f5dce91 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 58146b84-7589-4f21-bdab-605cee535e55] Downloaded image file data f721b0bd-3ff9-47e3-950c-4df5b98cd172 [ 872.243620] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aef14cc1-bc6a-4987-939a-b39e204c40d4 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.261025] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-de5e8637-464d-49b6-be86-e686e88b78de {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.286360] env[62070]: INFO nova.virt.vmwareapi.images [None req-cdabb9ea-ead2-4381-8d7a-de546f5dce91 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 58146b84-7589-4f21-bdab-605cee535e55] The imported VM was unregistered [ 872.288866] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-cdabb9ea-ead2-4381-8d7a-de546f5dce91 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 58146b84-7589-4f21-bdab-605cee535e55] Caching image {{(pid=62070) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 872.289188] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-cdabb9ea-ead2-4381-8d7a-de546f5dce91 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Creating directory with path [datastore2] devstack-image-cache_base/f721b0bd-3ff9-47e3-950c-4df5b98cd172 {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 872.289910] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-56612a7b-5986-49fc-a684-c7430fab6aa9 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.302274] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-cdabb9ea-ead2-4381-8d7a-de546f5dce91 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Created directory with path [datastore2] devstack-image-cache_base/f721b0bd-3ff9-47e3-950c-4df5b98cd172 {{(pid=62070) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 872.302937] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-cdabb9ea-ead2-4381-8d7a-de546f5dce91 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Moving virtual disk from [datastore2] OSTACK_IMG_c43a14dc-6d83-4b27-acc5-a841457df904/OSTACK_IMG_c43a14dc-6d83-4b27-acc5-a841457df904.vmdk to [datastore2] devstack-image-cache_base/f721b0bd-3ff9-47e3-950c-4df5b98cd172/f721b0bd-3ff9-47e3-950c-4df5b98cd172.vmdk. {{(pid=62070) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 872.302937] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-a411a637-3b68-4ac7-92ce-84f7f01ccc2f {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.310436] env[62070]: DEBUG oslo_vmware.api [None req-cdabb9ea-ead2-4381-8d7a-de546f5dce91 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Waiting for the task: (returnval){ [ 872.310436] env[62070]: value = "task-1121938" [ 872.310436] env[62070]: _type = "Task" [ 872.310436] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 872.318695] env[62070]: DEBUG oslo_vmware.api [None req-cdabb9ea-ead2-4381-8d7a-de546f5dce91 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Task: {'id': task-1121938, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 872.345062] env[62070]: DEBUG oslo_concurrency.lockutils [None req-3a0a24c1-b1ea-482f-9c7a-723bd1e98ca4 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 872.364828] env[62070]: ERROR nova.scheduler.client.report [None req-7045d499-6270-457a-a3fe-e34d5ae00193 tempest-ServerGroupTestJSON-951679134 tempest-ServerGroupTestJSON-951679134-project-member] [req-f93325b7-5dfb-4921-9793-555ce11771e7] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 21c7c111-1b69-4468-b2c4-5dd96014fbd6. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-f93325b7-5dfb-4921-9793-555ce11771e7"}]} [ 872.383117] env[62070]: DEBUG nova.scheduler.client.report [None req-7045d499-6270-457a-a3fe-e34d5ae00193 tempest-ServerGroupTestJSON-951679134 tempest-ServerGroupTestJSON-951679134-project-member] Refreshing inventories for resource provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 872.399682] env[62070]: DEBUG nova.scheduler.client.report [None req-7045d499-6270-457a-a3fe-e34d5ae00193 tempest-ServerGroupTestJSON-951679134 tempest-ServerGroupTestJSON-951679134-project-member] Updating ProviderTree inventory for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 872.399899] env[62070]: DEBUG nova.compute.provider_tree [None req-7045d499-6270-457a-a3fe-e34d5ae00193 tempest-ServerGroupTestJSON-951679134 tempest-ServerGroupTestJSON-951679134-project-member] Updating inventory in ProviderTree for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 872.414532] env[62070]: DEBUG nova.scheduler.client.report [None req-7045d499-6270-457a-a3fe-e34d5ae00193 tempest-ServerGroupTestJSON-951679134 tempest-ServerGroupTestJSON-951679134-project-member] Refreshing aggregate associations for resource provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6, aggregates: None {{(pid=62070) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 872.434370] env[62070]: DEBUG nova.scheduler.client.report [None req-7045d499-6270-457a-a3fe-e34d5ae00193 tempest-ServerGroupTestJSON-951679134 tempest-ServerGroupTestJSON-951679134-project-member] Refreshing trait associations for resource provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6, traits: COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO {{(pid=62070) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 872.470095] env[62070]: DEBUG oslo_concurrency.lockutils [None req-ed858abe-6e77-488f-b72a-5a07c1e7c264 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Acquiring lock "3d699ce5-4d21-48f3-8f17-0cd49aebf109" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 872.470343] env[62070]: DEBUG 
oslo_concurrency.lockutils [None req-ed858abe-6e77-488f-b72a-5a07c1e7c264 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Lock "3d699ce5-4d21-48f3-8f17-0cd49aebf109" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 872.624854] env[62070]: DEBUG oslo_concurrency.lockutils [None req-95a98976-70b0-4bbd-a6bf-2286c5bf4562 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 872.816405] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f615297e-2387-49e1-8862-2dceff751248 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.825235] env[62070]: DEBUG oslo_vmware.api [None req-cdabb9ea-ead2-4381-8d7a-de546f5dce91 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Task: {'id': task-1121938, 'name': MoveVirtualDisk_Task} progress is 12%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 872.828361] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6708ba77-2598-4ff4-8761-a583fd28a6ad {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.861919] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70ae35b5-e6fa-4a80-8737-6adec0c7cd02 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.870234] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-926901f1-5e01-44a0-8632-f963d779d7e5 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.884465] env[62070]: DEBUG nova.compute.provider_tree [None req-7045d499-6270-457a-a3fe-e34d5ae00193 tempest-ServerGroupTestJSON-951679134 tempest-ServerGroupTestJSON-951679134-project-member] Updating inventory in ProviderTree for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 872.892012] env[62070]: INFO nova.compute.manager [None req-1e327dae-ecdb-4e8f-89d3-84ae5f947ccb tempest-ServersAdmin275Test-485220002 tempest-ServersAdmin275Test-485220002-project-admin] [instance: 61ab347d-1342-4f59-8955-10d575993b77] Rebuilding instance [ 872.935933] env[62070]: DEBUG nova.compute.manager [None req-1e327dae-ecdb-4e8f-89d3-84ae5f947ccb tempest-ServersAdmin275Test-485220002 tempest-ServersAdmin275Test-485220002-project-admin] [instance: 61ab347d-1342-4f59-8955-10d575993b77] Checking state {{(pid=62070) 
_get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 872.936911] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee2e5ac6-d280-42dc-8669-091785a17414 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.979380] env[62070]: DEBUG nova.compute.manager [None req-ed858abe-6e77-488f-b72a-5a07c1e7c264 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: 3d699ce5-4d21-48f3-8f17-0cd49aebf109] Starting instance... {{(pid=62070) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 873.004501] env[62070]: DEBUG oslo_concurrency.lockutils [None req-fb77bedb-24c5-4fa2-9eae-0bbdb5984976 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Acquiring lock "c16d175c-0b23-4f72-bdb0-844c6f80fd32" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 873.004979] env[62070]: DEBUG oslo_concurrency.lockutils [None req-fb77bedb-24c5-4fa2-9eae-0bbdb5984976 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Lock "c16d175c-0b23-4f72-bdb0-844c6f80fd32" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 873.005419] env[62070]: DEBUG oslo_concurrency.lockutils [None req-fb77bedb-24c5-4fa2-9eae-0bbdb5984976 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Acquiring lock "c16d175c-0b23-4f72-bdb0-844c6f80fd32-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 873.005499] env[62070]: DEBUG oslo_concurrency.lockutils [None req-fb77bedb-24c5-4fa2-9eae-0bbdb5984976 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Lock "c16d175c-0b23-4f72-bdb0-844c6f80fd32-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 873.005648] env[62070]: DEBUG oslo_concurrency.lockutils [None req-fb77bedb-24c5-4fa2-9eae-0bbdb5984976 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Lock "c16d175c-0b23-4f72-bdb0-844c6f80fd32-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 873.008668] env[62070]: INFO nova.compute.manager [None req-fb77bedb-24c5-4fa2-9eae-0bbdb5984976 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: c16d175c-0b23-4f72-bdb0-844c6f80fd32] Terminating instance [ 873.011802] env[62070]: DEBUG nova.compute.manager [None req-fb77bedb-24c5-4fa2-9eae-0bbdb5984976 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: c16d175c-0b23-4f72-bdb0-844c6f80fd32] Start destroying the instance 
on the hypervisor. {{(pid=62070) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 873.012156] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-fb77bedb-24c5-4fa2-9eae-0bbdb5984976 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: c16d175c-0b23-4f72-bdb0-844c6f80fd32] Destroying instance {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 873.013475] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef67ab71-1fae-4e1d-98ed-7d1821ed4285 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.026406] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-fb77bedb-24c5-4fa2-9eae-0bbdb5984976 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: c16d175c-0b23-4f72-bdb0-844c6f80fd32] Unregistering the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 873.026406] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-707b1969-ff1a-42ab-ba82-94f4cbf4f90c {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.111937] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-fb77bedb-24c5-4fa2-9eae-0bbdb5984976 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: c16d175c-0b23-4f72-bdb0-844c6f80fd32] Unregistered the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 873.112645] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-fb77bedb-24c5-4fa2-9eae-0bbdb5984976 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: c16d175c-0b23-4f72-bdb0-844c6f80fd32] Deleting contents of the VM from datastore datastore2 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 873.112975] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-fb77bedb-24c5-4fa2-9eae-0bbdb5984976 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Deleting the datastore file [datastore2] c16d175c-0b23-4f72-bdb0-844c6f80fd32 {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 873.113475] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-93bdf85e-c2de-4dba-a537-bfe143b31621 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.121476] env[62070]: DEBUG oslo_vmware.api [None req-fb77bedb-24c5-4fa2-9eae-0bbdb5984976 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Waiting for the task: (returnval){ [ 873.121476] env[62070]: value = "task-1121940" [ 873.121476] env[62070]: _type = "Task" [ 873.121476] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 873.133066] env[62070]: DEBUG oslo_vmware.api [None req-fb77bedb-24c5-4fa2-9eae-0bbdb5984976 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': task-1121940, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 873.321877] env[62070]: DEBUG oslo_vmware.api [None req-cdabb9ea-ead2-4381-8d7a-de546f5dce91 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Task: {'id': task-1121938, 'name': MoveVirtualDisk_Task} progress is 35%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 873.427932] env[62070]: DEBUG nova.scheduler.client.report [None req-7045d499-6270-457a-a3fe-e34d5ae00193 tempest-ServerGroupTestJSON-951679134 tempest-ServerGroupTestJSON-951679134-project-member] Updated inventory for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 with generation 102 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 873.428275] env[62070]: DEBUG nova.compute.provider_tree [None req-7045d499-6270-457a-a3fe-e34d5ae00193 tempest-ServerGroupTestJSON-951679134 tempest-ServerGroupTestJSON-951679134-project-member] Updating resource provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 generation from 102 to 103 during operation: update_inventory {{(pid=62070) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 873.428468] env[62070]: DEBUG nova.compute.provider_tree [None req-7045d499-6270-457a-a3fe-e34d5ae00193 tempest-ServerGroupTestJSON-951679134 tempest-ServerGroupTestJSON-951679134-project-member] Updating inventory in ProviderTree for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 873.449938] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-1e327dae-ecdb-4e8f-89d3-84ae5f947ccb tempest-ServersAdmin275Test-485220002 tempest-ServersAdmin275Test-485220002-project-admin] [instance: 61ab347d-1342-4f59-8955-10d575993b77] Powering off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 873.450955] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-33116ccb-b856-4f78-9873-c80e107dc42a {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.458179] env[62070]: DEBUG oslo_vmware.api [None req-1e327dae-ecdb-4e8f-89d3-84ae5f947ccb tempest-ServersAdmin275Test-485220002 tempest-ServersAdmin275Test-485220002-project-admin] Waiting for the task: (returnval){ [ 873.458179] env[62070]: value = "task-1121942" [ 873.458179] env[62070]: _type = "Task" [ 873.458179] env[62070]: } to complete. 
{{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 873.467141] env[62070]: DEBUG oslo_vmware.api [None req-1e327dae-ecdb-4e8f-89d3-84ae5f947ccb tempest-ServersAdmin275Test-485220002 tempest-ServersAdmin275Test-485220002-project-admin] Task: {'id': task-1121942, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 873.506893] env[62070]: DEBUG oslo_concurrency.lockutils [None req-ed858abe-6e77-488f-b72a-5a07c1e7c264 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 873.634519] env[62070]: DEBUG oslo_vmware.api [None req-fb77bedb-24c5-4fa2-9eae-0bbdb5984976 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': task-1121940, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 873.822217] env[62070]: DEBUG oslo_vmware.api [None req-cdabb9ea-ead2-4381-8d7a-de546f5dce91 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Task: {'id': task-1121938, 'name': MoveVirtualDisk_Task} progress is 54%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 873.935059] env[62070]: DEBUG oslo_concurrency.lockutils [None req-7045d499-6270-457a-a3fe-e34d5ae00193 tempest-ServerGroupTestJSON-951679134 tempest-ServerGroupTestJSON-951679134-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.618s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 873.936258] env[62070]: DEBUG oslo_concurrency.lockutils [None req-24d48b36-01e1-4177-8824-76508bde5cc0 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.592s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 873.938894] env[62070]: INFO nova.compute.claims [None req-24d48b36-01e1-4177-8824-76508bde5cc0 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] [instance: 21bcb1a6-833b-48f3-8ee2-0e49c64a104f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 873.969243] env[62070]: DEBUG oslo_vmware.api [None req-1e327dae-ecdb-4e8f-89d3-84ae5f947ccb tempest-ServersAdmin275Test-485220002 tempest-ServersAdmin275Test-485220002-project-admin] Task: {'id': task-1121942, 'name': PowerOffVM_Task, 'duration_secs': 0.509653} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 873.969529] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-1e327dae-ecdb-4e8f-89d3-84ae5f947ccb tempest-ServersAdmin275Test-485220002 tempest-ServersAdmin275Test-485220002-project-admin] [instance: 61ab347d-1342-4f59-8955-10d575993b77] Powered off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 873.969751] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-1e327dae-ecdb-4e8f-89d3-84ae5f947ccb tempest-ServersAdmin275Test-485220002 tempest-ServersAdmin275Test-485220002-project-admin] [instance: 61ab347d-1342-4f59-8955-10d575993b77] Destroying instance {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 873.970680] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfed4885-d0b2-4811-acba-79fa449a5565 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.978874] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-1e327dae-ecdb-4e8f-89d3-84ae5f947ccb tempest-ServersAdmin275Test-485220002 tempest-ServersAdmin275Test-485220002-project-admin] [instance: 61ab347d-1342-4f59-8955-10d575993b77] Unregistering the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 873.979177] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1cdcf098-3a8e-45da-bb8e-ffcd331f1685 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.132941] env[62070]: DEBUG oslo_vmware.api [None req-fb77bedb-24c5-4fa2-9eae-0bbdb5984976 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': task-1121940, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 874.284922] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-1e327dae-ecdb-4e8f-89d3-84ae5f947ccb tempest-ServersAdmin275Test-485220002 tempest-ServersAdmin275Test-485220002-project-admin] [instance: 61ab347d-1342-4f59-8955-10d575993b77] Unregistered the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 874.285265] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-1e327dae-ecdb-4e8f-89d3-84ae5f947ccb tempest-ServersAdmin275Test-485220002 tempest-ServersAdmin275Test-485220002-project-admin] [instance: 61ab347d-1342-4f59-8955-10d575993b77] Deleting contents of the VM from datastore datastore2 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 874.285488] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-1e327dae-ecdb-4e8f-89d3-84ae5f947ccb tempest-ServersAdmin275Test-485220002 tempest-ServersAdmin275Test-485220002-project-admin] Deleting the datastore file [datastore2] 61ab347d-1342-4f59-8955-10d575993b77 {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 874.285800] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f41d391e-fbfa-440a-b20d-422bab0f8f66 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.292987] env[62070]: DEBUG oslo_vmware.api [None req-1e327dae-ecdb-4e8f-89d3-84ae5f947ccb tempest-ServersAdmin275Test-485220002 tempest-ServersAdmin275Test-485220002-project-admin] Waiting for the task: (returnval){ [ 874.292987] env[62070]: value = "task-1121944" [ 874.292987] env[62070]: _type = "Task" [ 874.292987] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 874.303233] env[62070]: DEBUG oslo_vmware.api [None req-1e327dae-ecdb-4e8f-89d3-84ae5f947ccb tempest-ServersAdmin275Test-485220002 tempest-ServersAdmin275Test-485220002-project-admin] Task: {'id': task-1121944, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 874.322403] env[62070]: DEBUG oslo_vmware.api [None req-cdabb9ea-ead2-4381-8d7a-de546f5dce91 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Task: {'id': task-1121938, 'name': MoveVirtualDisk_Task} progress is 77%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 874.445266] env[62070]: DEBUG oslo_concurrency.lockutils [None req-7045d499-6270-457a-a3fe-e34d5ae00193 tempest-ServerGroupTestJSON-951679134 tempest-ServerGroupTestJSON-951679134-project-member] Acquiring lock "d505f05d-3a2b-4f32-9567-90722c67831d" by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 874.445625] env[62070]: DEBUG oslo_concurrency.lockutils [None req-7045d499-6270-457a-a3fe-e34d5ae00193 tempest-ServerGroupTestJSON-951679134 tempest-ServerGroupTestJSON-951679134-project-member] Lock "d505f05d-3a2b-4f32-9567-90722c67831d" acquired by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" :: waited 0.001s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 874.633310] env[62070]: DEBUG oslo_vmware.api [None req-fb77bedb-24c5-4fa2-9eae-0bbdb5984976 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': task-1121940, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 874.803288] env[62070]: DEBUG oslo_vmware.api [None req-1e327dae-ecdb-4e8f-89d3-84ae5f947ccb tempest-ServersAdmin275Test-485220002 tempest-ServersAdmin275Test-485220002-project-admin] Task: {'id': task-1121944, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 874.823364] env[62070]: DEBUG oslo_vmware.api [None req-cdabb9ea-ead2-4381-8d7a-de546f5dce91 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Task: {'id': task-1121938, 'name': MoveVirtualDisk_Task} progress is 100%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 874.950506] env[62070]: DEBUG oslo_concurrency.lockutils [None req-7045d499-6270-457a-a3fe-e34d5ae00193 tempest-ServerGroupTestJSON-951679134 tempest-ServerGroupTestJSON-951679134-project-member] Lock "d505f05d-3a2b-4f32-9567-90722c67831d" "released" by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" :: held 0.505s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 874.951032] env[62070]: DEBUG nova.compute.manager [None req-7045d499-6270-457a-a3fe-e34d5ae00193 tempest-ServerGroupTestJSON-951679134 tempest-ServerGroupTestJSON-951679134-project-member] [instance: 71c98ac8-4149-448b-bf0c-3bfdcc8f50ef] Start building networks asynchronously for instance. {{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 875.133539] env[62070]: DEBUG oslo_vmware.api [None req-fb77bedb-24c5-4fa2-9eae-0bbdb5984976 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': task-1121940, 'name': DeleteDatastoreFile_Task, 'duration_secs': 1.547916} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 875.136153] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-fb77bedb-24c5-4fa2-9eae-0bbdb5984976 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Deleted the datastore file {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 875.136361] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-fb77bedb-24c5-4fa2-9eae-0bbdb5984976 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: c16d175c-0b23-4f72-bdb0-844c6f80fd32] Deleted contents of the VM from datastore datastore2 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 875.136544] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-fb77bedb-24c5-4fa2-9eae-0bbdb5984976 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: c16d175c-0b23-4f72-bdb0-844c6f80fd32] Instance destroyed {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 875.136758] env[62070]: INFO nova.compute.manager [None req-fb77bedb-24c5-4fa2-9eae-0bbdb5984976 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: c16d175c-0b23-4f72-bdb0-844c6f80fd32] Took 2.12 seconds to destroy the instance on the hypervisor. [ 875.137015] env[62070]: DEBUG oslo.service.loopingcall [None req-fb77bedb-24c5-4fa2-9eae-0bbdb5984976 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 875.137418] env[62070]: DEBUG nova.compute.manager [-] [instance: c16d175c-0b23-4f72-bdb0-844c6f80fd32] Deallocating network for instance {{(pid=62070) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 875.137519] env[62070]: DEBUG nova.network.neutron [-] [instance: c16d175c-0b23-4f72-bdb0-844c6f80fd32] deallocate_for_instance() {{(pid=62070) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 875.302943] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-c5084f72-5a75-4757-ac99-35f7f4d596f6 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] [instance: 4a5f644a-1670-4c6b-a762-f87f1ee4cce5] Volume attach. 
Driver type: vmdk {{(pid=62070) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 875.303219] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-c5084f72-5a75-4757-ac99-35f7f4d596f6 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] [instance: 4a5f644a-1670-4c6b-a762-f87f1ee4cce5] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-245439', 'volume_id': '0a76875f-e286-469c-8be6-887850e6ce36', 'name': 'volume-0a76875f-e286-469c-8be6-887850e6ce36', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '4a5f644a-1670-4c6b-a762-f87f1ee4cce5', 'attached_at': '', 'detached_at': '', 'volume_id': '0a76875f-e286-469c-8be6-887850e6ce36', 'serial': '0a76875f-e286-469c-8be6-887850e6ce36'} {{(pid=62070) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 875.304008] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cc07222-ff94-4f12-b4b8-22fdda42019d {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.311928] env[62070]: DEBUG oslo_vmware.api [None req-1e327dae-ecdb-4e8f-89d3-84ae5f947ccb tempest-ServersAdmin275Test-485220002 tempest-ServersAdmin275Test-485220002-project-admin] Task: {'id': task-1121944, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.572336} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 875.326209] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-1e327dae-ecdb-4e8f-89d3-84ae5f947ccb tempest-ServersAdmin275Test-485220002 tempest-ServersAdmin275Test-485220002-project-admin] Deleted the datastore file {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 875.326422] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-1e327dae-ecdb-4e8f-89d3-84ae5f947ccb tempest-ServersAdmin275Test-485220002 tempest-ServersAdmin275Test-485220002-project-admin] [instance: 61ab347d-1342-4f59-8955-10d575993b77] Deleted contents of the VM from datastore datastore2 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 875.326603] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-1e327dae-ecdb-4e8f-89d3-84ae5f947ccb tempest-ServersAdmin275Test-485220002 tempest-ServersAdmin275Test-485220002-project-admin] [instance: 61ab347d-1342-4f59-8955-10d575993b77] Instance destroyed {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 875.332878] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d790f39e-1880-48a6-8034-0ea985f60117 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.362916] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-c5084f72-5a75-4757-ac99-35f7f4d596f6 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] [instance: 4a5f644a-1670-4c6b-a762-f87f1ee4cce5] Reconfiguring VM instance instance-0000003f to attach disk [datastore2] volume-0a76875f-e286-469c-8be6-887850e6ce36/volume-0a76875f-e286-469c-8be6-887850e6ce36.vmdk or device None with type thin {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 875.367643] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with 
opID=oslo.vmware-5f051529-d5b9-410c-ac94-5f9ac602c4d0 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.382238] env[62070]: DEBUG oslo_vmware.api [None req-cdabb9ea-ead2-4381-8d7a-de546f5dce91 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Task: {'id': task-1121938, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.573715} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 875.383281] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6cd2d88-7820-4340-bced-5cd4390a5780 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.386015] env[62070]: INFO nova.virt.vmwareapi.ds_util [None req-cdabb9ea-ead2-4381-8d7a-de546f5dce91 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Moved virtual disk from [datastore2] OSTACK_IMG_c43a14dc-6d83-4b27-acc5-a841457df904/OSTACK_IMG_c43a14dc-6d83-4b27-acc5-a841457df904.vmdk to [datastore2] devstack-image-cache_base/f721b0bd-3ff9-47e3-950c-4df5b98cd172/f721b0bd-3ff9-47e3-950c-4df5b98cd172.vmdk. [ 875.386249] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-cdabb9ea-ead2-4381-8d7a-de546f5dce91 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 58146b84-7589-4f21-bdab-605cee535e55] Cleaning up location [datastore2] OSTACK_IMG_c43a14dc-6d83-4b27-acc5-a841457df904 {{(pid=62070) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 875.386856] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-cdabb9ea-ead2-4381-8d7a-de546f5dce91 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Deleting the datastore file [datastore2] OSTACK_IMG_c43a14dc-6d83-4b27-acc5-a841457df904 {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 875.387648] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e4c6d2c9-fde0-4070-a6fd-f46a1c68d673 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.396652] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c97f93f-d832-47bb-907f-3cdedb2afcb2 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.400252] env[62070]: DEBUG oslo_vmware.api [None req-c5084f72-5a75-4757-ac99-35f7f4d596f6 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Waiting for the task: (returnval){ [ 875.400252] env[62070]: value = "task-1121945" [ 875.400252] env[62070]: _type = "Task" [ 875.400252] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 875.401741] env[62070]: DEBUG oslo_vmware.api [None req-cdabb9ea-ead2-4381-8d7a-de546f5dce91 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Waiting for the task: (returnval){ [ 875.401741] env[62070]: value = "task-1121946" [ 875.401741] env[62070]: _type = "Task" [ 875.401741] env[62070]: } to complete. 
{{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 875.434111] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a0d4f97-2d34-4a52-a8c5-ab17a195d937 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.439553] env[62070]: DEBUG oslo_vmware.api [None req-c5084f72-5a75-4757-ac99-35f7f4d596f6 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Task: {'id': task-1121945, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 875.443262] env[62070]: DEBUG oslo_vmware.api [None req-cdabb9ea-ead2-4381-8d7a-de546f5dce91 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Task: {'id': task-1121946, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.034613} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 875.446468] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-cdabb9ea-ead2-4381-8d7a-de546f5dce91 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Deleted the datastore file {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 875.446654] env[62070]: DEBUG oslo_concurrency.lockutils [None req-cdabb9ea-ead2-4381-8d7a-de546f5dce91 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f721b0bd-3ff9-47e3-950c-4df5b98cd172/f721b0bd-3ff9-47e3-950c-4df5b98cd172.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 875.446923] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-cdabb9ea-ead2-4381-8d7a-de546f5dce91 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f721b0bd-3ff9-47e3-950c-4df5b98cd172/f721b0bd-3ff9-47e3-950c-4df5b98cd172.vmdk to [datastore2] 58146b84-7589-4f21-bdab-605cee535e55/58146b84-7589-4f21-bdab-605cee535e55.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 875.448198] env[62070]: DEBUG nova.compute.manager [req-c6994cce-d5c2-46d8-a8c3-2b662ff99ba6 req-3c1f6bd4-e6ac-43b5-ba82-24970310e4c1 service nova] [instance: c16d175c-0b23-4f72-bdb0-844c6f80fd32] Received event network-vif-deleted-5a04b813-465f-4855-8707-4db273ba30fd {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 875.448390] env[62070]: INFO nova.compute.manager [req-c6994cce-d5c2-46d8-a8c3-2b662ff99ba6 req-3c1f6bd4-e6ac-43b5-ba82-24970310e4c1 service nova] [instance: c16d175c-0b23-4f72-bdb0-844c6f80fd32] Neutron deleted interface 5a04b813-465f-4855-8707-4db273ba30fd; detaching it from the instance and deleting it from the info cache [ 875.451546] env[62070]: DEBUG nova.network.neutron [req-c6994cce-d5c2-46d8-a8c3-2b662ff99ba6 req-3c1f6bd4-e6ac-43b5-ba82-24970310e4c1 service nova] [instance: c16d175c-0b23-4f72-bdb0-844c6f80fd32] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 875.451546] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with 
opID=oslo.vmware-b7c9099c-4ab2-4767-b184-d0e1e82b0b60 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.456054] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3bc425b-84a3-4bb2-90f0-bb774afe588f {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.459875] env[62070]: DEBUG nova.compute.utils [None req-7045d499-6270-457a-a3fe-e34d5ae00193 tempest-ServerGroupTestJSON-951679134 tempest-ServerGroupTestJSON-951679134-project-member] Using /dev/sd instead of None {{(pid=62070) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 875.461536] env[62070]: DEBUG nova.compute.manager [None req-7045d499-6270-457a-a3fe-e34d5ae00193 tempest-ServerGroupTestJSON-951679134 tempest-ServerGroupTestJSON-951679134-project-member] [instance: 71c98ac8-4149-448b-bf0c-3bfdcc8f50ef] Allocating IP information in the background. {{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 875.461746] env[62070]: DEBUG nova.network.neutron [None req-7045d499-6270-457a-a3fe-e34d5ae00193 tempest-ServerGroupTestJSON-951679134 tempest-ServerGroupTestJSON-951679134-project-member] [instance: 71c98ac8-4149-448b-bf0c-3bfdcc8f50ef] allocate_for_instance() {{(pid=62070) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 875.466204] env[62070]: DEBUG oslo_vmware.api [None req-cdabb9ea-ead2-4381-8d7a-de546f5dce91 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Waiting for the task: (returnval){ [ 875.466204] env[62070]: value = "task-1121947" [ 875.466204] env[62070]: _type = "Task" [ 875.466204] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 875.475249] env[62070]: DEBUG nova.compute.provider_tree [None req-24d48b36-01e1-4177-8824-76508bde5cc0 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 875.484542] env[62070]: DEBUG oslo_vmware.api [None req-cdabb9ea-ead2-4381-8d7a-de546f5dce91 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Task: {'id': task-1121947, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 875.522678] env[62070]: DEBUG nova.policy [None req-7045d499-6270-457a-a3fe-e34d5ae00193 tempest-ServerGroupTestJSON-951679134 tempest-ServerGroupTestJSON-951679134-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '33322cfeb9d248a7999700e0b489de18', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b85884b5b2dd4099b4e0f9eeddde33cc', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62070) authorize /opt/stack/nova/nova/policy.py:201}} [ 875.903016] env[62070]: DEBUG nova.network.neutron [-] [instance: c16d175c-0b23-4f72-bdb0-844c6f80fd32] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 875.904717] env[62070]: DEBUG nova.network.neutron [None req-7045d499-6270-457a-a3fe-e34d5ae00193 tempest-ServerGroupTestJSON-951679134 tempest-ServerGroupTestJSON-951679134-project-member] [instance: 71c98ac8-4149-448b-bf0c-3bfdcc8f50ef] Successfully created port: 344dfad7-d2a7-4532-b01b-71b3a8e2b88a {{(pid=62070) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 875.921019] env[62070]: DEBUG oslo_vmware.api [None req-c5084f72-5a75-4757-ac99-35f7f4d596f6 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Task: {'id': task-1121945, 'name': ReconfigVM_Task, 'duration_secs': 0.407324} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 875.921019] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-c5084f72-5a75-4757-ac99-35f7f4d596f6 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] [instance: 4a5f644a-1670-4c6b-a762-f87f1ee4cce5] Reconfigured VM instance instance-0000003f to attach disk [datastore2] volume-0a76875f-e286-469c-8be6-887850e6ce36/volume-0a76875f-e286-469c-8be6-887850e6ce36.vmdk or device None with type thin {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 875.923311] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3d5f1646-7c28-4c76-8c34-c58f16d68ef8 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.939490] env[62070]: DEBUG oslo_vmware.api [None req-c5084f72-5a75-4757-ac99-35f7f4d596f6 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Waiting for the task: (returnval){ [ 875.939490] env[62070]: value = "task-1121948" [ 875.939490] env[62070]: _type = "Task" [ 875.939490] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 875.948745] env[62070]: DEBUG oslo_vmware.api [None req-c5084f72-5a75-4757-ac99-35f7f4d596f6 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Task: {'id': task-1121948, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 875.959638] env[62070]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d69d7a16-85d4-4080-b736-ce9ceaad412a {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.965060] env[62070]: DEBUG nova.compute.manager [None req-7045d499-6270-457a-a3fe-e34d5ae00193 tempest-ServerGroupTestJSON-951679134 tempest-ServerGroupTestJSON-951679134-project-member] [instance: 71c98ac8-4149-448b-bf0c-3bfdcc8f50ef] Start building block device mappings for instance. {{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 875.972621] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8adcb82-6105-44e1-a781-a5147c144592 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.985443] env[62070]: DEBUG nova.scheduler.client.report [None req-24d48b36-01e1-4177-8824-76508bde5cc0 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 875.998527] env[62070]: DEBUG oslo_vmware.api [None req-cdabb9ea-ead2-4381-8d7a-de546f5dce91 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Task: {'id': task-1121947, 'name': CopyVirtualDisk_Task} progress is 24%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 876.013678] env[62070]: DEBUG nova.compute.manager [req-c6994cce-d5c2-46d8-a8c3-2b662ff99ba6 req-3c1f6bd4-e6ac-43b5-ba82-24970310e4c1 service nova] [instance: c16d175c-0b23-4f72-bdb0-844c6f80fd32] Detach interface failed, port_id=5a04b813-465f-4855-8707-4db273ba30fd, reason: Instance c16d175c-0b23-4f72-bdb0-844c6f80fd32 could not be found. 
{{(pid=62070) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 876.374747] env[62070]: DEBUG nova.virt.hardware [None req-1e327dae-ecdb-4e8f-89d3-84ae5f947ccb tempest-ServersAdmin275Test-485220002 tempest-ServersAdmin275Test-485220002-project-admin] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T09:21:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T09:21:17Z,direct_url=,disk_format='vmdk',id=43ea607c-7ece-4601-9b11-75c6a16aa7dd,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9d42cb2bbadf40d6b35f237f71234611',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T09:21:18Z,virtual_size=,visibility=), allow threads: False {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 876.375159] env[62070]: DEBUG nova.virt.hardware [None req-1e327dae-ecdb-4e8f-89d3-84ae5f947ccb tempest-ServersAdmin275Test-485220002 tempest-ServersAdmin275Test-485220002-project-admin] Flavor limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 876.379160] env[62070]: DEBUG nova.virt.hardware [None req-1e327dae-ecdb-4e8f-89d3-84ae5f947ccb tempest-ServersAdmin275Test-485220002 tempest-ServersAdmin275Test-485220002-project-admin] Image limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 876.379160] env[62070]: DEBUG nova.virt.hardware [None req-1e327dae-ecdb-4e8f-89d3-84ae5f947ccb tempest-ServersAdmin275Test-485220002 tempest-ServersAdmin275Test-485220002-project-admin] Flavor pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 876.379160] env[62070]: DEBUG nova.virt.hardware [None req-1e327dae-ecdb-4e8f-89d3-84ae5f947ccb tempest-ServersAdmin275Test-485220002 tempest-ServersAdmin275Test-485220002-project-admin] Image pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 876.379160] env[62070]: DEBUG nova.virt.hardware [None req-1e327dae-ecdb-4e8f-89d3-84ae5f947ccb tempest-ServersAdmin275Test-485220002 tempest-ServersAdmin275Test-485220002-project-admin] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 876.379160] env[62070]: DEBUG nova.virt.hardware [None req-1e327dae-ecdb-4e8f-89d3-84ae5f947ccb tempest-ServersAdmin275Test-485220002 tempest-ServersAdmin275Test-485220002-project-admin] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 876.379160] env[62070]: DEBUG nova.virt.hardware [None req-1e327dae-ecdb-4e8f-89d3-84ae5f947ccb tempest-ServersAdmin275Test-485220002 tempest-ServersAdmin275Test-485220002-project-admin] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 876.379160] env[62070]: DEBUG nova.virt.hardware [None req-1e327dae-ecdb-4e8f-89d3-84ae5f947ccb 
tempest-ServersAdmin275Test-485220002 tempest-ServersAdmin275Test-485220002-project-admin] Got 1 possible topologies {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 876.379160] env[62070]: DEBUG nova.virt.hardware [None req-1e327dae-ecdb-4e8f-89d3-84ae5f947ccb tempest-ServersAdmin275Test-485220002 tempest-ServersAdmin275Test-485220002-project-admin] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 876.379160] env[62070]: DEBUG nova.virt.hardware [None req-1e327dae-ecdb-4e8f-89d3-84ae5f947ccb tempest-ServersAdmin275Test-485220002 tempest-ServersAdmin275Test-485220002-project-admin] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 876.379160] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79f94c82-d2b2-4f4c-9afe-7005fc183328 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.387618] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0efa7dc-dd9f-4dbe-9c26-eb3a7debebc8 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.403918] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-1e327dae-ecdb-4e8f-89d3-84ae5f947ccb tempest-ServersAdmin275Test-485220002 tempest-ServersAdmin275Test-485220002-project-admin] [instance: 61ab347d-1342-4f59-8955-10d575993b77] Instance VIF info [] {{(pid=62070) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 876.411827] env[62070]: DEBUG oslo.service.loopingcall [None req-1e327dae-ecdb-4e8f-89d3-84ae5f947ccb tempest-ServersAdmin275Test-485220002 tempest-ServersAdmin275Test-485220002-project-admin] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 876.413121] env[62070]: INFO nova.compute.manager [-] [instance: c16d175c-0b23-4f72-bdb0-844c6f80fd32] Took 1.27 seconds to deallocate network for instance. [ 876.413121] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 61ab347d-1342-4f59-8955-10d575993b77] Creating VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 876.414557] env[62070]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d3f70b98-3ecd-4075-93d0-087014651c46 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.435446] env[62070]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 876.435446] env[62070]: value = "task-1121949" [ 876.435446] env[62070]: _type = "Task" [ 876.435446] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 876.447408] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1121949, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 876.453488] env[62070]: DEBUG oslo_vmware.api [None req-c5084f72-5a75-4757-ac99-35f7f4d596f6 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Task: {'id': task-1121948, 'name': ReconfigVM_Task, 'duration_secs': 0.181987} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 876.453915] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-c5084f72-5a75-4757-ac99-35f7f4d596f6 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] [instance: 4a5f644a-1670-4c6b-a762-f87f1ee4cce5] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-245439', 'volume_id': '0a76875f-e286-469c-8be6-887850e6ce36', 'name': 'volume-0a76875f-e286-469c-8be6-887850e6ce36', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '4a5f644a-1670-4c6b-a762-f87f1ee4cce5', 'attached_at': '', 'detached_at': '', 'volume_id': '0a76875f-e286-469c-8be6-887850e6ce36', 'serial': '0a76875f-e286-469c-8be6-887850e6ce36'} {{(pid=62070) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 876.494304] env[62070]: DEBUG oslo_concurrency.lockutils [None req-24d48b36-01e1-4177-8824-76508bde5cc0 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.558s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 876.495558] env[62070]: DEBUG nova.compute.manager [None req-24d48b36-01e1-4177-8824-76508bde5cc0 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] [instance: 21bcb1a6-833b-48f3-8ee2-0e49c64a104f] Start building networks asynchronously for instance. {{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 876.497693] env[62070]: DEBUG oslo_vmware.api [None req-cdabb9ea-ead2-4381-8d7a-de546f5dce91 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Task: {'id': task-1121947, 'name': CopyVirtualDisk_Task} progress is 43%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 876.498427] env[62070]: DEBUG oslo_concurrency.lockutils [None req-b245da95-b82c-4fb0-92b9-b49d402a2811 tempest-ServerRescueTestJSONUnderV235-557690179 tempest-ServerRescueTestJSONUnderV235-557690179-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 24.072s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 876.500641] env[62070]: INFO nova.compute.claims [None req-b245da95-b82c-4fb0-92b9-b49d402a2811 tempest-ServerRescueTestJSONUnderV235-557690179 tempest-ServerRescueTestJSONUnderV235-557690179-project-member] [instance: 559eee5b-0834-4dcf-a436-5e58644c7a3b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 876.931743] env[62070]: DEBUG oslo_concurrency.lockutils [None req-fb77bedb-24c5-4fa2-9eae-0bbdb5984976 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 876.947382] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1121949, 'name': CreateVM_Task} progress is 25%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 876.975524] env[62070]: DEBUG nova.compute.manager [None req-7045d499-6270-457a-a3fe-e34d5ae00193 tempest-ServerGroupTestJSON-951679134 tempest-ServerGroupTestJSON-951679134-project-member] [instance: 71c98ac8-4149-448b-bf0c-3bfdcc8f50ef] Start spawning the instance on the hypervisor. {{(pid=62070) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 876.995863] env[62070]: DEBUG oslo_vmware.api [None req-cdabb9ea-ead2-4381-8d7a-de546f5dce91 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Task: {'id': task-1121947, 'name': CopyVirtualDisk_Task} progress is 66%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 877.002153] env[62070]: DEBUG nova.compute.utils [None req-24d48b36-01e1-4177-8824-76508bde5cc0 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Using /dev/sd instead of None {{(pid=62070) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 877.004475] env[62070]: DEBUG nova.virt.hardware [None req-7045d499-6270-457a-a3fe-e34d5ae00193 tempest-ServerGroupTestJSON-951679134 tempest-ServerGroupTestJSON-951679134-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T09:21:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T09:21:17Z,direct_url=,disk_format='vmdk',id=43ea607c-7ece-4601-9b11-75c6a16aa7dd,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9d42cb2bbadf40d6b35f237f71234611',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T09:21:18Z,virtual_size=,visibility=), allow threads: False {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 877.004717] env[62070]: DEBUG nova.virt.hardware [None req-7045d499-6270-457a-a3fe-e34d5ae00193 tempest-ServerGroupTestJSON-951679134 tempest-ServerGroupTestJSON-951679134-project-member] Flavor limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 877.004905] env[62070]: DEBUG nova.virt.hardware [None req-7045d499-6270-457a-a3fe-e34d5ae00193 tempest-ServerGroupTestJSON-951679134 tempest-ServerGroupTestJSON-951679134-project-member] Image limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 877.005103] env[62070]: DEBUG nova.virt.hardware [None req-7045d499-6270-457a-a3fe-e34d5ae00193 tempest-ServerGroupTestJSON-951679134 tempest-ServerGroupTestJSON-951679134-project-member] Flavor pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 877.005253] env[62070]: DEBUG nova.virt.hardware [None req-7045d499-6270-457a-a3fe-e34d5ae00193 tempest-ServerGroupTestJSON-951679134 tempest-ServerGroupTestJSON-951679134-project-member] Image pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 877.005402] env[62070]: DEBUG nova.virt.hardware [None req-7045d499-6270-457a-a3fe-e34d5ae00193 tempest-ServerGroupTestJSON-951679134 tempest-ServerGroupTestJSON-951679134-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 877.005611] env[62070]: DEBUG nova.virt.hardware [None req-7045d499-6270-457a-a3fe-e34d5ae00193 tempest-ServerGroupTestJSON-951679134 tempest-ServerGroupTestJSON-951679134-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 877.005771] env[62070]: DEBUG nova.virt.hardware [None req-7045d499-6270-457a-a3fe-e34d5ae00193 
tempest-ServerGroupTestJSON-951679134 tempest-ServerGroupTestJSON-951679134-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 877.005937] env[62070]: DEBUG nova.virt.hardware [None req-7045d499-6270-457a-a3fe-e34d5ae00193 tempest-ServerGroupTestJSON-951679134 tempest-ServerGroupTestJSON-951679134-project-member] Got 1 possible topologies {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 877.006115] env[62070]: DEBUG nova.virt.hardware [None req-7045d499-6270-457a-a3fe-e34d5ae00193 tempest-ServerGroupTestJSON-951679134 tempest-ServerGroupTestJSON-951679134-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 877.006304] env[62070]: DEBUG nova.virt.hardware [None req-7045d499-6270-457a-a3fe-e34d5ae00193 tempest-ServerGroupTestJSON-951679134 tempest-ServerGroupTestJSON-951679134-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 877.006668] env[62070]: DEBUG nova.compute.manager [None req-24d48b36-01e1-4177-8824-76508bde5cc0 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] [instance: 21bcb1a6-833b-48f3-8ee2-0e49c64a104f] Allocating IP information in the background. {{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 877.006862] env[62070]: DEBUG nova.network.neutron [None req-24d48b36-01e1-4177-8824-76508bde5cc0 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] [instance: 21bcb1a6-833b-48f3-8ee2-0e49c64a104f] allocate_for_instance() {{(pid=62070) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 877.012264] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b19ad831-149a-4aab-b9fa-e283e16295b7 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.023633] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3156c7b-ed49-4ce3-b5bd-99c0c1ba9f71 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.058364] env[62070]: DEBUG nova.policy [None req-24d48b36-01e1-4177-8824-76508bde5cc0 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7534320dee8f486e90f5174aa94d00bd', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '925dff51764c4b56ae7ea05fbde2ecdd', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62070) authorize /opt/stack/nova/nova/policy.py:201}} [ 877.142901] env[62070]: DEBUG oslo_vmware.rw_handles [None req-783a44c6-89e7-446a-8701-ffc7ee9343d8 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Getting lease state for 
https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/527a9a4b-6bfe-3418-1417-5fe48295fe06/disk-0.vmdk. {{(pid=62070) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 877.143908] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4e5270f-df03-4b39-8d20-cc05a180f6a0 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.151307] env[62070]: DEBUG oslo_vmware.rw_handles [None req-783a44c6-89e7-446a-8701-ffc7ee9343d8 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/527a9a4b-6bfe-3418-1417-5fe48295fe06/disk-0.vmdk is in state: ready. {{(pid=62070) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 877.151424] env[62070]: ERROR oslo_vmware.rw_handles [None req-783a44c6-89e7-446a-8701-ffc7ee9343d8 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/527a9a4b-6bfe-3418-1417-5fe48295fe06/disk-0.vmdk due to incomplete transfer. [ 877.151701] env[62070]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-a3a62dac-7f0e-43d6-bd89-69f07a47f510 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.160934] env[62070]: DEBUG oslo_vmware.rw_handles [None req-783a44c6-89e7-446a-8701-ffc7ee9343d8 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/527a9a4b-6bfe-3418-1417-5fe48295fe06/disk-0.vmdk. {{(pid=62070) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 877.161279] env[62070]: DEBUG nova.virt.vmwareapi.images [None req-783a44c6-89e7-446a-8701-ffc7ee9343d8 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 62758a38-4819-4d5a-97ed-db6c9ceb97bf] Uploaded image 68e7a958-3cd7-4176-af06-dab64f015559 to the Glance image server {{(pid=62070) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:473}} [ 877.163962] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-783a44c6-89e7-446a-8701-ffc7ee9343d8 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 62758a38-4819-4d5a-97ed-db6c9ceb97bf] Destroying the VM {{(pid=62070) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1358}} [ 877.164619] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-5649fae7-161a-461f-aa7e-e1163ebec4b8 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.173751] env[62070]: DEBUG oslo_vmware.api [None req-783a44c6-89e7-446a-8701-ffc7ee9343d8 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Waiting for the task: (returnval){ [ 877.173751] env[62070]: value = "task-1121950" [ 877.173751] env[62070]: _type = "Task" [ 877.173751] env[62070]: } to complete. 
{{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 877.182645] env[62070]: DEBUG oslo_vmware.api [None req-783a44c6-89e7-446a-8701-ffc7ee9343d8 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Task: {'id': task-1121950, 'name': Destroy_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 877.442905] env[62070]: DEBUG nova.network.neutron [None req-24d48b36-01e1-4177-8824-76508bde5cc0 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] [instance: 21bcb1a6-833b-48f3-8ee2-0e49c64a104f] Successfully created port: 45420f68-e309-4569-8dac-28e16d9417d7 {{(pid=62070) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 877.451073] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1121949, 'name': CreateVM_Task} progress is 25%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 877.494771] env[62070]: DEBUG oslo_vmware.api [None req-cdabb9ea-ead2-4381-8d7a-de546f5dce91 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Task: {'id': task-1121947, 'name': CopyVirtualDisk_Task} progress is 85%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 877.499939] env[62070]: DEBUG nova.objects.instance [None req-c5084f72-5a75-4757-ac99-35f7f4d596f6 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Lazy-loading 'flavor' on Instance uuid 4a5f644a-1670-4c6b-a762-f87f1ee4cce5 {{(pid=62070) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 877.502757] env[62070]: DEBUG nova.compute.manager [None req-24d48b36-01e1-4177-8824-76508bde5cc0 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] [instance: 21bcb1a6-833b-48f3-8ee2-0e49c64a104f] Start building block device mappings for instance. 
{{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 877.620862] env[62070]: DEBUG nova.compute.manager [req-6adb6754-6446-4143-89f1-3f28fb8d4cd1 req-4bff2a9f-a2f5-47e3-a171-adac398bb6c2 service nova] [instance: 71c98ac8-4149-448b-bf0c-3bfdcc8f50ef] Received event network-vif-plugged-344dfad7-d2a7-4532-b01b-71b3a8e2b88a {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 877.621159] env[62070]: DEBUG oslo_concurrency.lockutils [req-6adb6754-6446-4143-89f1-3f28fb8d4cd1 req-4bff2a9f-a2f5-47e3-a171-adac398bb6c2 service nova] Acquiring lock "71c98ac8-4149-448b-bf0c-3bfdcc8f50ef-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 877.621379] env[62070]: DEBUG oslo_concurrency.lockutils [req-6adb6754-6446-4143-89f1-3f28fb8d4cd1 req-4bff2a9f-a2f5-47e3-a171-adac398bb6c2 service nova] Lock "71c98ac8-4149-448b-bf0c-3bfdcc8f50ef-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 877.621549] env[62070]: DEBUG oslo_concurrency.lockutils [req-6adb6754-6446-4143-89f1-3f28fb8d4cd1 req-4bff2a9f-a2f5-47e3-a171-adac398bb6c2 service nova] Lock "71c98ac8-4149-448b-bf0c-3bfdcc8f50ef-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 877.623777] env[62070]: DEBUG nova.compute.manager [req-6adb6754-6446-4143-89f1-3f28fb8d4cd1 req-4bff2a9f-a2f5-47e3-a171-adac398bb6c2 service nova] [instance: 71c98ac8-4149-448b-bf0c-3bfdcc8f50ef] No waiting events found dispatching network-vif-plugged-344dfad7-d2a7-4532-b01b-71b3a8e2b88a {{(pid=62070) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 877.624493] env[62070]: WARNING nova.compute.manager [req-6adb6754-6446-4143-89f1-3f28fb8d4cd1 req-4bff2a9f-a2f5-47e3-a171-adac398bb6c2 service nova] [instance: 71c98ac8-4149-448b-bf0c-3bfdcc8f50ef] Received unexpected event network-vif-plugged-344dfad7-d2a7-4532-b01b-71b3a8e2b88a for instance with vm_state building and task_state spawning. [ 877.684135] env[62070]: DEBUG oslo_vmware.api [None req-783a44c6-89e7-446a-8701-ffc7ee9343d8 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Task: {'id': task-1121950, 'name': Destroy_Task, 'duration_secs': 0.406566} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 877.684135] env[62070]: INFO nova.virt.vmwareapi.vm_util [None req-783a44c6-89e7-446a-8701-ffc7ee9343d8 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 62758a38-4819-4d5a-97ed-db6c9ceb97bf] Destroyed the VM [ 877.684135] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-783a44c6-89e7-446a-8701-ffc7ee9343d8 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 62758a38-4819-4d5a-97ed-db6c9ceb97bf] Deleting Snapshot of the VM instance {{(pid=62070) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 877.686782] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-c3537adb-127e-4395-8e27-70c7cdff231b {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.696183] env[62070]: DEBUG oslo_vmware.api [None req-783a44c6-89e7-446a-8701-ffc7ee9343d8 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Waiting for the task: (returnval){ [ 877.696183] env[62070]: value = "task-1121951" [ 877.696183] env[62070]: _type = "Task" [ 877.696183] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 877.710193] env[62070]: DEBUG oslo_vmware.api [None req-783a44c6-89e7-446a-8701-ffc7ee9343d8 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Task: {'id': task-1121951, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 877.945532] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e17bb42c-eb7b-4656-bc09-5664f7c7648c {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.951952] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1121949, 'name': CreateVM_Task, 'duration_secs': 1.466356} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 877.954174] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 61ab347d-1342-4f59-8955-10d575993b77] Created VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 877.954174] env[62070]: DEBUG oslo_concurrency.lockutils [None req-1e327dae-ecdb-4e8f-89d3-84ae5f947ccb tempest-ServersAdmin275Test-485220002 tempest-ServersAdmin275Test-485220002-project-admin] Acquiring lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 877.954394] env[62070]: DEBUG oslo_concurrency.lockutils [None req-1e327dae-ecdb-4e8f-89d3-84ae5f947ccb tempest-ServersAdmin275Test-485220002 tempest-ServersAdmin275Test-485220002-project-admin] Acquired lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 877.954882] env[62070]: DEBUG oslo_concurrency.lockutils [None req-1e327dae-ecdb-4e8f-89d3-84ae5f947ccb tempest-ServersAdmin275Test-485220002 tempest-ServersAdmin275Test-485220002-project-admin] Acquired external semaphore "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 877.957231] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2ec3c974-8644-4ffa-9c69-a1c0a713cae9 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.960472] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b9d516c-05c9-47d8-85ae-09a1fa1acbf9 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.969031] env[62070]: DEBUG oslo_vmware.api [None req-1e327dae-ecdb-4e8f-89d3-84ae5f947ccb tempest-ServersAdmin275Test-485220002 tempest-ServersAdmin275Test-485220002-project-admin] Waiting for the task: (returnval){ [ 877.969031] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]52832c29-a2ac-af88-e891-32059252f988" [ 877.969031] env[62070]: _type = "Task" [ 877.969031] env[62070]: } to complete. 
{{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 878.005924] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d06dbba7-a7a8-4de4-8dcb-61439f02f8f5 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.014350] env[62070]: DEBUG oslo_concurrency.lockutils [None req-c5084f72-5a75-4757-ac99-35f7f4d596f6 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Lock "4a5f644a-1670-4c6b-a762-f87f1ee4cce5" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.337s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 878.026836] env[62070]: DEBUG oslo_vmware.api [None req-cdabb9ea-ead2-4381-8d7a-de546f5dce91 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Task: {'id': task-1121947, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.373237} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 878.027125] env[62070]: DEBUG oslo_vmware.api [None req-1e327dae-ecdb-4e8f-89d3-84ae5f947ccb tempest-ServersAdmin275Test-485220002 tempest-ServersAdmin275Test-485220002-project-admin] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52832c29-a2ac-af88-e891-32059252f988, 'name': SearchDatastore_Task, 'duration_secs': 0.009122} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 878.027599] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-cdabb9ea-ead2-4381-8d7a-de546f5dce91 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f721b0bd-3ff9-47e3-950c-4df5b98cd172/f721b0bd-3ff9-47e3-950c-4df5b98cd172.vmdk to [datastore2] 58146b84-7589-4f21-bdab-605cee535e55/58146b84-7589-4f21-bdab-605cee535e55.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 878.028896] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a7ffe1c-06e0-4528-a39b-1e4559951e67 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.032891] env[62070]: DEBUG oslo_concurrency.lockutils [None req-1e327dae-ecdb-4e8f-89d3-84ae5f947ccb tempest-ServersAdmin275Test-485220002 tempest-ServersAdmin275Test-485220002-project-admin] Releasing lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 878.033169] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-1e327dae-ecdb-4e8f-89d3-84ae5f947ccb tempest-ServersAdmin275Test-485220002 tempest-ServersAdmin275Test-485220002-project-admin] [instance: 61ab347d-1342-4f59-8955-10d575993b77] Processing image 43ea607c-7ece-4601-9b11-75c6a16aa7dd {{(pid=62070) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 878.033398] env[62070]: DEBUG oslo_concurrency.lockutils [None req-1e327dae-ecdb-4e8f-89d3-84ae5f947ccb tempest-ServersAdmin275Test-485220002 tempest-ServersAdmin275Test-485220002-project-admin] Acquiring lock "[datastore2] 
devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 878.033553] env[62070]: DEBUG oslo_concurrency.lockutils [None req-1e327dae-ecdb-4e8f-89d3-84ae5f947ccb tempest-ServersAdmin275Test-485220002 tempest-ServersAdmin275Test-485220002-project-admin] Acquired lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 878.037027] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-1e327dae-ecdb-4e8f-89d3-84ae5f947ccb tempest-ServersAdmin275Test-485220002 tempest-ServersAdmin275Test-485220002-project-admin] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 878.037027] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-442ad64a-990b-4f2c-baae-47e76279c023 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.037329] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a10c8770-111b-4a28-8f64-9505f65186b0 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.061938] env[62070]: DEBUG nova.compute.provider_tree [None req-b245da95-b82c-4fb0-92b9-b49d402a2811 tempest-ServerRescueTestJSONUnderV235-557690179 tempest-ServerRescueTestJSONUnderV235-557690179-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 878.073923] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-cdabb9ea-ead2-4381-8d7a-de546f5dce91 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 58146b84-7589-4f21-bdab-605cee535e55] Reconfiguring VM instance instance-00000049 to attach disk [datastore2] 58146b84-7589-4f21-bdab-605cee535e55/58146b84-7589-4f21-bdab-605cee535e55.vmdk or device None with type streamOptimized {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 878.074389] env[62070]: DEBUG nova.scheduler.client.report [None req-b245da95-b82c-4fb0-92b9-b49d402a2811 tempest-ServerRescueTestJSONUnderV235-557690179 tempest-ServerRescueTestJSONUnderV235-557690179-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 878.077766] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1be9bbd5-459f-40d0-8abb-e232413ec102 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.096021] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-1e327dae-ecdb-4e8f-89d3-84ae5f947ccb tempest-ServersAdmin275Test-485220002 tempest-ServersAdmin275Test-485220002-project-admin] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 878.096021] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-1e327dae-ecdb-4e8f-89d3-84ae5f947ccb tempest-ServersAdmin275Test-485220002 tempest-ServersAdmin275Test-485220002-project-admin] Folder [datastore2] devstack-image-cache_base created. {{(pid=62070) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 878.096021] env[62070]: DEBUG oslo_concurrency.lockutils [None req-b245da95-b82c-4fb0-92b9-b49d402a2811 tempest-ServerRescueTestJSONUnderV235-557690179 tempest-ServerRescueTestJSONUnderV235-557690179-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 1.597s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 878.096021] env[62070]: DEBUG nova.compute.manager [None req-b245da95-b82c-4fb0-92b9-b49d402a2811 tempest-ServerRescueTestJSONUnderV235-557690179 tempest-ServerRescueTestJSONUnderV235-557690179-project-member] [instance: 559eee5b-0834-4dcf-a436-5e58644c7a3b] Start building networks asynchronously for instance. {{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 878.099315] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-20c24790-700e-4265-beab-879d8cd18b99 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.101905] env[62070]: DEBUG oslo_concurrency.lockutils [None req-f5579827-8e9e-49d8-9717-4039d0d0bfc5 tempest-ServerAddressesNegativeTestJSON-1593054557 tempest-ServerAddressesNegativeTestJSON-1593054557-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 22.128s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 878.103805] env[62070]: INFO nova.compute.claims [None req-f5579827-8e9e-49d8-9717-4039d0d0bfc5 tempest-ServerAddressesNegativeTestJSON-1593054557 tempest-ServerAddressesNegativeTestJSON-1593054557-project-member] [instance: d2cfcfac-4f15-4b16-9046-76722ee2e39b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 878.108013] env[62070]: DEBUG nova.network.neutron [None req-7045d499-6270-457a-a3fe-e34d5ae00193 tempest-ServerGroupTestJSON-951679134 tempest-ServerGroupTestJSON-951679134-project-member] [instance: 71c98ac8-4149-448b-bf0c-3bfdcc8f50ef] Successfully updated port: 344dfad7-d2a7-4532-b01b-71b3a8e2b88a {{(pid=62070) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 878.113743] env[62070]: DEBUG oslo_vmware.api [None req-1e327dae-ecdb-4e8f-89d3-84ae5f947ccb tempest-ServersAdmin275Test-485220002 tempest-ServersAdmin275Test-485220002-project-admin] Waiting for the task: (returnval){ [ 878.113743] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]52b0567e-b179-dfe5-fb3a-afca31f628d9" [ 878.113743] env[62070]: _type = "Task" [ 878.113743] env[62070]: } to complete. 
{{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 878.115548] env[62070]: DEBUG oslo_vmware.api [None req-cdabb9ea-ead2-4381-8d7a-de546f5dce91 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Waiting for the task: (returnval){ [ 878.115548] env[62070]: value = "task-1121952" [ 878.115548] env[62070]: _type = "Task" [ 878.115548] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 878.129906] env[62070]: DEBUG oslo_vmware.api [None req-1e327dae-ecdb-4e8f-89d3-84ae5f947ccb tempest-ServersAdmin275Test-485220002 tempest-ServersAdmin275Test-485220002-project-admin] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52b0567e-b179-dfe5-fb3a-afca31f628d9, 'name': SearchDatastore_Task, 'duration_secs': 0.010116} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 878.138375] env[62070]: DEBUG nova.compute.manager [req-f9f51e5d-a160-404b-b6c8-d48618fb0c8d req-ed691189-af64-4afd-af24-a15b306bf55c service nova] [instance: 71c98ac8-4149-448b-bf0c-3bfdcc8f50ef] Received event network-changed-344dfad7-d2a7-4532-b01b-71b3a8e2b88a {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 878.138375] env[62070]: DEBUG nova.compute.manager [req-f9f51e5d-a160-404b-b6c8-d48618fb0c8d req-ed691189-af64-4afd-af24-a15b306bf55c service nova] [instance: 71c98ac8-4149-448b-bf0c-3bfdcc8f50ef] Refreshing instance network info cache due to event network-changed-344dfad7-d2a7-4532-b01b-71b3a8e2b88a. {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 878.138375] env[62070]: DEBUG oslo_concurrency.lockutils [req-f9f51e5d-a160-404b-b6c8-d48618fb0c8d req-ed691189-af64-4afd-af24-a15b306bf55c service nova] Acquiring lock "refresh_cache-71c98ac8-4149-448b-bf0c-3bfdcc8f50ef" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 878.138375] env[62070]: DEBUG oslo_concurrency.lockutils [req-f9f51e5d-a160-404b-b6c8-d48618fb0c8d req-ed691189-af64-4afd-af24-a15b306bf55c service nova] Acquired lock "refresh_cache-71c98ac8-4149-448b-bf0c-3bfdcc8f50ef" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 878.138375] env[62070]: DEBUG nova.network.neutron [req-f9f51e5d-a160-404b-b6c8-d48618fb0c8d req-ed691189-af64-4afd-af24-a15b306bf55c service nova] [instance: 71c98ac8-4149-448b-bf0c-3bfdcc8f50ef] Refreshing network info cache for port 344dfad7-d2a7-4532-b01b-71b3a8e2b88a {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 878.139443] env[62070]: DEBUG oslo_vmware.api [None req-cdabb9ea-ead2-4381-8d7a-de546f5dce91 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Task: {'id': task-1121952, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 878.139906] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b9ffa580-2bca-4ba8-bbd3-11e6f2baad56 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.146498] env[62070]: DEBUG oslo_vmware.api [None req-1e327dae-ecdb-4e8f-89d3-84ae5f947ccb tempest-ServersAdmin275Test-485220002 tempest-ServersAdmin275Test-485220002-project-admin] Waiting for the task: (returnval){ [ 878.146498] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]52bbb992-0892-e8d7-363b-d28708d08936" [ 878.146498] env[62070]: _type = "Task" [ 878.146498] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 878.158723] env[62070]: DEBUG oslo_vmware.api [None req-1e327dae-ecdb-4e8f-89d3-84ae5f947ccb tempest-ServersAdmin275Test-485220002 tempest-ServersAdmin275Test-485220002-project-admin] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52bbb992-0892-e8d7-363b-d28708d08936, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 878.206978] env[62070]: DEBUG oslo_vmware.api [None req-783a44c6-89e7-446a-8701-ffc7ee9343d8 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Task: {'id': task-1121951, 'name': RemoveSnapshot_Task, 'duration_secs': 0.393672} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 878.207282] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-783a44c6-89e7-446a-8701-ffc7ee9343d8 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 62758a38-4819-4d5a-97ed-db6c9ceb97bf] Deleted Snapshot of the VM instance {{(pid=62070) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 878.207571] env[62070]: DEBUG nova.compute.manager [None req-783a44c6-89e7-446a-8701-ffc7ee9343d8 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 62758a38-4819-4d5a-97ed-db6c9ceb97bf] Checking state {{(pid=62070) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 878.208476] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43f73394-6082-423d-aab2-1a85faa8525a {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.228654] env[62070]: DEBUG oslo_concurrency.lockutils [None req-ed2e6aa3-cd60-479b-9970-e6fa21508aa9 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Acquiring lock "4a5f644a-1670-4c6b-a762-f87f1ee4cce5" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 878.228994] env[62070]: DEBUG oslo_concurrency.lockutils [None req-ed2e6aa3-cd60-479b-9970-e6fa21508aa9 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Lock "4a5f644a-1670-4c6b-a762-f87f1ee4cce5" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=62070) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 878.229284] env[62070]: DEBUG nova.compute.manager [None req-ed2e6aa3-cd60-479b-9970-e6fa21508aa9 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] [instance: 4a5f644a-1670-4c6b-a762-f87f1ee4cce5] Checking state {{(pid=62070) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 878.230186] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cfb35bf-ad60-4729-949c-7240e16f54fb {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.237436] env[62070]: DEBUG nova.compute.manager [None req-ed2e6aa3-cd60-479b-9970-e6fa21508aa9 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] [instance: 4a5f644a-1670-4c6b-a762-f87f1ee4cce5] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62070) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3368}} [ 878.237840] env[62070]: DEBUG nova.objects.instance [None req-ed2e6aa3-cd60-479b-9970-e6fa21508aa9 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Lazy-loading 'flavor' on Instance uuid 4a5f644a-1670-4c6b-a762-f87f1ee4cce5 {{(pid=62070) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 878.518641] env[62070]: DEBUG nova.compute.manager [None req-24d48b36-01e1-4177-8824-76508bde5cc0 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] [instance: 21bcb1a6-833b-48f3-8ee2-0e49c64a104f] Start spawning the instance on the hypervisor. 
{{(pid=62070) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 878.545608] env[62070]: DEBUG nova.virt.hardware [None req-24d48b36-01e1-4177-8824-76508bde5cc0 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T09:21:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T09:21:17Z,direct_url=,disk_format='vmdk',id=43ea607c-7ece-4601-9b11-75c6a16aa7dd,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9d42cb2bbadf40d6b35f237f71234611',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T09:21:18Z,virtual_size=,visibility=), allow threads: False {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 878.545874] env[62070]: DEBUG nova.virt.hardware [None req-24d48b36-01e1-4177-8824-76508bde5cc0 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Flavor limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 878.546054] env[62070]: DEBUG nova.virt.hardware [None req-24d48b36-01e1-4177-8824-76508bde5cc0 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Image limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 878.546250] env[62070]: DEBUG nova.virt.hardware [None req-24d48b36-01e1-4177-8824-76508bde5cc0 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Flavor pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 878.546404] env[62070]: DEBUG nova.virt.hardware [None req-24d48b36-01e1-4177-8824-76508bde5cc0 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Image pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 878.546557] env[62070]: DEBUG nova.virt.hardware [None req-24d48b36-01e1-4177-8824-76508bde5cc0 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 878.546790] env[62070]: DEBUG nova.virt.hardware [None req-24d48b36-01e1-4177-8824-76508bde5cc0 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 878.546973] env[62070]: DEBUG nova.virt.hardware [None req-24d48b36-01e1-4177-8824-76508bde5cc0 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 878.547545] env[62070]: DEBUG 
nova.virt.hardware [None req-24d48b36-01e1-4177-8824-76508bde5cc0 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Got 1 possible topologies {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 878.547545] env[62070]: DEBUG nova.virt.hardware [None req-24d48b36-01e1-4177-8824-76508bde5cc0 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 878.547545] env[62070]: DEBUG nova.virt.hardware [None req-24d48b36-01e1-4177-8824-76508bde5cc0 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 878.548543] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-822b8042-cf28-4615-b68a-0aa0a6eee8d5 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.556673] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e74a9e1-4fa3-4c66-8bab-b6154973c2bd {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.610312] env[62070]: DEBUG nova.compute.utils [None req-b245da95-b82c-4fb0-92b9-b49d402a2811 tempest-ServerRescueTestJSONUnderV235-557690179 tempest-ServerRescueTestJSONUnderV235-557690179-project-member] Using /dev/sd instead of None {{(pid=62070) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 878.613177] env[62070]: DEBUG nova.compute.manager [None req-b245da95-b82c-4fb0-92b9-b49d402a2811 tempest-ServerRescueTestJSONUnderV235-557690179 tempest-ServerRescueTestJSONUnderV235-557690179-project-member] [instance: 559eee5b-0834-4dcf-a436-5e58644c7a3b] Allocating IP information in the background. {{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 878.613177] env[62070]: DEBUG nova.network.neutron [None req-b245da95-b82c-4fb0-92b9-b49d402a2811 tempest-ServerRescueTestJSONUnderV235-557690179 tempest-ServerRescueTestJSONUnderV235-557690179-project-member] [instance: 559eee5b-0834-4dcf-a436-5e58644c7a3b] allocate_for_instance() {{(pid=62070) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 878.614840] env[62070]: DEBUG oslo_concurrency.lockutils [None req-7045d499-6270-457a-a3fe-e34d5ae00193 tempest-ServerGroupTestJSON-951679134 tempest-ServerGroupTestJSON-951679134-project-member] Acquiring lock "refresh_cache-71c98ac8-4149-448b-bf0c-3bfdcc8f50ef" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 878.629576] env[62070]: DEBUG oslo_vmware.api [None req-cdabb9ea-ead2-4381-8d7a-de546f5dce91 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Task: {'id': task-1121952, 'name': ReconfigVM_Task, 'duration_secs': 0.294311} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 878.630047] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-cdabb9ea-ead2-4381-8d7a-de546f5dce91 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 58146b84-7589-4f21-bdab-605cee535e55] Reconfigured VM instance instance-00000049 to attach disk [datastore2] 58146b84-7589-4f21-bdab-605cee535e55/58146b84-7589-4f21-bdab-605cee535e55.vmdk or device None with type streamOptimized {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 878.630568] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ebdcd744-37a4-4c77-8b3a-e9e5f97112b4 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.636825] env[62070]: DEBUG oslo_vmware.api [None req-cdabb9ea-ead2-4381-8d7a-de546f5dce91 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Waiting for the task: (returnval){ [ 878.636825] env[62070]: value = "task-1121953" [ 878.636825] env[62070]: _type = "Task" [ 878.636825] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 878.645380] env[62070]: DEBUG oslo_vmware.api [None req-cdabb9ea-ead2-4381-8d7a-de546f5dce91 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Task: {'id': task-1121953, 'name': Rename_Task} progress is 5%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 878.655227] env[62070]: DEBUG oslo_vmware.api [None req-1e327dae-ecdb-4e8f-89d3-84ae5f947ccb tempest-ServersAdmin275Test-485220002 tempest-ServersAdmin275Test-485220002-project-admin] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52bbb992-0892-e8d7-363b-d28708d08936, 'name': SearchDatastore_Task, 'duration_secs': 0.009271} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 878.655501] env[62070]: DEBUG oslo_concurrency.lockutils [None req-1e327dae-ecdb-4e8f-89d3-84ae5f947ccb tempest-ServersAdmin275Test-485220002 tempest-ServersAdmin275Test-485220002-project-admin] Releasing lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 878.655764] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-1e327dae-ecdb-4e8f-89d3-84ae5f947ccb tempest-ServersAdmin275Test-485220002 tempest-ServersAdmin275Test-485220002-project-admin] Copying Virtual Disk [datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore2] 61ab347d-1342-4f59-8955-10d575993b77/61ab347d-1342-4f59-8955-10d575993b77.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 878.656601] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e3cf98b5-1c9a-4ee5-9811-25e81131a13b {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.663253] env[62070]: DEBUG oslo_vmware.api [None req-1e327dae-ecdb-4e8f-89d3-84ae5f947ccb tempest-ServersAdmin275Test-485220002 tempest-ServersAdmin275Test-485220002-project-admin] Waiting for the task: (returnval){ [ 878.663253] env[62070]: value = "task-1121954" [ 878.663253] env[62070]: _type = "Task" [ 878.663253] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 878.671725] env[62070]: DEBUG oslo_vmware.api [None req-1e327dae-ecdb-4e8f-89d3-84ae5f947ccb tempest-ServersAdmin275Test-485220002 tempest-ServersAdmin275Test-485220002-project-admin] Task: {'id': task-1121954, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 878.701316] env[62070]: DEBUG nova.network.neutron [req-f9f51e5d-a160-404b-b6c8-d48618fb0c8d req-ed691189-af64-4afd-af24-a15b306bf55c service nova] [instance: 71c98ac8-4149-448b-bf0c-3bfdcc8f50ef] Instance cache missing network info. 
{{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 878.704914] env[62070]: DEBUG nova.policy [None req-b245da95-b82c-4fb0-92b9-b49d402a2811 tempest-ServerRescueTestJSONUnderV235-557690179 tempest-ServerRescueTestJSONUnderV235-557690179-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c87e1a3eb9a34101b9caf2c93ded3c4f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '647582b6f4a048aea74c761f4e136a34', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62070) authorize /opt/stack/nova/nova/policy.py:201}} [ 878.722947] env[62070]: INFO nova.compute.manager [None req-783a44c6-89e7-446a-8701-ffc7ee9343d8 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 62758a38-4819-4d5a-97ed-db6c9ceb97bf] Shelve offloading [ 878.724696] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-783a44c6-89e7-446a-8701-ffc7ee9343d8 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 62758a38-4819-4d5a-97ed-db6c9ceb97bf] Powering off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 878.724966] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-751dfb2f-0f0f-4f19-aeb7-5e7e715987fc {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.732692] env[62070]: DEBUG oslo_vmware.api [None req-783a44c6-89e7-446a-8701-ffc7ee9343d8 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Waiting for the task: (returnval){ [ 878.732692] env[62070]: value = "task-1121955" [ 878.732692] env[62070]: _type = "Task" [ 878.732692] env[62070]: } to complete. 
{{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 878.744400] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-783a44c6-89e7-446a-8701-ffc7ee9343d8 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 62758a38-4819-4d5a-97ed-db6c9ceb97bf] VM already powered off {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1509}} [ 878.744546] env[62070]: DEBUG nova.compute.manager [None req-783a44c6-89e7-446a-8701-ffc7ee9343d8 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 62758a38-4819-4d5a-97ed-db6c9ceb97bf] Checking state {{(pid=62070) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 878.745163] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-ed2e6aa3-cd60-479b-9970-e6fa21508aa9 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] [instance: 4a5f644a-1670-4c6b-a762-f87f1ee4cce5] Powering off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 878.745904] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3039654-803b-4dcb-8b03-8b7c205efd7a {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.750658] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a9138594-afb3-4aea-8360-825cd7d20ab4 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.756101] env[62070]: DEBUG oslo_concurrency.lockutils [None req-783a44c6-89e7-446a-8701-ffc7ee9343d8 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Acquiring lock "refresh_cache-62758a38-4819-4d5a-97ed-db6c9ceb97bf" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 878.756441] env[62070]: DEBUG oslo_concurrency.lockutils [None req-783a44c6-89e7-446a-8701-ffc7ee9343d8 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Acquired lock "refresh_cache-62758a38-4819-4d5a-97ed-db6c9ceb97bf" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 878.756704] env[62070]: DEBUG nova.network.neutron [None req-783a44c6-89e7-446a-8701-ffc7ee9343d8 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 62758a38-4819-4d5a-97ed-db6c9ceb97bf] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 878.759441] env[62070]: DEBUG oslo_vmware.api [None req-ed2e6aa3-cd60-479b-9970-e6fa21508aa9 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Waiting for the task: (returnval){ [ 878.759441] env[62070]: value = "task-1121956" [ 878.759441] env[62070]: _type = "Task" [ 878.759441] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 878.768063] env[62070]: DEBUG oslo_vmware.api [None req-ed2e6aa3-cd60-479b-9970-e6fa21508aa9 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Task: {'id': task-1121956, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 878.857021] env[62070]: DEBUG nova.network.neutron [req-f9f51e5d-a160-404b-b6c8-d48618fb0c8d req-ed691189-af64-4afd-af24-a15b306bf55c service nova] [instance: 71c98ac8-4149-448b-bf0c-3bfdcc8f50ef] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 878.943804] env[62070]: DEBUG oslo_concurrency.lockutils [None req-8dcef4a2-dbbe-4378-9adf-7931bdf45f02 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Acquiring lock "71aead12-a182-40a7-b5a9-91c01271b800" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 878.945325] env[62070]: DEBUG oslo_concurrency.lockutils [None req-8dcef4a2-dbbe-4378-9adf-7931bdf45f02 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Lock "71aead12-a182-40a7-b5a9-91c01271b800" acquired by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: waited 0.001s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 878.945325] env[62070]: INFO nova.compute.manager [None req-8dcef4a2-dbbe-4378-9adf-7931bdf45f02 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 71aead12-a182-40a7-b5a9-91c01271b800] Rebooting instance [ 879.117488] env[62070]: DEBUG nova.compute.manager [None req-b245da95-b82c-4fb0-92b9-b49d402a2811 tempest-ServerRescueTestJSONUnderV235-557690179 tempest-ServerRescueTestJSONUnderV235-557690179-project-member] [instance: 559eee5b-0834-4dcf-a436-5e58644c7a3b] Start building block device mappings for instance. {{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 879.150488] env[62070]: DEBUG oslo_vmware.api [None req-cdabb9ea-ead2-4381-8d7a-de546f5dce91 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Task: {'id': task-1121953, 'name': Rename_Task, 'duration_secs': 0.132629} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 879.151573] env[62070]: DEBUG nova.network.neutron [None req-b245da95-b82c-4fb0-92b9-b49d402a2811 tempest-ServerRescueTestJSONUnderV235-557690179 tempest-ServerRescueTestJSONUnderV235-557690179-project-member] [instance: 559eee5b-0834-4dcf-a436-5e58644c7a3b] Successfully created port: c24d842f-4fc0-417f-a913-acda1bd7c41b {{(pid=62070) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 879.157447] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-cdabb9ea-ead2-4381-8d7a-de546f5dce91 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 58146b84-7589-4f21-bdab-605cee535e55] Powering on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 879.158275] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-12e3fc27-9f05-453b-9ce7-3948c4510e27 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.171441] env[62070]: DEBUG oslo_vmware.api [None req-cdabb9ea-ead2-4381-8d7a-de546f5dce91 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Waiting for the task: (returnval){ [ 879.171441] env[62070]: value = "task-1121957" [ 879.171441] env[62070]: _type = "Task" [ 879.171441] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 879.179892] env[62070]: DEBUG oslo_vmware.api [None req-1e327dae-ecdb-4e8f-89d3-84ae5f947ccb tempest-ServersAdmin275Test-485220002 tempest-ServersAdmin275Test-485220002-project-admin] Task: {'id': task-1121954, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 879.186671] env[62070]: DEBUG oslo_vmware.api [None req-cdabb9ea-ead2-4381-8d7a-de546f5dce91 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Task: {'id': task-1121957, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 879.205978] env[62070]: DEBUG nova.compute.manager [req-5ee620b3-4a3b-4614-b9a7-dee8414dd190 req-66b3bbc7-0cb0-49c4-80b1-cd569535904d service nova] [instance: 21bcb1a6-833b-48f3-8ee2-0e49c64a104f] Received event network-vif-plugged-45420f68-e309-4569-8dac-28e16d9417d7 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 879.205978] env[62070]: DEBUG oslo_concurrency.lockutils [req-5ee620b3-4a3b-4614-b9a7-dee8414dd190 req-66b3bbc7-0cb0-49c4-80b1-cd569535904d service nova] Acquiring lock "21bcb1a6-833b-48f3-8ee2-0e49c64a104f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 879.205978] env[62070]: DEBUG oslo_concurrency.lockutils [req-5ee620b3-4a3b-4614-b9a7-dee8414dd190 req-66b3bbc7-0cb0-49c4-80b1-cd569535904d service nova] Lock "21bcb1a6-833b-48f3-8ee2-0e49c64a104f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 879.206240] env[62070]: DEBUG oslo_concurrency.lockutils [req-5ee620b3-4a3b-4614-b9a7-dee8414dd190 req-66b3bbc7-0cb0-49c4-80b1-cd569535904d service nova] Lock "21bcb1a6-833b-48f3-8ee2-0e49c64a104f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 879.206481] env[62070]: DEBUG nova.compute.manager [req-5ee620b3-4a3b-4614-b9a7-dee8414dd190 req-66b3bbc7-0cb0-49c4-80b1-cd569535904d service nova] [instance: 21bcb1a6-833b-48f3-8ee2-0e49c64a104f] No waiting events found dispatching network-vif-plugged-45420f68-e309-4569-8dac-28e16d9417d7 {{(pid=62070) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 879.206679] env[62070]: WARNING nova.compute.manager [req-5ee620b3-4a3b-4614-b9a7-dee8414dd190 req-66b3bbc7-0cb0-49c4-80b1-cd569535904d service nova] [instance: 21bcb1a6-833b-48f3-8ee2-0e49c64a104f] Received unexpected event network-vif-plugged-45420f68-e309-4569-8dac-28e16d9417d7 for instance with vm_state building and task_state spawning. [ 879.274717] env[62070]: DEBUG nova.network.neutron [None req-24d48b36-01e1-4177-8824-76508bde5cc0 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] [instance: 21bcb1a6-833b-48f3-8ee2-0e49c64a104f] Successfully updated port: 45420f68-e309-4569-8dac-28e16d9417d7 {{(pid=62070) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 879.285019] env[62070]: DEBUG oslo_vmware.api [None req-ed2e6aa3-cd60-479b-9970-e6fa21508aa9 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Task: {'id': task-1121956, 'name': PowerOffVM_Task, 'duration_secs': 0.213849} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 879.285019] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-ed2e6aa3-cd60-479b-9970-e6fa21508aa9 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] [instance: 4a5f644a-1670-4c6b-a762-f87f1ee4cce5] Powered off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 879.285019] env[62070]: DEBUG nova.compute.manager [None req-ed2e6aa3-cd60-479b-9970-e6fa21508aa9 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] [instance: 4a5f644a-1670-4c6b-a762-f87f1ee4cce5] Checking state {{(pid=62070) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 879.287146] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-026d72e3-0969-4adb-879d-56e6081d5711 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.357366] env[62070]: DEBUG oslo_concurrency.lockutils [req-f9f51e5d-a160-404b-b6c8-d48618fb0c8d req-ed691189-af64-4afd-af24-a15b306bf55c service nova] Releasing lock "refresh_cache-71c98ac8-4149-448b-bf0c-3bfdcc8f50ef" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 879.358101] env[62070]: DEBUG oslo_concurrency.lockutils [None req-7045d499-6270-457a-a3fe-e34d5ae00193 tempest-ServerGroupTestJSON-951679134 tempest-ServerGroupTestJSON-951679134-project-member] Acquired lock "refresh_cache-71c98ac8-4149-448b-bf0c-3bfdcc8f50ef" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 879.358278] env[62070]: DEBUG nova.network.neutron [None req-7045d499-6270-457a-a3fe-e34d5ae00193 tempest-ServerGroupTestJSON-951679134 tempest-ServerGroupTestJSON-951679134-project-member] [instance: 71c98ac8-4149-448b-bf0c-3bfdcc8f50ef] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 879.470792] env[62070]: DEBUG oslo_concurrency.lockutils [None req-8dcef4a2-dbbe-4378-9adf-7931bdf45f02 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Acquiring lock "refresh_cache-71aead12-a182-40a7-b5a9-91c01271b800" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 879.471101] env[62070]: DEBUG oslo_concurrency.lockutils [None req-8dcef4a2-dbbe-4378-9adf-7931bdf45f02 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Acquired lock "refresh_cache-71aead12-a182-40a7-b5a9-91c01271b800" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 879.471381] env[62070]: DEBUG nova.network.neutron [None req-8dcef4a2-dbbe-4378-9adf-7931bdf45f02 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 71aead12-a182-40a7-b5a9-91c01271b800] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 879.559790] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-870ee29c-5a19-4901-b304-8bbd0bf457ba {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.572703] env[62070]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e7ef0de-768b-44ab-9650-d28d34f603c3 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.603990] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e17d5af-4388-4179-b307-ae164c268c36 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.615345] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ded6c77-e345-4c5f-920e-1fd1f090e86c {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.628665] env[62070]: DEBUG nova.compute.provider_tree [None req-f5579827-8e9e-49d8-9717-4039d0d0bfc5 tempest-ServerAddressesNegativeTestJSON-1593054557 tempest-ServerAddressesNegativeTestJSON-1593054557-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 879.679202] env[62070]: DEBUG oslo_vmware.api [None req-1e327dae-ecdb-4e8f-89d3-84ae5f947ccb tempest-ServersAdmin275Test-485220002 tempest-ServersAdmin275Test-485220002-project-admin] Task: {'id': task-1121954, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.59788} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 879.679909] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-1e327dae-ecdb-4e8f-89d3-84ae5f947ccb tempest-ServersAdmin275Test-485220002 tempest-ServersAdmin275Test-485220002-project-admin] Copied Virtual Disk [datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore2] 61ab347d-1342-4f59-8955-10d575993b77/61ab347d-1342-4f59-8955-10d575993b77.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 879.680102] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-1e327dae-ecdb-4e8f-89d3-84ae5f947ccb tempest-ServersAdmin275Test-485220002 tempest-ServersAdmin275Test-485220002-project-admin] [instance: 61ab347d-1342-4f59-8955-10d575993b77] Extending root virtual disk to 1048576 {{(pid=62070) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 879.681033] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3d49eb6d-624e-40a3-ba3b-d82ab6f01718 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.688527] env[62070]: DEBUG oslo_vmware.api [None req-cdabb9ea-ead2-4381-8d7a-de546f5dce91 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Task: {'id': task-1121957, 'name': PowerOnVM_Task, 'duration_secs': 0.502612} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 879.688527] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-cdabb9ea-ead2-4381-8d7a-de546f5dce91 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 58146b84-7589-4f21-bdab-605cee535e55] Powered on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 879.688527] env[62070]: INFO nova.compute.manager [None req-cdabb9ea-ead2-4381-8d7a-de546f5dce91 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 58146b84-7589-4f21-bdab-605cee535e55] Took 14.44 seconds to spawn the instance on the hypervisor. [ 879.688527] env[62070]: DEBUG nova.compute.manager [None req-cdabb9ea-ead2-4381-8d7a-de546f5dce91 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 58146b84-7589-4f21-bdab-605cee535e55] Checking state {{(pid=62070) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 879.688527] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19e645fd-9fda-48f6-9793-ad6189351be4 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.692480] env[62070]: DEBUG oslo_vmware.api [None req-1e327dae-ecdb-4e8f-89d3-84ae5f947ccb tempest-ServersAdmin275Test-485220002 tempest-ServersAdmin275Test-485220002-project-admin] Waiting for the task: (returnval){ [ 879.692480] env[62070]: value = "task-1121958" [ 879.692480] env[62070]: _type = "Task" [ 879.692480] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 879.703012] env[62070]: DEBUG nova.network.neutron [None req-783a44c6-89e7-446a-8701-ffc7ee9343d8 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 62758a38-4819-4d5a-97ed-db6c9ceb97bf] Updating instance_info_cache with network_info: [{"id": "c4a1b7aa-611b-422e-9678-70513f52b764", "address": "fa:16:3e:12:de:d6", "network": {"id": "5ea0fffc-372c-450e-b27b-10959077d58f", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1853458988-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9191f0e6c2ee401abca64c0780e230bf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f3c995e9-7f2f-420c-880a-d60da6e708ad", "external-id": "nsx-vlan-transportzone-166", "segmentation_id": 166, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc4a1b7aa-61", "ovs_interfaceid": "c4a1b7aa-611b-422e-9678-70513f52b764", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 879.709637] env[62070]: DEBUG oslo_vmware.api [None req-1e327dae-ecdb-4e8f-89d3-84ae5f947ccb tempest-ServersAdmin275Test-485220002 tempest-ServersAdmin275Test-485220002-project-admin] Task: 
{'id': task-1121958, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 879.785460] env[62070]: DEBUG oslo_concurrency.lockutils [None req-24d48b36-01e1-4177-8824-76508bde5cc0 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Acquiring lock "refresh_cache-21bcb1a6-833b-48f3-8ee2-0e49c64a104f" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 879.785460] env[62070]: DEBUG oslo_concurrency.lockutils [None req-24d48b36-01e1-4177-8824-76508bde5cc0 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Acquired lock "refresh_cache-21bcb1a6-833b-48f3-8ee2-0e49c64a104f" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 879.785460] env[62070]: DEBUG nova.network.neutron [None req-24d48b36-01e1-4177-8824-76508bde5cc0 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] [instance: 21bcb1a6-833b-48f3-8ee2-0e49c64a104f] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 879.800276] env[62070]: DEBUG oslo_concurrency.lockutils [None req-ed2e6aa3-cd60-479b-9970-e6fa21508aa9 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Lock "4a5f644a-1670-4c6b-a762-f87f1ee4cce5" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.571s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 879.890989] env[62070]: DEBUG nova.network.neutron [None req-7045d499-6270-457a-a3fe-e34d5ae00193 tempest-ServerGroupTestJSON-951679134 tempest-ServerGroupTestJSON-951679134-project-member] [instance: 71c98ac8-4149-448b-bf0c-3bfdcc8f50ef] Instance cache missing network info. 
{{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 880.078662] env[62070]: DEBUG nova.network.neutron [None req-7045d499-6270-457a-a3fe-e34d5ae00193 tempest-ServerGroupTestJSON-951679134 tempest-ServerGroupTestJSON-951679134-project-member] [instance: 71c98ac8-4149-448b-bf0c-3bfdcc8f50ef] Updating instance_info_cache with network_info: [{"id": "344dfad7-d2a7-4532-b01b-71b3a8e2b88a", "address": "fa:16:3e:0b:52:b3", "network": {"id": "1f62ef09-8fb6-48c6-b74a-b85b95c92f1f", "bridge": "br-int", "label": "tempest-ServerGroupTestJSON-2058463056-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b85884b5b2dd4099b4e0f9eeddde33cc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "28d04eee-6dbb-491a-a999-b659c799679d", "external-id": "nsx-vlan-transportzone-501", "segmentation_id": 501, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap344dfad7-d2", "ovs_interfaceid": "344dfad7-d2a7-4532-b01b-71b3a8e2b88a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 880.131497] env[62070]: DEBUG nova.scheduler.client.report [None req-f5579827-8e9e-49d8-9717-4039d0d0bfc5 tempest-ServerAddressesNegativeTestJSON-1593054557 tempest-ServerAddressesNegativeTestJSON-1593054557-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 880.135417] env[62070]: DEBUG nova.compute.manager [None req-b245da95-b82c-4fb0-92b9-b49d402a2811 tempest-ServerRescueTestJSONUnderV235-557690179 tempest-ServerRescueTestJSONUnderV235-557690179-project-member] [instance: 559eee5b-0834-4dcf-a436-5e58644c7a3b] Start spawning the instance on the hypervisor. 
{{(pid=62070) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 880.163582] env[62070]: DEBUG nova.virt.hardware [None req-b245da95-b82c-4fb0-92b9-b49d402a2811 tempest-ServerRescueTestJSONUnderV235-557690179 tempest-ServerRescueTestJSONUnderV235-557690179-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T09:21:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T09:21:17Z,direct_url=,disk_format='vmdk',id=43ea607c-7ece-4601-9b11-75c6a16aa7dd,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9d42cb2bbadf40d6b35f237f71234611',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T09:21:18Z,virtual_size=,visibility=), allow threads: False {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 880.163892] env[62070]: DEBUG nova.virt.hardware [None req-b245da95-b82c-4fb0-92b9-b49d402a2811 tempest-ServerRescueTestJSONUnderV235-557690179 tempest-ServerRescueTestJSONUnderV235-557690179-project-member] Flavor limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 880.164069] env[62070]: DEBUG nova.virt.hardware [None req-b245da95-b82c-4fb0-92b9-b49d402a2811 tempest-ServerRescueTestJSONUnderV235-557690179 tempest-ServerRescueTestJSONUnderV235-557690179-project-member] Image limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 880.164267] env[62070]: DEBUG nova.virt.hardware [None req-b245da95-b82c-4fb0-92b9-b49d402a2811 tempest-ServerRescueTestJSONUnderV235-557690179 tempest-ServerRescueTestJSONUnderV235-557690179-project-member] Flavor pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 880.164418] env[62070]: DEBUG nova.virt.hardware [None req-b245da95-b82c-4fb0-92b9-b49d402a2811 tempest-ServerRescueTestJSONUnderV235-557690179 tempest-ServerRescueTestJSONUnderV235-557690179-project-member] Image pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 880.164577] env[62070]: DEBUG nova.virt.hardware [None req-b245da95-b82c-4fb0-92b9-b49d402a2811 tempest-ServerRescueTestJSONUnderV235-557690179 tempest-ServerRescueTestJSONUnderV235-557690179-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 880.164818] env[62070]: DEBUG nova.virt.hardware [None req-b245da95-b82c-4fb0-92b9-b49d402a2811 tempest-ServerRescueTestJSONUnderV235-557690179 tempest-ServerRescueTestJSONUnderV235-557690179-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 880.164982] env[62070]: DEBUG nova.virt.hardware [None req-b245da95-b82c-4fb0-92b9-b49d402a2811 tempest-ServerRescueTestJSONUnderV235-557690179 tempest-ServerRescueTestJSONUnderV235-557690179-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62070) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 880.165178] env[62070]: DEBUG nova.virt.hardware [None req-b245da95-b82c-4fb0-92b9-b49d402a2811 tempest-ServerRescueTestJSONUnderV235-557690179 tempest-ServerRescueTestJSONUnderV235-557690179-project-member] Got 1 possible topologies {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 880.165333] env[62070]: DEBUG nova.virt.hardware [None req-b245da95-b82c-4fb0-92b9-b49d402a2811 tempest-ServerRescueTestJSONUnderV235-557690179 tempest-ServerRescueTestJSONUnderV235-557690179-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 880.165511] env[62070]: DEBUG nova.virt.hardware [None req-b245da95-b82c-4fb0-92b9-b49d402a2811 tempest-ServerRescueTestJSONUnderV235-557690179 tempest-ServerRescueTestJSONUnderV235-557690179-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 880.166378] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3be6703b-a71a-49c2-89a0-eb7d741ff9fd {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.176058] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-739a07a7-8a31-4350-991e-29d6c2381a2d {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.201308] env[62070]: DEBUG oslo_vmware.api [None req-1e327dae-ecdb-4e8f-89d3-84ae5f947ccb tempest-ServersAdmin275Test-485220002 tempest-ServersAdmin275Test-485220002-project-admin] Task: {'id': task-1121958, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.07226} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 880.201591] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-1e327dae-ecdb-4e8f-89d3-84ae5f947ccb tempest-ServersAdmin275Test-485220002 tempest-ServersAdmin275Test-485220002-project-admin] [instance: 61ab347d-1342-4f59-8955-10d575993b77] Extended root virtual disk {{(pid=62070) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 880.202361] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9815596c-4987-4541-868a-30aa4ca3759e {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.210768] env[62070]: DEBUG oslo_concurrency.lockutils [None req-783a44c6-89e7-446a-8701-ffc7ee9343d8 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Releasing lock "refresh_cache-62758a38-4819-4d5a-97ed-db6c9ceb97bf" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 880.213216] env[62070]: INFO nova.compute.manager [None req-cdabb9ea-ead2-4381-8d7a-de546f5dce91 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 58146b84-7589-4f21-bdab-605cee535e55] Took 42.13 seconds to build instance. 
[ 880.231032] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-1e327dae-ecdb-4e8f-89d3-84ae5f947ccb tempest-ServersAdmin275Test-485220002 tempest-ServersAdmin275Test-485220002-project-admin] [instance: 61ab347d-1342-4f59-8955-10d575993b77] Reconfiguring VM instance instance-00000046 to attach disk [datastore2] 61ab347d-1342-4f59-8955-10d575993b77/61ab347d-1342-4f59-8955-10d575993b77.vmdk or device None with type sparse {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 880.231936] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-801475cb-05d3-450d-9258-ec25a892e21e {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.253294] env[62070]: DEBUG oslo_vmware.api [None req-1e327dae-ecdb-4e8f-89d3-84ae5f947ccb tempest-ServersAdmin275Test-485220002 tempest-ServersAdmin275Test-485220002-project-admin] Waiting for the task: (returnval){ [ 880.253294] env[62070]: value = "task-1121959" [ 880.253294] env[62070]: _type = "Task" [ 880.253294] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 880.271265] env[62070]: DEBUG oslo_vmware.api [None req-1e327dae-ecdb-4e8f-89d3-84ae5f947ccb tempest-ServersAdmin275Test-485220002 tempest-ServersAdmin275Test-485220002-project-admin] Task: {'id': task-1121959, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 880.338751] env[62070]: DEBUG nova.network.neutron [None req-24d48b36-01e1-4177-8824-76508bde5cc0 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] [instance: 21bcb1a6-833b-48f3-8ee2-0e49c64a104f] Instance cache missing network info. 
{{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 880.582182] env[62070]: DEBUG oslo_concurrency.lockutils [None req-7045d499-6270-457a-a3fe-e34d5ae00193 tempest-ServerGroupTestJSON-951679134 tempest-ServerGroupTestJSON-951679134-project-member] Releasing lock "refresh_cache-71c98ac8-4149-448b-bf0c-3bfdcc8f50ef" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 880.582533] env[62070]: DEBUG nova.compute.manager [None req-7045d499-6270-457a-a3fe-e34d5ae00193 tempest-ServerGroupTestJSON-951679134 tempest-ServerGroupTestJSON-951679134-project-member] [instance: 71c98ac8-4149-448b-bf0c-3bfdcc8f50ef] Instance network_info: |[{"id": "344dfad7-d2a7-4532-b01b-71b3a8e2b88a", "address": "fa:16:3e:0b:52:b3", "network": {"id": "1f62ef09-8fb6-48c6-b74a-b85b95c92f1f", "bridge": "br-int", "label": "tempest-ServerGroupTestJSON-2058463056-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b85884b5b2dd4099b4e0f9eeddde33cc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "28d04eee-6dbb-491a-a999-b659c799679d", "external-id": "nsx-vlan-transportzone-501", "segmentation_id": 501, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap344dfad7-d2", "ovs_interfaceid": "344dfad7-d2a7-4532-b01b-71b3a8e2b88a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 880.584668] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-783a44c6-89e7-446a-8701-ffc7ee9343d8 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 62758a38-4819-4d5a-97ed-db6c9ceb97bf] Destroying instance {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 880.584668] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-7045d499-6270-457a-a3fe-e34d5ae00193 tempest-ServerGroupTestJSON-951679134 tempest-ServerGroupTestJSON-951679134-project-member] [instance: 71c98ac8-4149-448b-bf0c-3bfdcc8f50ef] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0b:52:b3', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '28d04eee-6dbb-491a-a999-b659c799679d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '344dfad7-d2a7-4532-b01b-71b3a8e2b88a', 'vif_model': 'vmxnet3'}] {{(pid=62070) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 880.591843] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-7045d499-6270-457a-a3fe-e34d5ae00193 tempest-ServerGroupTestJSON-951679134 tempest-ServerGroupTestJSON-951679134-project-member] Creating folder: Project (b85884b5b2dd4099b4e0f9eeddde33cc). Parent ref: group-v245319. 
{{(pid=62070) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 880.592604] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f96fe6b-862e-456e-a78a-491edf2b6cb2 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.597992] env[62070]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9cedf857-df28-426e-92dc-3f595730ef05 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.605939] env[62070]: DEBUG nova.compute.manager [req-36774f2b-85e5-4230-ac46-980fb0629f33 req-cbe88227-04b9-4b06-ad58-613c913417ae service nova] [instance: 62758a38-4819-4d5a-97ed-db6c9ceb97bf] Received event network-vif-unplugged-c4a1b7aa-611b-422e-9678-70513f52b764 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 880.606173] env[62070]: DEBUG oslo_concurrency.lockutils [req-36774f2b-85e5-4230-ac46-980fb0629f33 req-cbe88227-04b9-4b06-ad58-613c913417ae service nova] Acquiring lock "62758a38-4819-4d5a-97ed-db6c9ceb97bf-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 880.606390] env[62070]: DEBUG oslo_concurrency.lockutils [req-36774f2b-85e5-4230-ac46-980fb0629f33 req-cbe88227-04b9-4b06-ad58-613c913417ae service nova] Lock "62758a38-4819-4d5a-97ed-db6c9ceb97bf-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 880.606559] env[62070]: DEBUG oslo_concurrency.lockutils [req-36774f2b-85e5-4230-ac46-980fb0629f33 req-cbe88227-04b9-4b06-ad58-613c913417ae service nova] Lock "62758a38-4819-4d5a-97ed-db6c9ceb97bf-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 880.606732] env[62070]: DEBUG nova.compute.manager [req-36774f2b-85e5-4230-ac46-980fb0629f33 req-cbe88227-04b9-4b06-ad58-613c913417ae service nova] [instance: 62758a38-4819-4d5a-97ed-db6c9ceb97bf] No waiting events found dispatching network-vif-unplugged-c4a1b7aa-611b-422e-9678-70513f52b764 {{(pid=62070) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 880.606944] env[62070]: WARNING nova.compute.manager [req-36774f2b-85e5-4230-ac46-980fb0629f33 req-cbe88227-04b9-4b06-ad58-613c913417ae service nova] [instance: 62758a38-4819-4d5a-97ed-db6c9ceb97bf] Received unexpected event network-vif-unplugged-c4a1b7aa-611b-422e-9678-70513f52b764 for instance with vm_state shelved and task_state shelving_offloading. 
[ 880.609826] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-783a44c6-89e7-446a-8701-ffc7ee9343d8 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 62758a38-4819-4d5a-97ed-db6c9ceb97bf] Unregistering the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 880.611060] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d5cf1722-c2c7-45df-9a18-96d78d164861 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.612563] env[62070]: INFO nova.virt.vmwareapi.vm_util [None req-7045d499-6270-457a-a3fe-e34d5ae00193 tempest-ServerGroupTestJSON-951679134 tempest-ServerGroupTestJSON-951679134-project-member] Created folder: Project (b85884b5b2dd4099b4e0f9eeddde33cc) in parent group-v245319. [ 880.612756] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-7045d499-6270-457a-a3fe-e34d5ae00193 tempest-ServerGroupTestJSON-951679134 tempest-ServerGroupTestJSON-951679134-project-member] Creating folder: Instances. Parent ref: group-v245441. {{(pid=62070) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 880.613260] env[62070]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-700bddbe-28cf-4008-b199-32a68ccf68f0 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.623606] env[62070]: INFO nova.virt.vmwareapi.vm_util [None req-7045d499-6270-457a-a3fe-e34d5ae00193 tempest-ServerGroupTestJSON-951679134 tempest-ServerGroupTestJSON-951679134-project-member] Created folder: Instances in parent group-v245441. [ 880.623816] env[62070]: DEBUG oslo.service.loopingcall [None req-7045d499-6270-457a-a3fe-e34d5ae00193 tempest-ServerGroupTestJSON-951679134 tempest-ServerGroupTestJSON-951679134-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 880.624091] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 71c98ac8-4149-448b-bf0c-3bfdcc8f50ef] Creating VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 880.624237] env[62070]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-96778a49-6407-41e9-8f65-b35e7218532d {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.653382] env[62070]: DEBUG oslo_concurrency.lockutils [None req-f5579827-8e9e-49d8-9717-4039d0d0bfc5 tempest-ServerAddressesNegativeTestJSON-1593054557 tempest-ServerAddressesNegativeTestJSON-1593054557-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.551s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 880.654245] env[62070]: DEBUG nova.compute.manager [None req-f5579827-8e9e-49d8-9717-4039d0d0bfc5 tempest-ServerAddressesNegativeTestJSON-1593054557 tempest-ServerAddressesNegativeTestJSON-1593054557-project-member] [instance: d2cfcfac-4f15-4b16-9046-76722ee2e39b] Start building networks asynchronously for instance. 
{{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 880.659124] env[62070]: DEBUG oslo_concurrency.lockutils [None req-361b1276-74ee-478a-b8ef-42cc811e08e6 tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 23.469s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 880.659124] env[62070]: DEBUG nova.objects.instance [None req-361b1276-74ee-478a-b8ef-42cc811e08e6 tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Lazy-loading 'resources' on Instance uuid 27987ff6-77c9-4876-8b39-dcc20ce4158a {{(pid=62070) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 880.665058] env[62070]: DEBUG nova.network.neutron [None req-24d48b36-01e1-4177-8824-76508bde5cc0 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] [instance: 21bcb1a6-833b-48f3-8ee2-0e49c64a104f] Updating instance_info_cache with network_info: [{"id": "45420f68-e309-4569-8dac-28e16d9417d7", "address": "fa:16:3e:08:c2:70", "network": {"id": "48dc51c7-cfa4-452e-9d72-2968d9a40dfa", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-274800531-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "925dff51764c4b56ae7ea05fbde2ecdd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2c7c1b46-cb81-45da-b5aa-7905d4da5854", "external-id": "nsx-vlan-transportzone-15", "segmentation_id": 15, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap45420f68-e3", "ovs_interfaceid": "45420f68-e309-4569-8dac-28e16d9417d7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 880.667542] env[62070]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 880.667542] env[62070]: value = "task-1121963" [ 880.667542] env[62070]: _type = "Task" [ 880.667542] env[62070]: } to complete. 
{{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 880.668535] env[62070]: DEBUG nova.network.neutron [None req-8dcef4a2-dbbe-4378-9adf-7931bdf45f02 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 71aead12-a182-40a7-b5a9-91c01271b800] Updating instance_info_cache with network_info: [{"id": "a3ed0957-14c2-4144-8d45-f4a0e5cb45ab", "address": "fa:16:3e:3c:6a:3d", "network": {"id": "1ca75409-ae3f-4837-a2b6-ed2445253336", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1883034081-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.208", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f29ac48ab6544ec0bd1d210aec05dbc5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1559ce49-7345-443f-bf02-4bfeb88356ef", "external-id": "nsx-vlan-transportzone-670", "segmentation_id": 670, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa3ed0957-14", "ovs_interfaceid": "a3ed0957-14c2-4144-8d45-f4a0e5cb45ab", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 880.681601] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1121963, 'name': CreateVM_Task} progress is 5%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 880.685197] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-783a44c6-89e7-446a-8701-ffc7ee9343d8 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 62758a38-4819-4d5a-97ed-db6c9ceb97bf] Unregistered the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 880.685197] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-783a44c6-89e7-446a-8701-ffc7ee9343d8 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 62758a38-4819-4d5a-97ed-db6c9ceb97bf] Deleting contents of the VM from datastore datastore2 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 880.685197] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-783a44c6-89e7-446a-8701-ffc7ee9343d8 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Deleting the datastore file [datastore2] 62758a38-4819-4d5a-97ed-db6c9ceb97bf {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 880.685706] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-145b9a6a-b5b8-4b0b-a63c-53ee9c694f83 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.693906] env[62070]: DEBUG oslo_vmware.api [None req-783a44c6-89e7-446a-8701-ffc7ee9343d8 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Waiting for the task: (returnval){ [ 880.693906] env[62070]: value = "task-1121964" [ 880.693906] env[62070]: _type = "Task" [ 880.693906] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 880.706526] env[62070]: DEBUG oslo_vmware.api [None req-783a44c6-89e7-446a-8701-ffc7ee9343d8 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Task: {'id': task-1121964, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 880.715535] env[62070]: DEBUG oslo_concurrency.lockutils [None req-cdabb9ea-ead2-4381-8d7a-de546f5dce91 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Lock "58146b84-7589-4f21-bdab-605cee535e55" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 47.233s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 880.764024] env[62070]: DEBUG oslo_vmware.api [None req-1e327dae-ecdb-4e8f-89d3-84ae5f947ccb tempest-ServersAdmin275Test-485220002 tempest-ServersAdmin275Test-485220002-project-admin] Task: {'id': task-1121959, 'name': ReconfigVM_Task, 'duration_secs': 0.257337} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 880.764024] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-1e327dae-ecdb-4e8f-89d3-84ae5f947ccb tempest-ServersAdmin275Test-485220002 tempest-ServersAdmin275Test-485220002-project-admin] [instance: 61ab347d-1342-4f59-8955-10d575993b77] Reconfigured VM instance instance-00000046 to attach disk [datastore2] 61ab347d-1342-4f59-8955-10d575993b77/61ab347d-1342-4f59-8955-10d575993b77.vmdk or device None with type sparse {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 880.764217] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9c571435-e003-4f9f-8834-b902fb7a1ad2 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.770224] env[62070]: DEBUG oslo_vmware.api [None req-1e327dae-ecdb-4e8f-89d3-84ae5f947ccb tempest-ServersAdmin275Test-485220002 tempest-ServersAdmin275Test-485220002-project-admin] Waiting for the task: (returnval){ [ 880.770224] env[62070]: value = "task-1121965" [ 880.770224] env[62070]: _type = "Task" [ 880.770224] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 880.778199] env[62070]: DEBUG oslo_vmware.api [None req-1e327dae-ecdb-4e8f-89d3-84ae5f947ccb tempest-ServersAdmin275Test-485220002 tempest-ServersAdmin275Test-485220002-project-admin] Task: {'id': task-1121965, 'name': Rename_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 881.064105] env[62070]: DEBUG nova.network.neutron [None req-b245da95-b82c-4fb0-92b9-b49d402a2811 tempest-ServerRescueTestJSONUnderV235-557690179 tempest-ServerRescueTestJSONUnderV235-557690179-project-member] [instance: 559eee5b-0834-4dcf-a436-5e58644c7a3b] Successfully updated port: c24d842f-4fc0-417f-a913-acda1bd7c41b {{(pid=62070) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 881.159039] env[62070]: DEBUG nova.compute.utils [None req-f5579827-8e9e-49d8-9717-4039d0d0bfc5 tempest-ServerAddressesNegativeTestJSON-1593054557 tempest-ServerAddressesNegativeTestJSON-1593054557-project-member] Using /dev/sd instead of None {{(pid=62070) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 881.161364] env[62070]: DEBUG nova.compute.manager [None req-f5579827-8e9e-49d8-9717-4039d0d0bfc5 tempest-ServerAddressesNegativeTestJSON-1593054557 tempest-ServerAddressesNegativeTestJSON-1593054557-project-member] [instance: d2cfcfac-4f15-4b16-9046-76722ee2e39b] Allocating IP information in the background. 
{{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 881.161563] env[62070]: DEBUG nova.network.neutron [None req-f5579827-8e9e-49d8-9717-4039d0d0bfc5 tempest-ServerAddressesNegativeTestJSON-1593054557 tempest-ServerAddressesNegativeTestJSON-1593054557-project-member] [instance: d2cfcfac-4f15-4b16-9046-76722ee2e39b] allocate_for_instance() {{(pid=62070) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 881.166901] env[62070]: DEBUG nova.objects.instance [None req-5b0ea8b7-b917-423f-a690-eae734229813 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Lazy-loading 'flavor' on Instance uuid 4a5f644a-1670-4c6b-a762-f87f1ee4cce5 {{(pid=62070) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 881.168221] env[62070]: DEBUG oslo_concurrency.lockutils [None req-24d48b36-01e1-4177-8824-76508bde5cc0 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Releasing lock "refresh_cache-21bcb1a6-833b-48f3-8ee2-0e49c64a104f" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 881.168501] env[62070]: DEBUG nova.compute.manager [None req-24d48b36-01e1-4177-8824-76508bde5cc0 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] [instance: 21bcb1a6-833b-48f3-8ee2-0e49c64a104f] Instance network_info: |[{"id": "45420f68-e309-4569-8dac-28e16d9417d7", "address": "fa:16:3e:08:c2:70", "network": {"id": "48dc51c7-cfa4-452e-9d72-2968d9a40dfa", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-274800531-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "925dff51764c4b56ae7ea05fbde2ecdd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2c7c1b46-cb81-45da-b5aa-7905d4da5854", "external-id": "nsx-vlan-transportzone-15", "segmentation_id": 15, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap45420f68-e3", "ovs_interfaceid": "45420f68-e309-4569-8dac-28e16d9417d7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 881.169196] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-24d48b36-01e1-4177-8824-76508bde5cc0 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] [instance: 21bcb1a6-833b-48f3-8ee2-0e49c64a104f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:08:c2:70', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2c7c1b46-cb81-45da-b5aa-7905d4da5854', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '45420f68-e309-4569-8dac-28e16d9417d7', 'vif_model': 'vmxnet3'}] {{(pid=62070) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 881.178109] env[62070]: DEBUG oslo.service.loopingcall [None req-24d48b36-01e1-4177-8824-76508bde5cc0 tempest-AttachInterfacesTestJSON-183791509 
tempest-AttachInterfacesTestJSON-183791509-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 881.179806] env[62070]: DEBUG oslo_concurrency.lockutils [None req-8dcef4a2-dbbe-4378-9adf-7931bdf45f02 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Releasing lock "refresh_cache-71aead12-a182-40a7-b5a9-91c01271b800" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 881.181479] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 21bcb1a6-833b-48f3-8ee2-0e49c64a104f] Creating VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 881.181942] env[62070]: DEBUG nova.compute.manager [None req-8dcef4a2-dbbe-4378-9adf-7931bdf45f02 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 71aead12-a182-40a7-b5a9-91c01271b800] Checking state {{(pid=62070) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 881.185610] env[62070]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-15884b2f-89f6-4644-bd0f-9a7de6dd6075 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.203146] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-886ee9b7-0017-4150-ae71-21a7a017a432 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.206909] env[62070]: DEBUG nova.policy [None req-f5579827-8e9e-49d8-9717-4039d0d0bfc5 tempest-ServerAddressesNegativeTestJSON-1593054557 tempest-ServerAddressesNegativeTestJSON-1593054557-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'dd9c555c175549bc9bd22cdad4b320ab', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2dae68f478d549e5991c37b80c858468', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62070) authorize /opt/stack/nova/nova/policy.py:201}} [ 881.222624] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1121963, 'name': CreateVM_Task, 'duration_secs': 0.341591} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 881.223785] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 71c98ac8-4149-448b-bf0c-3bfdcc8f50ef] Created VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 881.231623] env[62070]: DEBUG oslo_concurrency.lockutils [None req-7045d499-6270-457a-a3fe-e34d5ae00193 tempest-ServerGroupTestJSON-951679134 tempest-ServerGroupTestJSON-951679134-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 881.231816] env[62070]: DEBUG oslo_concurrency.lockutils [None req-7045d499-6270-457a-a3fe-e34d5ae00193 tempest-ServerGroupTestJSON-951679134 tempest-ServerGroupTestJSON-951679134-project-member] Acquired lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 881.232179] env[62070]: DEBUG oslo_concurrency.lockutils [None req-7045d499-6270-457a-a3fe-e34d5ae00193 tempest-ServerGroupTestJSON-951679134 tempest-ServerGroupTestJSON-951679134-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 881.232719] env[62070]: DEBUG oslo_vmware.api [None req-783a44c6-89e7-446a-8701-ffc7ee9343d8 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Task: {'id': task-1121964, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.150225} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 881.232905] env[62070]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 881.232905] env[62070]: value = "task-1121966" [ 881.232905] env[62070]: _type = "Task" [ 881.232905] env[62070]: } to complete. 
{{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 881.235619] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-00224b0a-8e9f-4144-b04f-afe08df47515 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.237426] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-783a44c6-89e7-446a-8701-ffc7ee9343d8 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Deleted the datastore file {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 881.237624] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-783a44c6-89e7-446a-8701-ffc7ee9343d8 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 62758a38-4819-4d5a-97ed-db6c9ceb97bf] Deleted contents of the VM from datastore datastore2 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 881.237811] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-783a44c6-89e7-446a-8701-ffc7ee9343d8 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 62758a38-4819-4d5a-97ed-db6c9ceb97bf] Instance destroyed {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 881.247082] env[62070]: DEBUG oslo_vmware.api [None req-7045d499-6270-457a-a3fe-e34d5ae00193 tempest-ServerGroupTestJSON-951679134 tempest-ServerGroupTestJSON-951679134-project-member] Waiting for the task: (returnval){ [ 881.247082] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]529f5ee2-1d51-853a-d5cc-dc53feb3926e" [ 881.247082] env[62070]: _type = "Task" [ 881.247082] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 881.253394] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1121966, 'name': CreateVM_Task} progress is 10%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 881.257548] env[62070]: INFO nova.scheduler.client.report [None req-783a44c6-89e7-446a-8701-ffc7ee9343d8 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Deleted allocations for instance 62758a38-4819-4d5a-97ed-db6c9ceb97bf [ 881.267848] env[62070]: DEBUG oslo_vmware.api [None req-7045d499-6270-457a-a3fe-e34d5ae00193 tempest-ServerGroupTestJSON-951679134 tempest-ServerGroupTestJSON-951679134-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]529f5ee2-1d51-853a-d5cc-dc53feb3926e, 'name': SearchDatastore_Task, 'duration_secs': 0.00974} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 881.267848] env[62070]: DEBUG oslo_concurrency.lockutils [None req-7045d499-6270-457a-a3fe-e34d5ae00193 tempest-ServerGroupTestJSON-951679134 tempest-ServerGroupTestJSON-951679134-project-member] Releasing lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 881.267848] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-7045d499-6270-457a-a3fe-e34d5ae00193 tempest-ServerGroupTestJSON-951679134 tempest-ServerGroupTestJSON-951679134-project-member] [instance: 71c98ac8-4149-448b-bf0c-3bfdcc8f50ef] Processing image 43ea607c-7ece-4601-9b11-75c6a16aa7dd {{(pid=62070) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 881.267848] env[62070]: DEBUG oslo_concurrency.lockutils [None req-7045d499-6270-457a-a3fe-e34d5ae00193 tempest-ServerGroupTestJSON-951679134 tempest-ServerGroupTestJSON-951679134-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 881.267848] env[62070]: DEBUG oslo_concurrency.lockutils [None req-7045d499-6270-457a-a3fe-e34d5ae00193 tempest-ServerGroupTestJSON-951679134 tempest-ServerGroupTestJSON-951679134-project-member] Acquired lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 881.267848] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-7045d499-6270-457a-a3fe-e34d5ae00193 tempest-ServerGroupTestJSON-951679134 tempest-ServerGroupTestJSON-951679134-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 881.267848] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-82c91092-d5f1-4ed9-b886-395c3330a452 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.279310] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-7045d499-6270-457a-a3fe-e34d5ae00193 tempest-ServerGroupTestJSON-951679134 tempest-ServerGroupTestJSON-951679134-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 881.279572] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-7045d499-6270-457a-a3fe-e34d5ae00193 tempest-ServerGroupTestJSON-951679134 tempest-ServerGroupTestJSON-951679134-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62070) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 881.280621] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-69f37055-5001-479b-9567-d9076db4ea4d {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.287332] env[62070]: DEBUG oslo_vmware.api [None req-1e327dae-ecdb-4e8f-89d3-84ae5f947ccb tempest-ServersAdmin275Test-485220002 tempest-ServersAdmin275Test-485220002-project-admin] Task: {'id': task-1121965, 'name': Rename_Task, 'duration_secs': 0.164752} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 881.288094] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-1e327dae-ecdb-4e8f-89d3-84ae5f947ccb tempest-ServersAdmin275Test-485220002 tempest-ServersAdmin275Test-485220002-project-admin] [instance: 61ab347d-1342-4f59-8955-10d575993b77] Powering on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 881.288368] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-73249b2a-e70e-41c1-ad54-4362da9c4890 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.291773] env[62070]: DEBUG oslo_vmware.api [None req-7045d499-6270-457a-a3fe-e34d5ae00193 tempest-ServerGroupTestJSON-951679134 tempest-ServerGroupTestJSON-951679134-project-member] Waiting for the task: (returnval){ [ 881.291773] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]52c68231-33c3-c317-fc56-a2ff71a4eb75" [ 881.291773] env[62070]: _type = "Task" [ 881.291773] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 881.299978] env[62070]: DEBUG oslo_vmware.api [None req-1e327dae-ecdb-4e8f-89d3-84ae5f947ccb tempest-ServersAdmin275Test-485220002 tempest-ServersAdmin275Test-485220002-project-admin] Waiting for the task: (returnval){ [ 881.299978] env[62070]: value = "task-1121967" [ 881.299978] env[62070]: _type = "Task" [ 881.299978] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 881.309619] env[62070]: DEBUG oslo_vmware.api [None req-7045d499-6270-457a-a3fe-e34d5ae00193 tempest-ServerGroupTestJSON-951679134 tempest-ServerGroupTestJSON-951679134-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52c68231-33c3-c317-fc56-a2ff71a4eb75, 'name': SearchDatastore_Task, 'duration_secs': 0.010542} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 881.313439] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-68a546d3-8244-4bed-954c-428bcb844098 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.319141] env[62070]: DEBUG oslo_vmware.api [None req-1e327dae-ecdb-4e8f-89d3-84ae5f947ccb tempest-ServersAdmin275Test-485220002 tempest-ServersAdmin275Test-485220002-project-admin] Task: {'id': task-1121967, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 881.322333] env[62070]: DEBUG oslo_vmware.api [None req-7045d499-6270-457a-a3fe-e34d5ae00193 tempest-ServerGroupTestJSON-951679134 tempest-ServerGroupTestJSON-951679134-project-member] Waiting for the task: (returnval){ [ 881.322333] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]52f80f81-2e47-f75f-0a52-3ca22c74ac74" [ 881.322333] env[62070]: _type = "Task" [ 881.322333] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 881.333072] env[62070]: DEBUG oslo_vmware.api [None req-7045d499-6270-457a-a3fe-e34d5ae00193 tempest-ServerGroupTestJSON-951679134 tempest-ServerGroupTestJSON-951679134-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52f80f81-2e47-f75f-0a52-3ca22c74ac74, 'name': SearchDatastore_Task, 'duration_secs': 0.008502} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 881.333330] env[62070]: DEBUG oslo_concurrency.lockutils [None req-7045d499-6270-457a-a3fe-e34d5ae00193 tempest-ServerGroupTestJSON-951679134 tempest-ServerGroupTestJSON-951679134-project-member] Releasing lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 881.333589] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-7045d499-6270-457a-a3fe-e34d5ae00193 tempest-ServerGroupTestJSON-951679134 tempest-ServerGroupTestJSON-951679134-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore2] 71c98ac8-4149-448b-bf0c-3bfdcc8f50ef/71c98ac8-4149-448b-bf0c-3bfdcc8f50ef.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 881.333846] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-67482649-96e9-481c-9deb-8288c3df5143 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.341964] env[62070]: DEBUG oslo_vmware.api [None req-7045d499-6270-457a-a3fe-e34d5ae00193 tempest-ServerGroupTestJSON-951679134 tempest-ServerGroupTestJSON-951679134-project-member] Waiting for the task: (returnval){ [ 881.341964] env[62070]: value = "task-1121968" [ 881.341964] env[62070]: _type = "Task" [ 881.341964] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 881.351602] env[62070]: DEBUG oslo_vmware.api [None req-7045d499-6270-457a-a3fe-e34d5ae00193 tempest-ServerGroupTestJSON-951679134 tempest-ServerGroupTestJSON-951679134-project-member] Task: {'id': task-1121968, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 881.363715] env[62070]: DEBUG nova.compute.manager [req-9d39d8b2-5287-43b3-b3e0-5316f8850c68 req-7af27ffc-bef8-4ca9-a105-e4e3b32cf7e8 service nova] [instance: 21bcb1a6-833b-48f3-8ee2-0e49c64a104f] Received event network-changed-45420f68-e309-4569-8dac-28e16d9417d7 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 881.363936] env[62070]: DEBUG nova.compute.manager [req-9d39d8b2-5287-43b3-b3e0-5316f8850c68 req-7af27ffc-bef8-4ca9-a105-e4e3b32cf7e8 service nova] [instance: 21bcb1a6-833b-48f3-8ee2-0e49c64a104f] Refreshing instance network info cache due to event network-changed-45420f68-e309-4569-8dac-28e16d9417d7. {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 881.364186] env[62070]: DEBUG oslo_concurrency.lockutils [req-9d39d8b2-5287-43b3-b3e0-5316f8850c68 req-7af27ffc-bef8-4ca9-a105-e4e3b32cf7e8 service nova] Acquiring lock "refresh_cache-21bcb1a6-833b-48f3-8ee2-0e49c64a104f" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 881.364365] env[62070]: DEBUG oslo_concurrency.lockutils [req-9d39d8b2-5287-43b3-b3e0-5316f8850c68 req-7af27ffc-bef8-4ca9-a105-e4e3b32cf7e8 service nova] Acquired lock "refresh_cache-21bcb1a6-833b-48f3-8ee2-0e49c64a104f" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 881.364497] env[62070]: DEBUG nova.network.neutron [req-9d39d8b2-5287-43b3-b3e0-5316f8850c68 req-7af27ffc-bef8-4ca9-a105-e4e3b32cf7e8 service nova] [instance: 21bcb1a6-833b-48f3-8ee2-0e49c64a104f] Refreshing network info cache for port 45420f68-e309-4569-8dac-28e16d9417d7 {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 881.540927] env[62070]: DEBUG nova.network.neutron [None req-f5579827-8e9e-49d8-9717-4039d0d0bfc5 tempest-ServerAddressesNegativeTestJSON-1593054557 tempest-ServerAddressesNegativeTestJSON-1593054557-project-member] [instance: d2cfcfac-4f15-4b16-9046-76722ee2e39b] Successfully created port: 4488d696-fdbf-44e5-9b57-3915f167f3ad {{(pid=62070) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 881.567305] env[62070]: DEBUG oslo_concurrency.lockutils [None req-b245da95-b82c-4fb0-92b9-b49d402a2811 tempest-ServerRescueTestJSONUnderV235-557690179 tempest-ServerRescueTestJSONUnderV235-557690179-project-member] Acquiring lock "refresh_cache-559eee5b-0834-4dcf-a436-5e58644c7a3b" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 881.567305] env[62070]: DEBUG oslo_concurrency.lockutils [None req-b245da95-b82c-4fb0-92b9-b49d402a2811 tempest-ServerRescueTestJSONUnderV235-557690179 tempest-ServerRescueTestJSONUnderV235-557690179-project-member] Acquired lock "refresh_cache-559eee5b-0834-4dcf-a436-5e58644c7a3b" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 881.567455] env[62070]: DEBUG nova.network.neutron [None req-b245da95-b82c-4fb0-92b9-b49d402a2811 tempest-ServerRescueTestJSONUnderV235-557690179 tempest-ServerRescueTestJSONUnderV235-557690179-project-member] [instance: 559eee5b-0834-4dcf-a436-5e58644c7a3b] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 881.644356] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-d816abdd-4620-47e5-b50e-d0435bd6a4bc {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.652862] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e42dc67-7d13-482d-a817-09c83655e3bf {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.683791] env[62070]: DEBUG nova.compute.manager [None req-f5579827-8e9e-49d8-9717-4039d0d0bfc5 tempest-ServerAddressesNegativeTestJSON-1593054557 tempest-ServerAddressesNegativeTestJSON-1593054557-project-member] [instance: d2cfcfac-4f15-4b16-9046-76722ee2e39b] Start building block device mappings for instance. {{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 881.689897] env[62070]: DEBUG oslo_concurrency.lockutils [None req-5b0ea8b7-b917-423f-a690-eae734229813 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Acquiring lock "refresh_cache-4a5f644a-1670-4c6b-a762-f87f1ee4cce5" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 881.689897] env[62070]: DEBUG oslo_concurrency.lockutils [None req-5b0ea8b7-b917-423f-a690-eae734229813 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Acquired lock "refresh_cache-4a5f644a-1670-4c6b-a762-f87f1ee4cce5" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 881.690073] env[62070]: DEBUG nova.network.neutron [None req-5b0ea8b7-b917-423f-a690-eae734229813 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] [instance: 4a5f644a-1670-4c6b-a762-f87f1ee4cce5] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 881.690142] env[62070]: DEBUG nova.objects.instance [None req-5b0ea8b7-b917-423f-a690-eae734229813 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Lazy-loading 'info_cache' on Instance uuid 4a5f644a-1670-4c6b-a762-f87f1ee4cce5 {{(pid=62070) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 881.692279] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-245b6e4d-bdfa-44b0-9f5b-275f3fa6463c {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.700267] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffa64e74-7acf-4eb9-998b-c7fcb991fd41 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.719037] env[62070]: DEBUG nova.compute.provider_tree [None req-361b1276-74ee-478a-b8ef-42cc811e08e6 tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 881.722450] env[62070]: DEBUG oslo_concurrency.lockutils [None req-61a3dc63-b62e-4c66-aaff-d9a9f2541f31 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Acquiring lock "58146b84-7589-4f21-bdab-605cee535e55" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance"
{{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 881.722703] env[62070]: DEBUG oslo_concurrency.lockutils [None req-61a3dc63-b62e-4c66-aaff-d9a9f2541f31 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Lock "58146b84-7589-4f21-bdab-605cee535e55" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 881.722914] env[62070]: DEBUG oslo_concurrency.lockutils [None req-61a3dc63-b62e-4c66-aaff-d9a9f2541f31 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Acquiring lock "58146b84-7589-4f21-bdab-605cee535e55-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 881.723132] env[62070]: DEBUG oslo_concurrency.lockutils [None req-61a3dc63-b62e-4c66-aaff-d9a9f2541f31 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Lock "58146b84-7589-4f21-bdab-605cee535e55-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 881.723280] env[62070]: DEBUG oslo_concurrency.lockutils [None req-61a3dc63-b62e-4c66-aaff-d9a9f2541f31 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Lock "58146b84-7589-4f21-bdab-605cee535e55-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 881.726503] env[62070]: INFO nova.compute.manager [None req-61a3dc63-b62e-4c66-aaff-d9a9f2541f31 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 58146b84-7589-4f21-bdab-605cee535e55] Terminating instance [ 881.733078] env[62070]: DEBUG nova.compute.manager [None req-61a3dc63-b62e-4c66-aaff-d9a9f2541f31 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 58146b84-7589-4f21-bdab-605cee535e55] Start destroying the instance on the hypervisor. 
{{(pid=62070) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 881.733303] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-61a3dc63-b62e-4c66-aaff-d9a9f2541f31 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 58146b84-7589-4f21-bdab-605cee535e55] Destroying instance {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 881.734545] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-041727e0-d862-4be9-9d4f-1349851759d8 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.743114] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-61a3dc63-b62e-4c66-aaff-d9a9f2541f31 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 58146b84-7589-4f21-bdab-605cee535e55] Powering off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 881.747605] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-25c9e6c7-fcbc-422e-84b7-c6c6cd5fe043 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.755514] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1121966, 'name': CreateVM_Task, 'duration_secs': 0.382153} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 881.756964] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 21bcb1a6-833b-48f3-8ee2-0e49c64a104f] Created VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 881.757340] env[62070]: DEBUG oslo_vmware.api [None req-61a3dc63-b62e-4c66-aaff-d9a9f2541f31 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Waiting for the task: (returnval){ [ 881.757340] env[62070]: value = "task-1121969" [ 881.757340] env[62070]: _type = "Task" [ 881.757340] env[62070]: } to complete. 
{{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 881.758159] env[62070]: DEBUG oslo_concurrency.lockutils [None req-24d48b36-01e1-4177-8824-76508bde5cc0 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 881.758351] env[62070]: DEBUG oslo_concurrency.lockutils [None req-24d48b36-01e1-4177-8824-76508bde5cc0 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Acquired lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 881.758705] env[62070]: DEBUG oslo_concurrency.lockutils [None req-24d48b36-01e1-4177-8824-76508bde5cc0 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 881.759045] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ba523692-3162-4a76-adc5-8c4b856f2eb0 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.767553] env[62070]: DEBUG oslo_concurrency.lockutils [None req-783a44c6-89e7-446a-8701-ffc7ee9343d8 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 881.772465] env[62070]: DEBUG oslo_vmware.api [None req-24d48b36-01e1-4177-8824-76508bde5cc0 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Waiting for the task: (returnval){ [ 881.772465] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]5286f782-3613-2963-a8ed-c3f0586a8d0b" [ 881.772465] env[62070]: _type = "Task" [ 881.772465] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 881.772715] env[62070]: DEBUG oslo_vmware.api [None req-61a3dc63-b62e-4c66-aaff-d9a9f2541f31 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Task: {'id': task-1121969, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 881.782109] env[62070]: DEBUG oslo_vmware.api [None req-24d48b36-01e1-4177-8824-76508bde5cc0 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]5286f782-3613-2963-a8ed-c3f0586a8d0b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 881.812083] env[62070]: DEBUG oslo_vmware.api [None req-1e327dae-ecdb-4e8f-89d3-84ae5f947ccb tempest-ServersAdmin275Test-485220002 tempest-ServersAdmin275Test-485220002-project-admin] Task: {'id': task-1121967, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 881.853185] env[62070]: DEBUG oslo_vmware.api [None req-7045d499-6270-457a-a3fe-e34d5ae00193 tempest-ServerGroupTestJSON-951679134 tempest-ServerGroupTestJSON-951679134-project-member] Task: {'id': task-1121968, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.496061} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 881.853249] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-7045d499-6270-457a-a3fe-e34d5ae00193 tempest-ServerGroupTestJSON-951679134 tempest-ServerGroupTestJSON-951679134-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore2] 71c98ac8-4149-448b-bf0c-3bfdcc8f50ef/71c98ac8-4149-448b-bf0c-3bfdcc8f50ef.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 881.853459] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-7045d499-6270-457a-a3fe-e34d5ae00193 tempest-ServerGroupTestJSON-951679134 tempest-ServerGroupTestJSON-951679134-project-member] [instance: 71c98ac8-4149-448b-bf0c-3bfdcc8f50ef] Extending root virtual disk to 1048576 {{(pid=62070) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 881.853740] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-30098173-7766-44c9-a78d-e2130a967df5 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.860088] env[62070]: DEBUG oslo_vmware.api [None req-7045d499-6270-457a-a3fe-e34d5ae00193 tempest-ServerGroupTestJSON-951679134 tempest-ServerGroupTestJSON-951679134-project-member] Waiting for the task: (returnval){ [ 881.860088] env[62070]: value = "task-1121970" [ 881.860088] env[62070]: _type = "Task" [ 881.860088] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 881.871070] env[62070]: DEBUG oslo_vmware.api [None req-7045d499-6270-457a-a3fe-e34d5ae00193 tempest-ServerGroupTestJSON-951679134 tempest-ServerGroupTestJSON-951679134-project-member] Task: {'id': task-1121970, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 882.114158] env[62070]: DEBUG nova.network.neutron [None req-b245da95-b82c-4fb0-92b9-b49d402a2811 tempest-ServerRescueTestJSONUnderV235-557690179 tempest-ServerRescueTestJSONUnderV235-557690179-project-member] [instance: 559eee5b-0834-4dcf-a436-5e58644c7a3b] Instance cache missing network info. 
{{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 882.193567] env[62070]: DEBUG nova.objects.base [None req-5b0ea8b7-b917-423f-a690-eae734229813 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Object Instance<4a5f644a-1670-4c6b-a762-f87f1ee4cce5> lazy-loaded attributes: flavor,info_cache {{(pid=62070) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 882.221626] env[62070]: DEBUG nova.scheduler.client.report [None req-361b1276-74ee-478a-b8ef-42cc811e08e6 tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 882.233451] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66c55dcf-2f8a-448e-a76b-80ab2517bd14 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.242077] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-8dcef4a2-dbbe-4378-9adf-7931bdf45f02 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 71aead12-a182-40a7-b5a9-91c01271b800] Doing hard reboot of VM {{(pid=62070) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1064}} [ 882.242590] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ResetVM_Task with opID=oslo.vmware-3ae7ab11-9eb3-410c-aafa-fa7b1c6cb3e4 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.248807] env[62070]: DEBUG oslo_vmware.api [None req-8dcef4a2-dbbe-4378-9adf-7931bdf45f02 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Waiting for the task: (returnval){ [ 882.248807] env[62070]: value = "task-1121971" [ 882.248807] env[62070]: _type = "Task" [ 882.248807] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 882.258801] env[62070]: DEBUG oslo_vmware.api [None req-8dcef4a2-dbbe-4378-9adf-7931bdf45f02 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Task: {'id': task-1121971, 'name': ResetVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 882.270627] env[62070]: DEBUG oslo_vmware.api [None req-61a3dc63-b62e-4c66-aaff-d9a9f2541f31 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Task: {'id': task-1121969, 'name': PowerOffVM_Task, 'duration_secs': 0.204368} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 882.270947] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-61a3dc63-b62e-4c66-aaff-d9a9f2541f31 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 58146b84-7589-4f21-bdab-605cee535e55] Powered off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 882.271321] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-61a3dc63-b62e-4c66-aaff-d9a9f2541f31 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 58146b84-7589-4f21-bdab-605cee535e55] Unregistering the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 882.271634] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-405ee3c6-718f-4687-ac28-aa93dcf33f6e {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.281420] env[62070]: DEBUG oslo_vmware.api [None req-24d48b36-01e1-4177-8824-76508bde5cc0 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]5286f782-3613-2963-a8ed-c3f0586a8d0b, 'name': SearchDatastore_Task, 'duration_secs': 0.055766} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 882.281738] env[62070]: DEBUG oslo_concurrency.lockutils [None req-24d48b36-01e1-4177-8824-76508bde5cc0 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Releasing lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 882.281977] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-24d48b36-01e1-4177-8824-76508bde5cc0 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] [instance: 21bcb1a6-833b-48f3-8ee2-0e49c64a104f] Processing image 43ea607c-7ece-4601-9b11-75c6a16aa7dd {{(pid=62070) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 882.282239] env[62070]: DEBUG oslo_concurrency.lockutils [None req-24d48b36-01e1-4177-8824-76508bde5cc0 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 882.282394] env[62070]: DEBUG oslo_concurrency.lockutils [None req-24d48b36-01e1-4177-8824-76508bde5cc0 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Acquired lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 882.282579] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-24d48b36-01e1-4177-8824-76508bde5cc0 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62070) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 882.282833] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-605c51f0-b0f3-4584-9ab9-93ddc374f49d {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.285134] env[62070]: DEBUG nova.network.neutron [req-9d39d8b2-5287-43b3-b3e0-5316f8850c68 req-7af27ffc-bef8-4ca9-a105-e4e3b32cf7e8 service nova] [instance: 21bcb1a6-833b-48f3-8ee2-0e49c64a104f] Updated VIF entry in instance network info cache for port 45420f68-e309-4569-8dac-28e16d9417d7. {{(pid=62070) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 882.285476] env[62070]: DEBUG nova.network.neutron [req-9d39d8b2-5287-43b3-b3e0-5316f8850c68 req-7af27ffc-bef8-4ca9-a105-e4e3b32cf7e8 service nova] [instance: 21bcb1a6-833b-48f3-8ee2-0e49c64a104f] Updating instance_info_cache with network_info: [{"id": "45420f68-e309-4569-8dac-28e16d9417d7", "address": "fa:16:3e:08:c2:70", "network": {"id": "48dc51c7-cfa4-452e-9d72-2968d9a40dfa", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-274800531-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "925dff51764c4b56ae7ea05fbde2ecdd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2c7c1b46-cb81-45da-b5aa-7905d4da5854", "external-id": "nsx-vlan-transportzone-15", "segmentation_id": 15, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap45420f68-e3", "ovs_interfaceid": "45420f68-e309-4569-8dac-28e16d9417d7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 882.300243] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-24d48b36-01e1-4177-8824-76508bde5cc0 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 882.300448] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-24d48b36-01e1-4177-8824-76508bde5cc0 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62070) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 882.301477] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-993e14a7-482d-4504-8c30-0a7aaeff60b0 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.311195] env[62070]: DEBUG oslo_vmware.api [None req-24d48b36-01e1-4177-8824-76508bde5cc0 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Waiting for the task: (returnval){ [ 882.311195] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]52e6e58c-050d-c6a6-2970-4e4a4fb8e434" [ 882.311195] env[62070]: _type = "Task" [ 882.311195] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 882.313828] env[62070]: DEBUG oslo_vmware.api [None req-1e327dae-ecdb-4e8f-89d3-84ae5f947ccb tempest-ServersAdmin275Test-485220002 tempest-ServersAdmin275Test-485220002-project-admin] Task: {'id': task-1121967, 'name': PowerOnVM_Task, 'duration_secs': 0.710655} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 882.316665] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-1e327dae-ecdb-4e8f-89d3-84ae5f947ccb tempest-ServersAdmin275Test-485220002 tempest-ServersAdmin275Test-485220002-project-admin] [instance: 61ab347d-1342-4f59-8955-10d575993b77] Powered on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 882.316909] env[62070]: DEBUG nova.compute.manager [None req-1e327dae-ecdb-4e8f-89d3-84ae5f947ccb tempest-ServersAdmin275Test-485220002 tempest-ServersAdmin275Test-485220002-project-admin] [instance: 61ab347d-1342-4f59-8955-10d575993b77] Checking state {{(pid=62070) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 882.318153] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89a242ee-0359-4479-a9f3-d29ca1a9724a {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.325049] env[62070]: DEBUG oslo_vmware.api [None req-24d48b36-01e1-4177-8824-76508bde5cc0 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52e6e58c-050d-c6a6-2970-4e4a4fb8e434, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 882.348741] env[62070]: DEBUG nova.network.neutron [None req-b245da95-b82c-4fb0-92b9-b49d402a2811 tempest-ServerRescueTestJSONUnderV235-557690179 tempest-ServerRescueTestJSONUnderV235-557690179-project-member] [instance: 559eee5b-0834-4dcf-a436-5e58644c7a3b] Updating instance_info_cache with network_info: [{"id": "c24d842f-4fc0-417f-a913-acda1bd7c41b", "address": "fa:16:3e:9d:d6:f1", "network": {"id": "f4af58a1-85b4-4c02-814f-2896adf35801", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-859480877-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "647582b6f4a048aea74c761f4e136a34", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "27138a4c-60c9-45fb-bf37-4c2f765315a3", "external-id": "nsx-vlan-transportzone-736", "segmentation_id": 736, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc24d842f-4f", "ovs_interfaceid": "c24d842f-4fc0-417f-a913-acda1bd7c41b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 882.355019] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-61a3dc63-b62e-4c66-aaff-d9a9f2541f31 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 58146b84-7589-4f21-bdab-605cee535e55] Unregistered the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 882.355019] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-61a3dc63-b62e-4c66-aaff-d9a9f2541f31 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 58146b84-7589-4f21-bdab-605cee535e55] Deleting contents of the VM from datastore datastore2 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 882.355019] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-61a3dc63-b62e-4c66-aaff-d9a9f2541f31 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Deleting the datastore file [datastore2] 58146b84-7589-4f21-bdab-605cee535e55 {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 882.355019] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6234e440-f6ee-42b9-a134-ce71edec4b1c {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.359145] env[62070]: DEBUG oslo_vmware.api [None req-61a3dc63-b62e-4c66-aaff-d9a9f2541f31 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Waiting for the task: (returnval){ [ 882.359145] env[62070]: value = "task-1121973" [ 882.359145] env[62070]: _type = "Task" [ 882.359145] env[62070]: } to complete. 
{{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 882.371874] env[62070]: DEBUG oslo_vmware.api [None req-61a3dc63-b62e-4c66-aaff-d9a9f2541f31 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Task: {'id': task-1121973, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 882.375133] env[62070]: DEBUG oslo_vmware.api [None req-7045d499-6270-457a-a3fe-e34d5ae00193 tempest-ServerGroupTestJSON-951679134 tempest-ServerGroupTestJSON-951679134-project-member] Task: {'id': task-1121970, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.065578} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 882.375133] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-7045d499-6270-457a-a3fe-e34d5ae00193 tempest-ServerGroupTestJSON-951679134 tempest-ServerGroupTestJSON-951679134-project-member] [instance: 71c98ac8-4149-448b-bf0c-3bfdcc8f50ef] Extended root virtual disk {{(pid=62070) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 882.375889] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b793338b-1c81-4b9c-b0ee-3bbefa1c5e24 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.397696] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-7045d499-6270-457a-a3fe-e34d5ae00193 tempest-ServerGroupTestJSON-951679134 tempest-ServerGroupTestJSON-951679134-project-member] [instance: 71c98ac8-4149-448b-bf0c-3bfdcc8f50ef] Reconfiguring VM instance instance-0000004a to attach disk [datastore2] 71c98ac8-4149-448b-bf0c-3bfdcc8f50ef/71c98ac8-4149-448b-bf0c-3bfdcc8f50ef.vmdk or device None with type sparse {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 882.398374] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d9678660-e642-46da-bd0e-9e73040feb07 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.418128] env[62070]: DEBUG oslo_vmware.api [None req-7045d499-6270-457a-a3fe-e34d5ae00193 tempest-ServerGroupTestJSON-951679134 tempest-ServerGroupTestJSON-951679134-project-member] Waiting for the task: (returnval){ [ 882.418128] env[62070]: value = "task-1121974" [ 882.418128] env[62070]: _type = "Task" [ 882.418128] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 882.425773] env[62070]: DEBUG oslo_vmware.api [None req-7045d499-6270-457a-a3fe-e34d5ae00193 tempest-ServerGroupTestJSON-951679134 tempest-ServerGroupTestJSON-951679134-project-member] Task: {'id': task-1121974, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 882.634903] env[62070]: DEBUG oslo_concurrency.lockutils [None req-4ed0d70b-94fb-4c1c-826a-6f09c66aec65 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Acquiring lock "62758a38-4819-4d5a-97ed-db6c9ceb97bf" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 882.692781] env[62070]: DEBUG nova.compute.manager [None req-f5579827-8e9e-49d8-9717-4039d0d0bfc5 tempest-ServerAddressesNegativeTestJSON-1593054557 tempest-ServerAddressesNegativeTestJSON-1593054557-project-member] [instance: d2cfcfac-4f15-4b16-9046-76722ee2e39b] Start spawning the instance on the hypervisor. {{(pid=62070) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 882.713993] env[62070]: DEBUG nova.virt.hardware [None req-f5579827-8e9e-49d8-9717-4039d0d0bfc5 tempest-ServerAddressesNegativeTestJSON-1593054557 tempest-ServerAddressesNegativeTestJSON-1593054557-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T09:21:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T09:21:17Z,direct_url=<?>,disk_format='vmdk',id=43ea607c-7ece-4601-9b11-75c6a16aa7dd,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9d42cb2bbadf40d6b35f237f71234611',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2024-10-03T09:21:18Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 882.714322] env[62070]: DEBUG nova.virt.hardware [None req-f5579827-8e9e-49d8-9717-4039d0d0bfc5 tempest-ServerAddressesNegativeTestJSON-1593054557 tempest-ServerAddressesNegativeTestJSON-1593054557-project-member] Flavor limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 882.714529] env[62070]: DEBUG nova.virt.hardware [None req-f5579827-8e9e-49d8-9717-4039d0d0bfc5 tempest-ServerAddressesNegativeTestJSON-1593054557 tempest-ServerAddressesNegativeTestJSON-1593054557-project-member] Image limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 882.714767] env[62070]: DEBUG nova.virt.hardware [None req-f5579827-8e9e-49d8-9717-4039d0d0bfc5 tempest-ServerAddressesNegativeTestJSON-1593054557 tempest-ServerAddressesNegativeTestJSON-1593054557-project-member] Flavor pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 882.714980] env[62070]: DEBUG nova.virt.hardware [None req-f5579827-8e9e-49d8-9717-4039d0d0bfc5 tempest-ServerAddressesNegativeTestJSON-1593054557 tempest-ServerAddressesNegativeTestJSON-1593054557-project-member] Image pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 882.715203] env[62070]: DEBUG nova.virt.hardware [None req-f5579827-8e9e-49d8-9717-4039d0d0bfc5 tempest-ServerAddressesNegativeTestJSON-1593054557 tempest-ServerAddressesNegativeTestJSON-1593054557-project-member] 
Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 882.715459] env[62070]: DEBUG nova.virt.hardware [None req-f5579827-8e9e-49d8-9717-4039d0d0bfc5 tempest-ServerAddressesNegativeTestJSON-1593054557 tempest-ServerAddressesNegativeTestJSON-1593054557-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 882.715669] env[62070]: DEBUG nova.virt.hardware [None req-f5579827-8e9e-49d8-9717-4039d0d0bfc5 tempest-ServerAddressesNegativeTestJSON-1593054557 tempest-ServerAddressesNegativeTestJSON-1593054557-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 882.715887] env[62070]: DEBUG nova.virt.hardware [None req-f5579827-8e9e-49d8-9717-4039d0d0bfc5 tempest-ServerAddressesNegativeTestJSON-1593054557 tempest-ServerAddressesNegativeTestJSON-1593054557-project-member] Got 1 possible topologies {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 882.716114] env[62070]: DEBUG nova.virt.hardware [None req-f5579827-8e9e-49d8-9717-4039d0d0bfc5 tempest-ServerAddressesNegativeTestJSON-1593054557 tempest-ServerAddressesNegativeTestJSON-1593054557-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 882.716345] env[62070]: DEBUG nova.virt.hardware [None req-f5579827-8e9e-49d8-9717-4039d0d0bfc5 tempest-ServerAddressesNegativeTestJSON-1593054557 tempest-ServerAddressesNegativeTestJSON-1593054557-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 882.717386] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a56290b-a904-464b-8905-715884e562de {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.726100] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-898cc96f-f153-4d99-9488-dc0a26b1990d {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.731180] env[62070]: DEBUG oslo_concurrency.lockutils [None req-361b1276-74ee-478a-b8ef-42cc811e08e6 tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.072s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 882.734458] env[62070]: DEBUG oslo_concurrency.lockutils [None req-7fb39e9f-8441-46d0-8705-2c950367f499 tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 25.364s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 882.734845] env[62070]: DEBUG nova.objects.instance [None req-7fb39e9f-8441-46d0-8705-2c950367f499 
tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Lazy-loading 'resources' on Instance uuid e5deccf6-f967-4e3c-bee0-2e1ad0bb4560 {{(pid=62070) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 882.754120] env[62070]: INFO nova.scheduler.client.report [None req-361b1276-74ee-478a-b8ef-42cc811e08e6 tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Deleted allocations for instance 27987ff6-77c9-4876-8b39-dcc20ce4158a [ 882.763755] env[62070]: DEBUG oslo_vmware.api [None req-8dcef4a2-dbbe-4378-9adf-7931bdf45f02 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Task: {'id': task-1121971, 'name': ResetVM_Task, 'duration_secs': 0.105208} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 882.764271] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-8dcef4a2-dbbe-4378-9adf-7931bdf45f02 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 71aead12-a182-40a7-b5a9-91c01271b800] Did hard reboot of VM {{(pid=62070) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1068}} [ 882.764466] env[62070]: DEBUG nova.compute.manager [None req-8dcef4a2-dbbe-4378-9adf-7931bdf45f02 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 71aead12-a182-40a7-b5a9-91c01271b800] Checking state {{(pid=62070) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 882.765280] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb1e430b-5fcd-4876-9dbf-340a92d7383f {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.790109] env[62070]: DEBUG oslo_concurrency.lockutils [req-9d39d8b2-5287-43b3-b3e0-5316f8850c68 req-7af27ffc-bef8-4ca9-a105-e4e3b32cf7e8 service nova] Releasing lock "refresh_cache-21bcb1a6-833b-48f3-8ee2-0e49c64a104f" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 882.823996] env[62070]: DEBUG oslo_vmware.api [None req-24d48b36-01e1-4177-8824-76508bde5cc0 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52e6e58c-050d-c6a6-2970-4e4a4fb8e434, 'name': SearchDatastore_Task, 'duration_secs': 0.037996} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 882.824812] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-57a4b58a-fe40-4f69-b138-df6839968491 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.834019] env[62070]: DEBUG oslo_vmware.api [None req-24d48b36-01e1-4177-8824-76508bde5cc0 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Waiting for the task: (returnval){ [ 882.834019] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]52f36e4e-0004-b36d-b784-45cce361e50d" [ 882.834019] env[62070]: _type = "Task" [ 882.834019] env[62070]: } to complete. 
{{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 882.842694] env[62070]: DEBUG oslo_vmware.api [None req-24d48b36-01e1-4177-8824-76508bde5cc0 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52f36e4e-0004-b36d-b784-45cce361e50d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 882.843165] env[62070]: DEBUG oslo_concurrency.lockutils [None req-1e327dae-ecdb-4e8f-89d3-84ae5f947ccb tempest-ServersAdmin275Test-485220002 tempest-ServersAdmin275Test-485220002-project-admin] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 882.853179] env[62070]: DEBUG oslo_concurrency.lockutils [None req-b245da95-b82c-4fb0-92b9-b49d402a2811 tempest-ServerRescueTestJSONUnderV235-557690179 tempest-ServerRescueTestJSONUnderV235-557690179-project-member] Releasing lock "refresh_cache-559eee5b-0834-4dcf-a436-5e58644c7a3b" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 882.853491] env[62070]: DEBUG nova.compute.manager [None req-b245da95-b82c-4fb0-92b9-b49d402a2811 tempest-ServerRescueTestJSONUnderV235-557690179 tempest-ServerRescueTestJSONUnderV235-557690179-project-member] [instance: 559eee5b-0834-4dcf-a436-5e58644c7a3b] Instance network_info: |[{"id": "c24d842f-4fc0-417f-a913-acda1bd7c41b", "address": "fa:16:3e:9d:d6:f1", "network": {"id": "f4af58a1-85b4-4c02-814f-2896adf35801", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-859480877-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "647582b6f4a048aea74c761f4e136a34", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "27138a4c-60c9-45fb-bf37-4c2f765315a3", "external-id": "nsx-vlan-transportzone-736", "segmentation_id": 736, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc24d842f-4f", "ovs_interfaceid": "c24d842f-4fc0-417f-a913-acda1bd7c41b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 882.853998] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-b245da95-b82c-4fb0-92b9-b49d402a2811 tempest-ServerRescueTestJSONUnderV235-557690179 tempest-ServerRescueTestJSONUnderV235-557690179-project-member] [instance: 559eee5b-0834-4dcf-a436-5e58644c7a3b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:9d:d6:f1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '27138a4c-60c9-45fb-bf37-4c2f765315a3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c24d842f-4fc0-417f-a913-acda1bd7c41b', 'vif_model': 'vmxnet3'}] {{(pid=62070) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 882.862170] 
env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-b245da95-b82c-4fb0-92b9-b49d402a2811 tempest-ServerRescueTestJSONUnderV235-557690179 tempest-ServerRescueTestJSONUnderV235-557690179-project-member] Creating folder: Project (647582b6f4a048aea74c761f4e136a34). Parent ref: group-v245319. {{(pid=62070) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 882.862523] env[62070]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9847563d-5d7c-46fa-bb99-d894cafb1791 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.870270] env[62070]: DEBUG nova.compute.manager [req-914a68e1-374e-41fd-b45e-06e97ddc067f req-ad2e38b1-edd1-4978-96fb-49607d81680c service nova] [instance: 62758a38-4819-4d5a-97ed-db6c9ceb97bf] Received event network-changed-c4a1b7aa-611b-422e-9678-70513f52b764 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 882.870462] env[62070]: DEBUG nova.compute.manager [req-914a68e1-374e-41fd-b45e-06e97ddc067f req-ad2e38b1-edd1-4978-96fb-49607d81680c service nova] [instance: 62758a38-4819-4d5a-97ed-db6c9ceb97bf] Refreshing instance network info cache due to event network-changed-c4a1b7aa-611b-422e-9678-70513f52b764. {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 882.870692] env[62070]: DEBUG oslo_concurrency.lockutils [req-914a68e1-374e-41fd-b45e-06e97ddc067f req-ad2e38b1-edd1-4978-96fb-49607d81680c service nova] Acquiring lock "refresh_cache-62758a38-4819-4d5a-97ed-db6c9ceb97bf" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 882.870815] env[62070]: DEBUG oslo_concurrency.lockutils [req-914a68e1-374e-41fd-b45e-06e97ddc067f req-ad2e38b1-edd1-4978-96fb-49607d81680c service nova] Acquired lock "refresh_cache-62758a38-4819-4d5a-97ed-db6c9ceb97bf" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 882.870984] env[62070]: DEBUG nova.network.neutron [req-914a68e1-374e-41fd-b45e-06e97ddc067f req-ad2e38b1-edd1-4978-96fb-49607d81680c service nova] [instance: 62758a38-4819-4d5a-97ed-db6c9ceb97bf] Refreshing network info cache for port c4a1b7aa-611b-422e-9678-70513f52b764 {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 882.877947] env[62070]: INFO nova.virt.vmwareapi.vm_util [None req-b245da95-b82c-4fb0-92b9-b49d402a2811 tempest-ServerRescueTestJSONUnderV235-557690179 tempest-ServerRescueTestJSONUnderV235-557690179-project-member] Created folder: Project (647582b6f4a048aea74c761f4e136a34) in parent group-v245319. [ 882.878211] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-b245da95-b82c-4fb0-92b9-b49d402a2811 tempest-ServerRescueTestJSONUnderV235-557690179 tempest-ServerRescueTestJSONUnderV235-557690179-project-member] Creating folder: Instances. Parent ref: group-v245445. 
{{(pid=62070) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 882.881661] env[62070]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-31a0d9e0-407d-44a2-9954-714d3272b1a6 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.883282] env[62070]: DEBUG oslo_vmware.api [None req-61a3dc63-b62e-4c66-aaff-d9a9f2541f31 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Task: {'id': task-1121973, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.514815} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 882.884211] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-61a3dc63-b62e-4c66-aaff-d9a9f2541f31 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Deleted the datastore file {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 882.884407] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-61a3dc63-b62e-4c66-aaff-d9a9f2541f31 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 58146b84-7589-4f21-bdab-605cee535e55] Deleted contents of the VM from datastore datastore2 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 882.884582] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-61a3dc63-b62e-4c66-aaff-d9a9f2541f31 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 58146b84-7589-4f21-bdab-605cee535e55] Instance destroyed {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 882.884764] env[62070]: INFO nova.compute.manager [None req-61a3dc63-b62e-4c66-aaff-d9a9f2541f31 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 58146b84-7589-4f21-bdab-605cee535e55] Took 1.15 seconds to destroy the instance on the hypervisor. [ 882.885011] env[62070]: DEBUG oslo.service.loopingcall [None req-61a3dc63-b62e-4c66-aaff-d9a9f2541f31 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 882.885730] env[62070]: DEBUG nova.compute.manager [-] [instance: 58146b84-7589-4f21-bdab-605cee535e55] Deallocating network for instance {{(pid=62070) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 882.885830] env[62070]: DEBUG nova.network.neutron [-] [instance: 58146b84-7589-4f21-bdab-605cee535e55] deallocate_for_instance() {{(pid=62070) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 882.892883] env[62070]: INFO nova.virt.vmwareapi.vm_util [None req-b245da95-b82c-4fb0-92b9-b49d402a2811 tempest-ServerRescueTestJSONUnderV235-557690179 tempest-ServerRescueTestJSONUnderV235-557690179-project-member] Created folder: Instances in parent group-v245445. [ 882.893135] env[62070]: DEBUG oslo.service.loopingcall [None req-b245da95-b82c-4fb0-92b9-b49d402a2811 tempest-ServerRescueTestJSONUnderV235-557690179 tempest-ServerRescueTestJSONUnderV235-557690179-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 882.893529] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 559eee5b-0834-4dcf-a436-5e58644c7a3b] Creating VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 882.893747] env[62070]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-cd715453-bd55-44e3-9f03-20cde214b035 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.913727] env[62070]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 882.913727] env[62070]: value = "task-1121977" [ 882.913727] env[62070]: _type = "Task" [ 882.913727] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 882.936018] env[62070]: DEBUG oslo_vmware.api [None req-7045d499-6270-457a-a3fe-e34d5ae00193 tempest-ServerGroupTestJSON-951679134 tempest-ServerGroupTestJSON-951679134-project-member] Task: {'id': task-1121974, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 882.936249] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1121977, 'name': CreateVM_Task} progress is 6%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.091015] env[62070]: DEBUG nova.network.neutron [None req-5b0ea8b7-b917-423f-a690-eae734229813 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] [instance: 4a5f644a-1670-4c6b-a762-f87f1ee4cce5] Updating instance_info_cache with network_info: [{"id": "3a8213ef-a979-487a-8756-7bfecdf4ba10", "address": "fa:16:3e:25:2e:9f", "network": {"id": "443d2d62-bcef-44b2-814a-3e5dc50abc04", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-772061432-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.183", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e85c0cc8e0f544bfbb76970d3123fbb7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5e1049e8-c06b-4c93-a9e1-2cbb530f3f95", "external-id": "nsx-vlan-transportzone-966", "segmentation_id": 966, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3a8213ef-a9", "ovs_interfaceid": "3a8213ef-a979-487a-8756-7bfecdf4ba10", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 883.267543] env[62070]: DEBUG oslo_concurrency.lockutils [None req-361b1276-74ee-478a-b8ef-42cc811e08e6 tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Lock "27987ff6-77c9-4876-8b39-dcc20ce4158a" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 29.638s {{(pid=62070) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 883.283509] env[62070]: DEBUG nova.network.neutron [None req-f5579827-8e9e-49d8-9717-4039d0d0bfc5 tempest-ServerAddressesNegativeTestJSON-1593054557 tempest-ServerAddressesNegativeTestJSON-1593054557-project-member] [instance: d2cfcfac-4f15-4b16-9046-76722ee2e39b] Successfully updated port: 4488d696-fdbf-44e5-9b57-3915f167f3ad {{(pid=62070) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 883.284763] env[62070]: DEBUG oslo_concurrency.lockutils [None req-8dcef4a2-dbbe-4378-9adf-7931bdf45f02 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Lock "71aead12-a182-40a7-b5a9-91c01271b800" "released" by "nova.compute.manager.ComputeManager.reboot_instance.<locals>.do_reboot_instance" :: held 4.340s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 883.347259] env[62070]: DEBUG oslo_vmware.api [None req-24d48b36-01e1-4177-8824-76508bde5cc0 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52f36e4e-0004-b36d-b784-45cce361e50d, 'name': SearchDatastore_Task, 'duration_secs': 0.011476} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 883.347607] env[62070]: DEBUG oslo_concurrency.lockutils [None req-24d48b36-01e1-4177-8824-76508bde5cc0 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Releasing lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 883.348012] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-24d48b36-01e1-4177-8824-76508bde5cc0 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore2] 21bcb1a6-833b-48f3-8ee2-0e49c64a104f/21bcb1a6-833b-48f3-8ee2-0e49c64a104f.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 883.348347] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0db35f9c-d4e6-4391-af6d-bb9277616ece {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.361337] env[62070]: DEBUG oslo_vmware.api [None req-24d48b36-01e1-4177-8824-76508bde5cc0 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Waiting for the task: (returnval){ [ 883.361337] env[62070]: value = "task-1121978" [ 883.361337] env[62070]: _type = "Task" [ 883.361337] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 883.373334] env[62070]: DEBUG oslo_vmware.api [None req-24d48b36-01e1-4177-8824-76508bde5cc0 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Task: {'id': task-1121978, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.391958] env[62070]: DEBUG nova.compute.manager [req-06968043-d944-42e8-83fd-4250c3e48512 req-eb3ae25f-8a65-4a18-ba76-fa780c554c53 service nova] [instance: 58146b84-7589-4f21-bdab-605cee535e55] Received event network-vif-deleted-f1b26c6a-d4b8-49c8-b247-27a2e9e76076 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 883.392652] env[62070]: INFO nova.compute.manager [req-06968043-d944-42e8-83fd-4250c3e48512 req-eb3ae25f-8a65-4a18-ba76-fa780c554c53 service nova] [instance: 58146b84-7589-4f21-bdab-605cee535e55] Neutron deleted interface f1b26c6a-d4b8-49c8-b247-27a2e9e76076; detaching it from the instance and deleting it from the info cache [ 883.392864] env[62070]: DEBUG nova.network.neutron [req-06968043-d944-42e8-83fd-4250c3e48512 req-eb3ae25f-8a65-4a18-ba76-fa780c554c53 service nova] [instance: 58146b84-7589-4f21-bdab-605cee535e55] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 883.427068] env[62070]: DEBUG oslo_concurrency.lockutils [None req-ec72d549-7bd3-4594-9c1e-1e62a799962e tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] Acquiring lock "61ab347d-1342-4f59-8955-10d575993b77" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 883.427469] env[62070]: DEBUG oslo_concurrency.lockutils [None req-ec72d549-7bd3-4594-9c1e-1e62a799962e tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] Lock "61ab347d-1342-4f59-8955-10d575993b77" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 883.427596] env[62070]: DEBUG oslo_concurrency.lockutils [None req-ec72d549-7bd3-4594-9c1e-1e62a799962e tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] Acquiring lock "61ab347d-1342-4f59-8955-10d575993b77-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 883.427818] env[62070]: DEBUG oslo_concurrency.lockutils [None req-ec72d549-7bd3-4594-9c1e-1e62a799962e tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] Lock "61ab347d-1342-4f59-8955-10d575993b77-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 883.428127] env[62070]: DEBUG oslo_concurrency.lockutils [None req-ec72d549-7bd3-4594-9c1e-1e62a799962e tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] Lock "61ab347d-1342-4f59-8955-10d575993b77-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 883.430086] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1121977, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.435030] env[62070]: INFO nova.compute.manager [None req-ec72d549-7bd3-4594-9c1e-1e62a799962e tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] [instance: 61ab347d-1342-4f59-8955-10d575993b77] Terminating instance [ 883.440651] env[62070]: DEBUG oslo_concurrency.lockutils [None req-ec72d549-7bd3-4594-9c1e-1e62a799962e tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] Acquiring lock "refresh_cache-61ab347d-1342-4f59-8955-10d575993b77" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 883.440651] env[62070]: DEBUG oslo_concurrency.lockutils [None req-ec72d549-7bd3-4594-9c1e-1e62a799962e tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] Acquired lock "refresh_cache-61ab347d-1342-4f59-8955-10d575993b77" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 883.440651] env[62070]: DEBUG nova.network.neutron [None req-ec72d549-7bd3-4594-9c1e-1e62a799962e tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] [instance: 61ab347d-1342-4f59-8955-10d575993b77] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 883.445459] env[62070]: DEBUG oslo_vmware.api [None req-7045d499-6270-457a-a3fe-e34d5ae00193 tempest-ServerGroupTestJSON-951679134 tempest-ServerGroupTestJSON-951679134-project-member] Task: {'id': task-1121974, 'name': ReconfigVM_Task, 'duration_secs': 0.686613} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 883.445459] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-7045d499-6270-457a-a3fe-e34d5ae00193 tempest-ServerGroupTestJSON-951679134 tempest-ServerGroupTestJSON-951679134-project-member] [instance: 71c98ac8-4149-448b-bf0c-3bfdcc8f50ef] Reconfigured VM instance instance-0000004a to attach disk [datastore2] 71c98ac8-4149-448b-bf0c-3bfdcc8f50ef/71c98ac8-4149-448b-bf0c-3bfdcc8f50ef.vmdk or device None with type sparse {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 883.446167] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a77249ca-a1a3-46ac-97e2-f742f0f651dd {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.453945] env[62070]: DEBUG oslo_vmware.api [None req-7045d499-6270-457a-a3fe-e34d5ae00193 tempest-ServerGroupTestJSON-951679134 tempest-ServerGroupTestJSON-951679134-project-member] Waiting for the task: (returnval){ [ 883.453945] env[62070]: value = "task-1121979" [ 883.453945] env[62070]: _type = "Task" [ 883.453945] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 883.465747] env[62070]: DEBUG oslo_vmware.api [None req-7045d499-6270-457a-a3fe-e34d5ae00193 tempest-ServerGroupTestJSON-951679134 tempest-ServerGroupTestJSON-951679134-project-member] Task: {'id': task-1121979, 'name': Rename_Task} progress is 5%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.593953] env[62070]: DEBUG oslo_concurrency.lockutils [None req-5b0ea8b7-b917-423f-a690-eae734229813 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Releasing lock "refresh_cache-4a5f644a-1670-4c6b-a762-f87f1ee4cce5" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 883.627753] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6016473-a126-4838-999e-1fdd55d338e6 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.642922] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c391b0fe-2068-4946-b4b1-4621238410ce {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.680096] env[62070]: DEBUG nova.network.neutron [req-914a68e1-374e-41fd-b45e-06e97ddc067f req-ad2e38b1-edd1-4978-96fb-49607d81680c service nova] [instance: 62758a38-4819-4d5a-97ed-db6c9ceb97bf] Updated VIF entry in instance network info cache for port c4a1b7aa-611b-422e-9678-70513f52b764. {{(pid=62070) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 883.680538] env[62070]: DEBUG nova.network.neutron [req-914a68e1-374e-41fd-b45e-06e97ddc067f req-ad2e38b1-edd1-4978-96fb-49607d81680c service nova] [instance: 62758a38-4819-4d5a-97ed-db6c9ceb97bf] Updating instance_info_cache with network_info: [{"id": "c4a1b7aa-611b-422e-9678-70513f52b764", "address": "fa:16:3e:12:de:d6", "network": {"id": "5ea0fffc-372c-450e-b27b-10959077d58f", "bridge": null, "label": "tempest-DeleteServersTestJSON-1853458988-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9191f0e6c2ee401abca64c0780e230bf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tapc4a1b7aa-61", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 883.682625] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f05ae34-b699-4b71-bffc-9dd187fe045c {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.691602] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d754465f-34f3-4a94-8323-c97cea95d85b {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.710504] env[62070]: DEBUG nova.compute.provider_tree [None req-7fb39e9f-8441-46d0-8705-2c950367f499 tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) 
update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 883.746514] env[62070]: DEBUG nova.network.neutron [-] [instance: 58146b84-7589-4f21-bdab-605cee535e55] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 883.787778] env[62070]: DEBUG oslo_concurrency.lockutils [None req-f5579827-8e9e-49d8-9717-4039d0d0bfc5 tempest-ServerAddressesNegativeTestJSON-1593054557 tempest-ServerAddressesNegativeTestJSON-1593054557-project-member] Acquiring lock "refresh_cache-d2cfcfac-4f15-4b16-9046-76722ee2e39b" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 883.787778] env[62070]: DEBUG oslo_concurrency.lockutils [None req-f5579827-8e9e-49d8-9717-4039d0d0bfc5 tempest-ServerAddressesNegativeTestJSON-1593054557 tempest-ServerAddressesNegativeTestJSON-1593054557-project-member] Acquired lock "refresh_cache-d2cfcfac-4f15-4b16-9046-76722ee2e39b" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 883.787778] env[62070]: DEBUG nova.network.neutron [None req-f5579827-8e9e-49d8-9717-4039d0d0bfc5 tempest-ServerAddressesNegativeTestJSON-1593054557 tempest-ServerAddressesNegativeTestJSON-1593054557-project-member] [instance: d2cfcfac-4f15-4b16-9046-76722ee2e39b] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 883.872482] env[62070]: DEBUG oslo_vmware.api [None req-24d48b36-01e1-4177-8824-76508bde5cc0 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Task: {'id': task-1121978, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.898431] env[62070]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-8880198d-5145-494d-94ef-453a5bf00260 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.910222] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb3b2a2a-449a-4886-80bd-7889a506dc54 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.933784] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1121977, 'name': CreateVM_Task, 'duration_secs': 0.531295} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 883.933971] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 559eee5b-0834-4dcf-a436-5e58644c7a3b] Created VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 883.934738] env[62070]: DEBUG oslo_concurrency.lockutils [None req-b245da95-b82c-4fb0-92b9-b49d402a2811 tempest-ServerRescueTestJSONUnderV235-557690179 tempest-ServerRescueTestJSONUnderV235-557690179-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 883.935072] env[62070]: DEBUG oslo_concurrency.lockutils [None req-b245da95-b82c-4fb0-92b9-b49d402a2811 tempest-ServerRescueTestJSONUnderV235-557690179 tempest-ServerRescueTestJSONUnderV235-557690179-project-member] Acquired lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 883.935566] env[62070]: DEBUG oslo_concurrency.lockutils [None req-b245da95-b82c-4fb0-92b9-b49d402a2811 tempest-ServerRescueTestJSONUnderV235-557690179 tempest-ServerRescueTestJSONUnderV235-557690179-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 883.936332] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5e8085a3-d364-4c51-9ae0-41db75d06b3f {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.951917] env[62070]: DEBUG nova.compute.manager [req-06968043-d944-42e8-83fd-4250c3e48512 req-eb3ae25f-8a65-4a18-ba76-fa780c554c53 service nova] [instance: 58146b84-7589-4f21-bdab-605cee535e55] Detach interface failed, port_id=f1b26c6a-d4b8-49c8-b247-27a2e9e76076, reason: Instance 58146b84-7589-4f21-bdab-605cee535e55 could not be found. {{(pid=62070) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 883.957176] env[62070]: DEBUG oslo_vmware.api [None req-b245da95-b82c-4fb0-92b9-b49d402a2811 tempest-ServerRescueTestJSONUnderV235-557690179 tempest-ServerRescueTestJSONUnderV235-557690179-project-member] Waiting for the task: (returnval){ [ 883.957176] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]521c9c41-0675-3760-a037-df5fdd8db821" [ 883.957176] env[62070]: _type = "Task" [ 883.957176] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 883.970139] env[62070]: DEBUG oslo_vmware.api [None req-b245da95-b82c-4fb0-92b9-b49d402a2811 tempest-ServerRescueTestJSONUnderV235-557690179 tempest-ServerRescueTestJSONUnderV235-557690179-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]521c9c41-0675-3760-a037-df5fdd8db821, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.971100] env[62070]: DEBUG nova.network.neutron [None req-ec72d549-7bd3-4594-9c1e-1e62a799962e tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] [instance: 61ab347d-1342-4f59-8955-10d575993b77] Instance cache missing network info. {{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 883.975888] env[62070]: DEBUG oslo_vmware.api [None req-7045d499-6270-457a-a3fe-e34d5ae00193 tempest-ServerGroupTestJSON-951679134 tempest-ServerGroupTestJSON-951679134-project-member] Task: {'id': task-1121979, 'name': Rename_Task, 'duration_secs': 0.258551} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 883.976555] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-7045d499-6270-457a-a3fe-e34d5ae00193 tempest-ServerGroupTestJSON-951679134 tempest-ServerGroupTestJSON-951679134-project-member] [instance: 71c98ac8-4149-448b-bf0c-3bfdcc8f50ef] Powering on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 883.976868] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e3be8ae9-e848-4dfe-a6fe-33b04b0f388f {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.986675] env[62070]: DEBUG oslo_vmware.api [None req-7045d499-6270-457a-a3fe-e34d5ae00193 tempest-ServerGroupTestJSON-951679134 tempest-ServerGroupTestJSON-951679134-project-member] Waiting for the task: (returnval){ [ 883.986675] env[62070]: value = "task-1121980" [ 883.986675] env[62070]: _type = "Task" [ 883.986675] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 883.996410] env[62070]: DEBUG oslo_vmware.api [None req-7045d499-6270-457a-a3fe-e34d5ae00193 tempest-ServerGroupTestJSON-951679134 tempest-ServerGroupTestJSON-951679134-project-member] Task: {'id': task-1121980, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.061493] env[62070]: DEBUG nova.network.neutron [None req-ec72d549-7bd3-4594-9c1e-1e62a799962e tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] [instance: 61ab347d-1342-4f59-8955-10d575993b77] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 884.099119] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-5b0ea8b7-b917-423f-a690-eae734229813 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] [instance: 4a5f644a-1670-4c6b-a762-f87f1ee4cce5] Powering on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 884.099472] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-bade347d-f676-4ce2-b67e-514f81cdaba1 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.108056] env[62070]: DEBUG oslo_vmware.api [None req-5b0ea8b7-b917-423f-a690-eae734229813 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Waiting for the task: (returnval){ [ 884.108056] env[62070]: value = "task-1121981" [ 884.108056] env[62070]: _type = "Task" [ 884.108056] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 884.117046] env[62070]: DEBUG oslo_vmware.api [None req-5b0ea8b7-b917-423f-a690-eae734229813 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Task: {'id': task-1121981, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.188054] env[62070]: DEBUG oslo_concurrency.lockutils [req-914a68e1-374e-41fd-b45e-06e97ddc067f req-ad2e38b1-edd1-4978-96fb-49607d81680c service nova] Releasing lock "refresh_cache-62758a38-4819-4d5a-97ed-db6c9ceb97bf" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 884.188054] env[62070]: DEBUG nova.compute.manager [req-914a68e1-374e-41fd-b45e-06e97ddc067f req-ad2e38b1-edd1-4978-96fb-49607d81680c service nova] [instance: 559eee5b-0834-4dcf-a436-5e58644c7a3b] Received event network-vif-plugged-c24d842f-4fc0-417f-a913-acda1bd7c41b {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 884.188054] env[62070]: DEBUG oslo_concurrency.lockutils [req-914a68e1-374e-41fd-b45e-06e97ddc067f req-ad2e38b1-edd1-4978-96fb-49607d81680c service nova] Acquiring lock "559eee5b-0834-4dcf-a436-5e58644c7a3b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 884.188263] env[62070]: DEBUG oslo_concurrency.lockutils [req-914a68e1-374e-41fd-b45e-06e97ddc067f req-ad2e38b1-edd1-4978-96fb-49607d81680c service nova] Lock "559eee5b-0834-4dcf-a436-5e58644c7a3b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 884.188304] env[62070]: DEBUG oslo_concurrency.lockutils [req-914a68e1-374e-41fd-b45e-06e97ddc067f req-ad2e38b1-edd1-4978-96fb-49607d81680c service nova] Lock "559eee5b-0834-4dcf-a436-5e58644c7a3b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 884.188573] env[62070]: DEBUG nova.compute.manager [req-914a68e1-374e-41fd-b45e-06e97ddc067f req-ad2e38b1-edd1-4978-96fb-49607d81680c service nova] [instance: 559eee5b-0834-4dcf-a436-5e58644c7a3b] No waiting events found dispatching network-vif-plugged-c24d842f-4fc0-417f-a913-acda1bd7c41b {{(pid=62070) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 884.188805] env[62070]: WARNING nova.compute.manager [req-914a68e1-374e-41fd-b45e-06e97ddc067f req-ad2e38b1-edd1-4978-96fb-49607d81680c service nova] [instance: 559eee5b-0834-4dcf-a436-5e58644c7a3b] Received unexpected event network-vif-plugged-c24d842f-4fc0-417f-a913-acda1bd7c41b for instance with vm_state building and task_state spawning. [ 884.189034] env[62070]: DEBUG nova.compute.manager [req-914a68e1-374e-41fd-b45e-06e97ddc067f req-ad2e38b1-edd1-4978-96fb-49607d81680c service nova] [instance: 559eee5b-0834-4dcf-a436-5e58644c7a3b] Received event network-changed-c24d842f-4fc0-417f-a913-acda1bd7c41b {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 884.189283] env[62070]: DEBUG nova.compute.manager [req-914a68e1-374e-41fd-b45e-06e97ddc067f req-ad2e38b1-edd1-4978-96fb-49607d81680c service nova] [instance: 559eee5b-0834-4dcf-a436-5e58644c7a3b] Refreshing instance network info cache due to event network-changed-c24d842f-4fc0-417f-a913-acda1bd7c41b. 
{{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 884.189802] env[62070]: DEBUG oslo_concurrency.lockutils [req-914a68e1-374e-41fd-b45e-06e97ddc067f req-ad2e38b1-edd1-4978-96fb-49607d81680c service nova] Acquiring lock "refresh_cache-559eee5b-0834-4dcf-a436-5e58644c7a3b" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 884.190012] env[62070]: DEBUG oslo_concurrency.lockutils [req-914a68e1-374e-41fd-b45e-06e97ddc067f req-ad2e38b1-edd1-4978-96fb-49607d81680c service nova] Acquired lock "refresh_cache-559eee5b-0834-4dcf-a436-5e58644c7a3b" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 884.190214] env[62070]: DEBUG nova.network.neutron [req-914a68e1-374e-41fd-b45e-06e97ddc067f req-ad2e38b1-edd1-4978-96fb-49607d81680c service nova] [instance: 559eee5b-0834-4dcf-a436-5e58644c7a3b] Refreshing network info cache for port c24d842f-4fc0-417f-a913-acda1bd7c41b {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 884.213011] env[62070]: DEBUG nova.scheduler.client.report [None req-7fb39e9f-8441-46d0-8705-2c950367f499 tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 884.250175] env[62070]: INFO nova.compute.manager [-] [instance: 58146b84-7589-4f21-bdab-605cee535e55] Took 1.36 seconds to deallocate network for instance. [ 884.320167] env[62070]: DEBUG nova.network.neutron [None req-f5579827-8e9e-49d8-9717-4039d0d0bfc5 tempest-ServerAddressesNegativeTestJSON-1593054557 tempest-ServerAddressesNegativeTestJSON-1593054557-project-member] [instance: d2cfcfac-4f15-4b16-9046-76722ee2e39b] Instance cache missing network info. {{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 884.371989] env[62070]: DEBUG oslo_vmware.api [None req-24d48b36-01e1-4177-8824-76508bde5cc0 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Task: {'id': task-1121978, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.74994} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 884.372286] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-24d48b36-01e1-4177-8824-76508bde5cc0 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore2] 21bcb1a6-833b-48f3-8ee2-0e49c64a104f/21bcb1a6-833b-48f3-8ee2-0e49c64a104f.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 884.372507] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-24d48b36-01e1-4177-8824-76508bde5cc0 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] [instance: 21bcb1a6-833b-48f3-8ee2-0e49c64a104f] Extending root virtual disk to 1048576 {{(pid=62070) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 884.372760] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4af0029a-e683-4aa5-ab50-6fe77f2e4e4d {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.382014] env[62070]: DEBUG oslo_vmware.api [None req-24d48b36-01e1-4177-8824-76508bde5cc0 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Waiting for the task: (returnval){ [ 884.382014] env[62070]: value = "task-1121982" [ 884.382014] env[62070]: _type = "Task" [ 884.382014] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 884.392373] env[62070]: DEBUG oslo_vmware.api [None req-24d48b36-01e1-4177-8824-76508bde5cc0 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Task: {'id': task-1121982, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.466724] env[62070]: DEBUG nova.network.neutron [None req-f5579827-8e9e-49d8-9717-4039d0d0bfc5 tempest-ServerAddressesNegativeTestJSON-1593054557 tempest-ServerAddressesNegativeTestJSON-1593054557-project-member] [instance: d2cfcfac-4f15-4b16-9046-76722ee2e39b] Updating instance_info_cache with network_info: [{"id": "4488d696-fdbf-44e5-9b57-3915f167f3ad", "address": "fa:16:3e:b0:e0:c0", "network": {"id": "406ec621-5ec4-4ca9-b67e-cfa3e71e2e54", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-1337655895-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2dae68f478d549e5991c37b80c858468", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "97113f46-d648-4613-b233-069acba18198", "external-id": "nsx-vlan-transportzone-480", "segmentation_id": 480, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4488d696-fd", "ovs_interfaceid": "4488d696-fdbf-44e5-9b57-3915f167f3ad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 884.472213] env[62070]: DEBUG oslo_vmware.api [None req-b245da95-b82c-4fb0-92b9-b49d402a2811 tempest-ServerRescueTestJSONUnderV235-557690179 tempest-ServerRescueTestJSONUnderV235-557690179-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]521c9c41-0675-3760-a037-df5fdd8db821, 'name': SearchDatastore_Task, 'duration_secs': 0.060264} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 884.472779] env[62070]: DEBUG oslo_concurrency.lockutils [None req-b245da95-b82c-4fb0-92b9-b49d402a2811 tempest-ServerRescueTestJSONUnderV235-557690179 tempest-ServerRescueTestJSONUnderV235-557690179-project-member] Releasing lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 884.473043] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-b245da95-b82c-4fb0-92b9-b49d402a2811 tempest-ServerRescueTestJSONUnderV235-557690179 tempest-ServerRescueTestJSONUnderV235-557690179-project-member] [instance: 559eee5b-0834-4dcf-a436-5e58644c7a3b] Processing image 43ea607c-7ece-4601-9b11-75c6a16aa7dd {{(pid=62070) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 884.473292] env[62070]: DEBUG oslo_concurrency.lockutils [None req-b245da95-b82c-4fb0-92b9-b49d402a2811 tempest-ServerRescueTestJSONUnderV235-557690179 tempest-ServerRescueTestJSONUnderV235-557690179-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 884.473446] env[62070]: DEBUG oslo_concurrency.lockutils [None req-b245da95-b82c-4fb0-92b9-b49d402a2811 tempest-ServerRescueTestJSONUnderV235-557690179 tempest-ServerRescueTestJSONUnderV235-557690179-project-member] Acquired lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 884.473630] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-b245da95-b82c-4fb0-92b9-b49d402a2811 tempest-ServerRescueTestJSONUnderV235-557690179 tempest-ServerRescueTestJSONUnderV235-557690179-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 884.473910] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fc424250-4fd6-4887-a813-b86b0a962078 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.484456] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-b245da95-b82c-4fb0-92b9-b49d402a2811 tempest-ServerRescueTestJSONUnderV235-557690179 tempest-ServerRescueTestJSONUnderV235-557690179-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 884.484685] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-b245da95-b82c-4fb0-92b9-b49d402a2811 tempest-ServerRescueTestJSONUnderV235-557690179 tempest-ServerRescueTestJSONUnderV235-557690179-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62070) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 884.485769] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-aac0f9a6-b229-40f0-a917-7444e9607f77 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.499689] env[62070]: DEBUG oslo_vmware.api [None req-7045d499-6270-457a-a3fe-e34d5ae00193 tempest-ServerGroupTestJSON-951679134 tempest-ServerGroupTestJSON-951679134-project-member] Task: {'id': task-1121980, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.500038] env[62070]: DEBUG oslo_vmware.api [None req-b245da95-b82c-4fb0-92b9-b49d402a2811 tempest-ServerRescueTestJSONUnderV235-557690179 tempest-ServerRescueTestJSONUnderV235-557690179-project-member] Waiting for the task: (returnval){ [ 884.500038] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]5269e147-a2b1-f189-0732-f27657f5384a" [ 884.500038] env[62070]: _type = "Task" [ 884.500038] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 884.509807] env[62070]: DEBUG oslo_vmware.api [None req-b245da95-b82c-4fb0-92b9-b49d402a2811 tempest-ServerRescueTestJSONUnderV235-557690179 tempest-ServerRescueTestJSONUnderV235-557690179-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]5269e147-a2b1-f189-0732-f27657f5384a, 'name': SearchDatastore_Task, 'duration_secs': 0.011235} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 884.510607] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-64676da3-fae0-47c4-b378-10245c78606a {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.516681] env[62070]: DEBUG oslo_vmware.api [None req-b245da95-b82c-4fb0-92b9-b49d402a2811 tempest-ServerRescueTestJSONUnderV235-557690179 tempest-ServerRescueTestJSONUnderV235-557690179-project-member] Waiting for the task: (returnval){ [ 884.516681] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]52a8bdd4-527b-7541-da98-7e691cccd758" [ 884.516681] env[62070]: _type = "Task" [ 884.516681] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 884.526154] env[62070]: DEBUG oslo_vmware.api [None req-b245da95-b82c-4fb0-92b9-b49d402a2811 tempest-ServerRescueTestJSONUnderV235-557690179 tempest-ServerRescueTestJSONUnderV235-557690179-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52a8bdd4-527b-7541-da98-7e691cccd758, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.564881] env[62070]: DEBUG oslo_concurrency.lockutils [None req-ec72d549-7bd3-4594-9c1e-1e62a799962e tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] Releasing lock "refresh_cache-61ab347d-1342-4f59-8955-10d575993b77" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 884.565478] env[62070]: DEBUG nova.compute.manager [None req-ec72d549-7bd3-4594-9c1e-1e62a799962e tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] [instance: 61ab347d-1342-4f59-8955-10d575993b77] Start destroying the instance on the hypervisor. {{(pid=62070) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 884.565720] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-ec72d549-7bd3-4594-9c1e-1e62a799962e tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] [instance: 61ab347d-1342-4f59-8955-10d575993b77] Destroying instance {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 884.566661] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5a6a024-9072-4f05-95a4-027370a7f022 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.574551] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-ec72d549-7bd3-4594-9c1e-1e62a799962e tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] [instance: 61ab347d-1342-4f59-8955-10d575993b77] Powering off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 884.574815] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0c69857d-2ed4-408a-b31a-751d2a25eed5 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.581967] env[62070]: DEBUG oslo_vmware.api [None req-ec72d549-7bd3-4594-9c1e-1e62a799962e tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] Waiting for the task: (returnval){ [ 884.581967] env[62070]: value = "task-1121983" [ 884.581967] env[62070]: _type = "Task" [ 884.581967] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 884.590379] env[62070]: DEBUG oslo_vmware.api [None req-ec72d549-7bd3-4594-9c1e-1e62a799962e tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] Task: {'id': task-1121983, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.619089] env[62070]: DEBUG oslo_vmware.api [None req-5b0ea8b7-b917-423f-a690-eae734229813 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Task: {'id': task-1121981, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.717974] env[62070]: DEBUG oslo_concurrency.lockutils [None req-7fb39e9f-8441-46d0-8705-2c950367f499 tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.984s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 884.720579] env[62070]: DEBUG oslo_concurrency.lockutils [None req-98c8c84f-4cf9-474f-aae7-fd7e5cc6d19c tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 25.604s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 884.722151] env[62070]: INFO nova.compute.claims [None req-98c8c84f-4cf9-474f-aae7-fd7e5cc6d19c tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] [instance: 7dc27fe6-495f-498d-88fe-a99ddc19a21c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 884.741405] env[62070]: INFO nova.scheduler.client.report [None req-7fb39e9f-8441-46d0-8705-2c950367f499 tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Deleted allocations for instance e5deccf6-f967-4e3c-bee0-2e1ad0bb4560 [ 884.757490] env[62070]: DEBUG oslo_concurrency.lockutils [None req-61a3dc63-b62e-4c66-aaff-d9a9f2541f31 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 884.893061] env[62070]: DEBUG oslo_vmware.api [None req-24d48b36-01e1-4177-8824-76508bde5cc0 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Task: {'id': task-1121982, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074655} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 884.894554] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-24d48b36-01e1-4177-8824-76508bde5cc0 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] [instance: 21bcb1a6-833b-48f3-8ee2-0e49c64a104f] Extended root virtual disk {{(pid=62070) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 884.895373] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e15ac7b6-deda-40f1-b6fe-8beef5d945ce {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.920084] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-24d48b36-01e1-4177-8824-76508bde5cc0 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] [instance: 21bcb1a6-833b-48f3-8ee2-0e49c64a104f] Reconfiguring VM instance instance-0000004b to attach disk [datastore2] 21bcb1a6-833b-48f3-8ee2-0e49c64a104f/21bcb1a6-833b-48f3-8ee2-0e49c64a104f.vmdk or device None with type sparse {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 884.920387] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3b6e3a1a-a870-4e13-b2f9-116ee0bd5a8f {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.936624] env[62070]: DEBUG nova.compute.manager [req-bc0da6b4-b0c1-455f-ba05-827a2f82692a req-6e19f30a-2ed3-4fde-af97-5d5dc2729137 service nova] [instance: d2cfcfac-4f15-4b16-9046-76722ee2e39b] Received event network-vif-plugged-4488d696-fdbf-44e5-9b57-3915f167f3ad {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 884.936875] env[62070]: DEBUG oslo_concurrency.lockutils [req-bc0da6b4-b0c1-455f-ba05-827a2f82692a req-6e19f30a-2ed3-4fde-af97-5d5dc2729137 service nova] Acquiring lock "d2cfcfac-4f15-4b16-9046-76722ee2e39b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 884.937116] env[62070]: DEBUG oslo_concurrency.lockutils [req-bc0da6b4-b0c1-455f-ba05-827a2f82692a req-6e19f30a-2ed3-4fde-af97-5d5dc2729137 service nova] Lock "d2cfcfac-4f15-4b16-9046-76722ee2e39b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 884.937296] env[62070]: DEBUG oslo_concurrency.lockutils [req-bc0da6b4-b0c1-455f-ba05-827a2f82692a req-6e19f30a-2ed3-4fde-af97-5d5dc2729137 service nova] Lock "d2cfcfac-4f15-4b16-9046-76722ee2e39b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 884.937477] env[62070]: DEBUG nova.compute.manager [req-bc0da6b4-b0c1-455f-ba05-827a2f82692a req-6e19f30a-2ed3-4fde-af97-5d5dc2729137 service nova] [instance: d2cfcfac-4f15-4b16-9046-76722ee2e39b] No waiting events found dispatching network-vif-plugged-4488d696-fdbf-44e5-9b57-3915f167f3ad {{(pid=62070) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 884.937632] env[62070]: WARNING nova.compute.manager 
[req-bc0da6b4-b0c1-455f-ba05-827a2f82692a req-6e19f30a-2ed3-4fde-af97-5d5dc2729137 service nova] [instance: d2cfcfac-4f15-4b16-9046-76722ee2e39b] Received unexpected event network-vif-plugged-4488d696-fdbf-44e5-9b57-3915f167f3ad for instance with vm_state building and task_state spawning. [ 884.937826] env[62070]: DEBUG nova.compute.manager [req-bc0da6b4-b0c1-455f-ba05-827a2f82692a req-6e19f30a-2ed3-4fde-af97-5d5dc2729137 service nova] [instance: d2cfcfac-4f15-4b16-9046-76722ee2e39b] Received event network-changed-4488d696-fdbf-44e5-9b57-3915f167f3ad {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 884.937983] env[62070]: DEBUG nova.compute.manager [req-bc0da6b4-b0c1-455f-ba05-827a2f82692a req-6e19f30a-2ed3-4fde-af97-5d5dc2729137 service nova] [instance: d2cfcfac-4f15-4b16-9046-76722ee2e39b] Refreshing instance network info cache due to event network-changed-4488d696-fdbf-44e5-9b57-3915f167f3ad. {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 884.938173] env[62070]: DEBUG oslo_concurrency.lockutils [req-bc0da6b4-b0c1-455f-ba05-827a2f82692a req-6e19f30a-2ed3-4fde-af97-5d5dc2729137 service nova] Acquiring lock "refresh_cache-d2cfcfac-4f15-4b16-9046-76722ee2e39b" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 884.938991] env[62070]: DEBUG nova.network.neutron [req-914a68e1-374e-41fd-b45e-06e97ddc067f req-ad2e38b1-edd1-4978-96fb-49607d81680c service nova] [instance: 559eee5b-0834-4dcf-a436-5e58644c7a3b] Updated VIF entry in instance network info cache for port c24d842f-4fc0-417f-a913-acda1bd7c41b. {{(pid=62070) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 884.939325] env[62070]: DEBUG nova.network.neutron [req-914a68e1-374e-41fd-b45e-06e97ddc067f req-ad2e38b1-edd1-4978-96fb-49607d81680c service nova] [instance: 559eee5b-0834-4dcf-a436-5e58644c7a3b] Updating instance_info_cache with network_info: [{"id": "c24d842f-4fc0-417f-a913-acda1bd7c41b", "address": "fa:16:3e:9d:d6:f1", "network": {"id": "f4af58a1-85b4-4c02-814f-2896adf35801", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-859480877-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "647582b6f4a048aea74c761f4e136a34", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "27138a4c-60c9-45fb-bf37-4c2f765315a3", "external-id": "nsx-vlan-transportzone-736", "segmentation_id": 736, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc24d842f-4f", "ovs_interfaceid": "c24d842f-4fc0-417f-a913-acda1bd7c41b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 884.947104] env[62070]: DEBUG oslo_vmware.api [None req-24d48b36-01e1-4177-8824-76508bde5cc0 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Waiting for the task: (returnval){ [ 884.947104] env[62070]: value = "task-1121984" [ 884.947104] env[62070]: 
_type = "Task" [ 884.947104] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 884.957971] env[62070]: DEBUG oslo_vmware.api [None req-24d48b36-01e1-4177-8824-76508bde5cc0 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Task: {'id': task-1121984, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.974849] env[62070]: DEBUG oslo_concurrency.lockutils [None req-f5579827-8e9e-49d8-9717-4039d0d0bfc5 tempest-ServerAddressesNegativeTestJSON-1593054557 tempest-ServerAddressesNegativeTestJSON-1593054557-project-member] Releasing lock "refresh_cache-d2cfcfac-4f15-4b16-9046-76722ee2e39b" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 884.975266] env[62070]: DEBUG nova.compute.manager [None req-f5579827-8e9e-49d8-9717-4039d0d0bfc5 tempest-ServerAddressesNegativeTestJSON-1593054557 tempest-ServerAddressesNegativeTestJSON-1593054557-project-member] [instance: d2cfcfac-4f15-4b16-9046-76722ee2e39b] Instance network_info: |[{"id": "4488d696-fdbf-44e5-9b57-3915f167f3ad", "address": "fa:16:3e:b0:e0:c0", "network": {"id": "406ec621-5ec4-4ca9-b67e-cfa3e71e2e54", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-1337655895-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2dae68f478d549e5991c37b80c858468", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "97113f46-d648-4613-b233-069acba18198", "external-id": "nsx-vlan-transportzone-480", "segmentation_id": 480, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4488d696-fd", "ovs_interfaceid": "4488d696-fdbf-44e5-9b57-3915f167f3ad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 884.975980] env[62070]: DEBUG oslo_concurrency.lockutils [req-bc0da6b4-b0c1-455f-ba05-827a2f82692a req-6e19f30a-2ed3-4fde-af97-5d5dc2729137 service nova] Acquired lock "refresh_cache-d2cfcfac-4f15-4b16-9046-76722ee2e39b" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 884.976255] env[62070]: DEBUG nova.network.neutron [req-bc0da6b4-b0c1-455f-ba05-827a2f82692a req-6e19f30a-2ed3-4fde-af97-5d5dc2729137 service nova] [instance: d2cfcfac-4f15-4b16-9046-76722ee2e39b] Refreshing network info cache for port 4488d696-fdbf-44e5-9b57-3915f167f3ad {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 884.978825] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-f5579827-8e9e-49d8-9717-4039d0d0bfc5 tempest-ServerAddressesNegativeTestJSON-1593054557 tempest-ServerAddressesNegativeTestJSON-1593054557-project-member] [instance: d2cfcfac-4f15-4b16-9046-76722ee2e39b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b0:e0:c0', 'network_ref': {'type': 
'OpaqueNetwork', 'network-id': '97113f46-d648-4613-b233-069acba18198', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4488d696-fdbf-44e5-9b57-3915f167f3ad', 'vif_model': 'vmxnet3'}] {{(pid=62070) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 884.988025] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-f5579827-8e9e-49d8-9717-4039d0d0bfc5 tempest-ServerAddressesNegativeTestJSON-1593054557 tempest-ServerAddressesNegativeTestJSON-1593054557-project-member] Creating folder: Project (2dae68f478d549e5991c37b80c858468). Parent ref: group-v245319. {{(pid=62070) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 884.988025] env[62070]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-33fe094c-e21d-4332-a69b-7dcf07b58fee {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.003129] env[62070]: DEBUG oslo_vmware.api [None req-7045d499-6270-457a-a3fe-e34d5ae00193 tempest-ServerGroupTestJSON-951679134 tempest-ServerGroupTestJSON-951679134-project-member] Task: {'id': task-1121980, 'name': PowerOnVM_Task, 'duration_secs': 0.518273} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 885.005175] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-7045d499-6270-457a-a3fe-e34d5ae00193 tempest-ServerGroupTestJSON-951679134 tempest-ServerGroupTestJSON-951679134-project-member] [instance: 71c98ac8-4149-448b-bf0c-3bfdcc8f50ef] Powered on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 885.005532] env[62070]: INFO nova.compute.manager [None req-7045d499-6270-457a-a3fe-e34d5ae00193 tempest-ServerGroupTestJSON-951679134 tempest-ServerGroupTestJSON-951679134-project-member] [instance: 71c98ac8-4149-448b-bf0c-3bfdcc8f50ef] Took 8.03 seconds to spawn the instance on the hypervisor. [ 885.005836] env[62070]: DEBUG nova.compute.manager [None req-7045d499-6270-457a-a3fe-e34d5ae00193 tempest-ServerGroupTestJSON-951679134 tempest-ServerGroupTestJSON-951679134-project-member] [instance: 71c98ac8-4149-448b-bf0c-3bfdcc8f50ef] Checking state {{(pid=62070) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 885.006296] env[62070]: INFO nova.virt.vmwareapi.vm_util [None req-f5579827-8e9e-49d8-9717-4039d0d0bfc5 tempest-ServerAddressesNegativeTestJSON-1593054557 tempest-ServerAddressesNegativeTestJSON-1593054557-project-member] Created folder: Project (2dae68f478d549e5991c37b80c858468) in parent group-v245319. [ 885.006538] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-f5579827-8e9e-49d8-9717-4039d0d0bfc5 tempest-ServerAddressesNegativeTestJSON-1593054557 tempest-ServerAddressesNegativeTestJSON-1593054557-project-member] Creating folder: Instances. Parent ref: group-v245448. 
{{(pid=62070) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 885.007388] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d9d8bec-25ab-4581-b574-ba96cd818d74 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.010635] env[62070]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-13a7bb69-03b9-4b84-aa5e-43f0f5d2fd5f {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.024224] env[62070]: INFO nova.virt.vmwareapi.vm_util [None req-f5579827-8e9e-49d8-9717-4039d0d0bfc5 tempest-ServerAddressesNegativeTestJSON-1593054557 tempest-ServerAddressesNegativeTestJSON-1593054557-project-member] Created folder: Instances in parent group-v245448. [ 885.024477] env[62070]: DEBUG oslo.service.loopingcall [None req-f5579827-8e9e-49d8-9717-4039d0d0bfc5 tempest-ServerAddressesNegativeTestJSON-1593054557 tempest-ServerAddressesNegativeTestJSON-1593054557-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 885.025040] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d2cfcfac-4f15-4b16-9046-76722ee2e39b] Creating VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 885.025264] env[62070]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-09a42195-f780-4376-ad71-282a806cb513 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.044186] env[62070]: DEBUG oslo_vmware.api [None req-b245da95-b82c-4fb0-92b9-b49d402a2811 tempest-ServerRescueTestJSONUnderV235-557690179 tempest-ServerRescueTestJSONUnderV235-557690179-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52a8bdd4-527b-7541-da98-7e691cccd758, 'name': SearchDatastore_Task, 'duration_secs': 0.011787} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 885.044882] env[62070]: DEBUG oslo_concurrency.lockutils [None req-b245da95-b82c-4fb0-92b9-b49d402a2811 tempest-ServerRescueTestJSONUnderV235-557690179 tempest-ServerRescueTestJSONUnderV235-557690179-project-member] Releasing lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 885.045238] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-b245da95-b82c-4fb0-92b9-b49d402a2811 tempest-ServerRescueTestJSONUnderV235-557690179 tempest-ServerRescueTestJSONUnderV235-557690179-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore2] 559eee5b-0834-4dcf-a436-5e58644c7a3b/559eee5b-0834-4dcf-a436-5e58644c7a3b.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 885.045543] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4c4bf5c1-9e64-4022-89b8-4294ec4fa81c {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.051490] env[62070]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 885.051490] env[62070]: value = "task-1121987" [ 885.051490] env[62070]: _type = "Task" [ 885.051490] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 885.053084] env[62070]: DEBUG oslo_vmware.api [None req-b245da95-b82c-4fb0-92b9-b49d402a2811 tempest-ServerRescueTestJSONUnderV235-557690179 tempest-ServerRescueTestJSONUnderV235-557690179-project-member] Waiting for the task: (returnval){ [ 885.053084] env[62070]: value = "task-1121988" [ 885.053084] env[62070]: _type = "Task" [ 885.053084] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 885.065895] env[62070]: DEBUG oslo_vmware.api [None req-b245da95-b82c-4fb0-92b9-b49d402a2811 tempest-ServerRescueTestJSONUnderV235-557690179 tempest-ServerRescueTestJSONUnderV235-557690179-project-member] Task: {'id': task-1121988, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 885.069438] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1121987, 'name': CreateVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 885.093947] env[62070]: DEBUG oslo_vmware.api [None req-ec72d549-7bd3-4594-9c1e-1e62a799962e tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] Task: {'id': task-1121983, 'name': PowerOffVM_Task, 'duration_secs': 0.133028} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 885.094727] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-ec72d549-7bd3-4594-9c1e-1e62a799962e tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] [instance: 61ab347d-1342-4f59-8955-10d575993b77] Powered off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 885.094727] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-ec72d549-7bd3-4594-9c1e-1e62a799962e tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] [instance: 61ab347d-1342-4f59-8955-10d575993b77] Unregistering the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 885.094975] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9357e221-c86c-4b0b-b8ac-7a688fe95479 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.122176] env[62070]: DEBUG oslo_vmware.api [None req-5b0ea8b7-b917-423f-a690-eae734229813 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Task: {'id': task-1121981, 'name': PowerOnVM_Task} progress is 81%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 885.137762] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-ec72d549-7bd3-4594-9c1e-1e62a799962e tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] [instance: 61ab347d-1342-4f59-8955-10d575993b77] Unregistered the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 885.138064] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-ec72d549-7bd3-4594-9c1e-1e62a799962e tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] [instance: 61ab347d-1342-4f59-8955-10d575993b77] Deleting contents of the VM from datastore datastore2 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 885.138277] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-ec72d549-7bd3-4594-9c1e-1e62a799962e tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] Deleting the datastore file [datastore2] 61ab347d-1342-4f59-8955-10d575993b77 {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 885.138628] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b3cb1a90-8ba3-44c2-adc0-44e16d32c857 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.148539] env[62070]: DEBUG oslo_vmware.api [None req-ec72d549-7bd3-4594-9c1e-1e62a799962e tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] Waiting for the task: (returnval){ [ 885.148539] env[62070]: value = "task-1121990" [ 885.148539] env[62070]: _type = "Task" [ 885.148539] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 885.158933] env[62070]: DEBUG oslo_vmware.api [None req-ec72d549-7bd3-4594-9c1e-1e62a799962e tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] Task: {'id': task-1121990, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 885.253359] env[62070]: DEBUG oslo_concurrency.lockutils [None req-7fb39e9f-8441-46d0-8705-2c950367f499 tempest-MultipleCreateTestJSON-2024650253 tempest-MultipleCreateTestJSON-2024650253-project-member] Lock "e5deccf6-f967-4e3c-bee0-2e1ad0bb4560" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 31.536s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 885.442490] env[62070]: DEBUG oslo_concurrency.lockutils [req-914a68e1-374e-41fd-b45e-06e97ddc067f req-ad2e38b1-edd1-4978-96fb-49607d81680c service nova] Releasing lock "refresh_cache-559eee5b-0834-4dcf-a436-5e58644c7a3b" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 885.458763] env[62070]: DEBUG oslo_vmware.api [None req-24d48b36-01e1-4177-8824-76508bde5cc0 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Task: {'id': task-1121984, 'name': ReconfigVM_Task, 'duration_secs': 0.321126} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 885.459128] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-24d48b36-01e1-4177-8824-76508bde5cc0 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] [instance: 21bcb1a6-833b-48f3-8ee2-0e49c64a104f] Reconfigured VM instance instance-0000004b to attach disk [datastore2] 21bcb1a6-833b-48f3-8ee2-0e49c64a104f/21bcb1a6-833b-48f3-8ee2-0e49c64a104f.vmdk or device None with type sparse {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 885.459804] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-84ad82fe-f099-4ce1-9449-8037a1390860 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.471294] env[62070]: DEBUG oslo_vmware.api [None req-24d48b36-01e1-4177-8824-76508bde5cc0 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Waiting for the task: (returnval){ [ 885.471294] env[62070]: value = "task-1121991" [ 885.471294] env[62070]: _type = "Task" [ 885.471294] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 885.483753] env[62070]: DEBUG oslo_vmware.api [None req-24d48b36-01e1-4177-8824-76508bde5cc0 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Task: {'id': task-1121991, 'name': Rename_Task} progress is 5%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 885.533319] env[62070]: INFO nova.compute.manager [None req-7045d499-6270-457a-a3fe-e34d5ae00193 tempest-ServerGroupTestJSON-951679134 tempest-ServerGroupTestJSON-951679134-project-member] [instance: 71c98ac8-4149-448b-bf0c-3bfdcc8f50ef] Took 37.10 seconds to build instance. [ 885.570189] env[62070]: DEBUG oslo_vmware.api [None req-b245da95-b82c-4fb0-92b9-b49d402a2811 tempest-ServerRescueTestJSONUnderV235-557690179 tempest-ServerRescueTestJSONUnderV235-557690179-project-member] Task: {'id': task-1121988, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 885.573825] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1121987, 'name': CreateVM_Task} progress is 99%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 885.621905] env[62070]: DEBUG oslo_vmware.api [None req-5b0ea8b7-b917-423f-a690-eae734229813 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Task: {'id': task-1121981, 'name': PowerOnVM_Task, 'duration_secs': 1.199604} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 885.622203] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-5b0ea8b7-b917-423f-a690-eae734229813 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] [instance: 4a5f644a-1670-4c6b-a762-f87f1ee4cce5] Powered on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 885.622474] env[62070]: DEBUG nova.compute.manager [None req-5b0ea8b7-b917-423f-a690-eae734229813 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] [instance: 4a5f644a-1670-4c6b-a762-f87f1ee4cce5] Checking state {{(pid=62070) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 885.623300] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87f6e351-cb78-45d1-98ec-4c61a85e55ce {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.659725] env[62070]: DEBUG oslo_vmware.api [None req-ec72d549-7bd3-4594-9c1e-1e62a799962e tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] Task: {'id': task-1121990, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.153089} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 885.660075] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-ec72d549-7bd3-4594-9c1e-1e62a799962e tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] Deleted the datastore file {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 885.660345] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-ec72d549-7bd3-4594-9c1e-1e62a799962e tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] [instance: 61ab347d-1342-4f59-8955-10d575993b77] Deleted contents of the VM from datastore datastore2 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 885.660547] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-ec72d549-7bd3-4594-9c1e-1e62a799962e tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] [instance: 61ab347d-1342-4f59-8955-10d575993b77] Instance destroyed {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 885.660732] env[62070]: INFO nova.compute.manager [None req-ec72d549-7bd3-4594-9c1e-1e62a799962e tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] [instance: 61ab347d-1342-4f59-8955-10d575993b77] Took 1.10 seconds to destroy the instance on the hypervisor. 
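The recurring pattern in the entries above and below — "Waiting for the task: (returnval){ value = task-... }", intermediate "progress is N%" DEBUG lines, then "completed successfully" — is a poll-until-done loop over a vCenter task reference. Below is a minimal, hypothetical sketch of such a loop for illustration only; the names `get_task_info`, `TaskFailed`, and the info attributes are assumptions and do not reproduce the actual oslo.vmware implementation referenced in these logs.

```python
# Hypothetical sketch of the polling behind the "Waiting for the task ...
# progress is N% ... completed successfully" log lines. `get_task_info`,
# `TaskFailed`, and the attribute names are placeholders, not the real
# oslo.vmware API.
import time


class TaskFailed(Exception):
    """Raised when the polled task ends in an error state."""


def wait_for_task(get_task_info, task_ref, poll_interval=0.5):
    """Poll a task reference until it succeeds or fails.

    `get_task_info(task_ref)` is assumed to return an object with
    `state` ('running', 'success', or 'error'), `progress` (percent),
    and `error_message`, mirroring what the DEBUG lines report.
    """
    while True:
        info = get_task_info(task_ref)
        if info.state == "success":
            # corresponds to a "... completed successfully." log line
            return info
        if info.state == "error":
            raise TaskFailed(info.error_message)
        # corresponds to the intermediate "progress is N%" log lines
        print(f"Task {task_ref}: progress is {info.progress}%")
        time.sleep(poll_interval)
```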
[ 885.661900] env[62070]: DEBUG oslo.service.loopingcall [None req-ec72d549-7bd3-4594-9c1e-1e62a799962e tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 885.661900] env[62070]: DEBUG nova.compute.manager [-] [instance: 61ab347d-1342-4f59-8955-10d575993b77] Deallocating network for instance {{(pid=62070) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 885.661900] env[62070]: DEBUG nova.network.neutron [-] [instance: 61ab347d-1342-4f59-8955-10d575993b77] deallocate_for_instance() {{(pid=62070) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 885.679904] env[62070]: DEBUG nova.network.neutron [-] [instance: 61ab347d-1342-4f59-8955-10d575993b77] Instance cache missing network info. {{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 885.761155] env[62070]: DEBUG nova.network.neutron [req-bc0da6b4-b0c1-455f-ba05-827a2f82692a req-6e19f30a-2ed3-4fde-af97-5d5dc2729137 service nova] [instance: d2cfcfac-4f15-4b16-9046-76722ee2e39b] Updated VIF entry in instance network info cache for port 4488d696-fdbf-44e5-9b57-3915f167f3ad. {{(pid=62070) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 885.762131] env[62070]: DEBUG nova.network.neutron [req-bc0da6b4-b0c1-455f-ba05-827a2f82692a req-6e19f30a-2ed3-4fde-af97-5d5dc2729137 service nova] [instance: d2cfcfac-4f15-4b16-9046-76722ee2e39b] Updating instance_info_cache with network_info: [{"id": "4488d696-fdbf-44e5-9b57-3915f167f3ad", "address": "fa:16:3e:b0:e0:c0", "network": {"id": "406ec621-5ec4-4ca9-b67e-cfa3e71e2e54", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-1337655895-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2dae68f478d549e5991c37b80c858468", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "97113f46-d648-4613-b233-069acba18198", "external-id": "nsx-vlan-transportzone-480", "segmentation_id": 480, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4488d696-fd", "ovs_interfaceid": "4488d696-fdbf-44e5-9b57-3915f167f3ad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 885.985629] env[62070]: DEBUG oslo_vmware.api [None req-24d48b36-01e1-4177-8824-76508bde5cc0 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Task: {'id': task-1121991, 'name': Rename_Task, 'duration_secs': 0.428493} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 885.985793] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-24d48b36-01e1-4177-8824-76508bde5cc0 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] [instance: 21bcb1a6-833b-48f3-8ee2-0e49c64a104f] Powering on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 885.986468] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-34c83c4e-a437-4c71-acdf-b9e5c1d430a1 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.995443] env[62070]: DEBUG oslo_vmware.api [None req-24d48b36-01e1-4177-8824-76508bde5cc0 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Waiting for the task: (returnval){ [ 885.995443] env[62070]: value = "task-1121992" [ 885.995443] env[62070]: _type = "Task" [ 885.995443] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 886.009076] env[62070]: DEBUG oslo_vmware.api [None req-24d48b36-01e1-4177-8824-76508bde5cc0 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Task: {'id': task-1121992, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 886.036032] env[62070]: DEBUG oslo_concurrency.lockutils [None req-7045d499-6270-457a-a3fe-e34d5ae00193 tempest-ServerGroupTestJSON-951679134 tempest-ServerGroupTestJSON-951679134-project-member] Lock "71c98ac8-4149-448b-bf0c-3bfdcc8f50ef" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 38.617s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 886.067280] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1121987, 'name': CreateVM_Task, 'duration_secs': 0.553415} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 886.068081] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d2cfcfac-4f15-4b16-9046-76722ee2e39b] Created VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 886.068796] env[62070]: DEBUG oslo_concurrency.lockutils [None req-f5579827-8e9e-49d8-9717-4039d0d0bfc5 tempest-ServerAddressesNegativeTestJSON-1593054557 tempest-ServerAddressesNegativeTestJSON-1593054557-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 886.068996] env[62070]: DEBUG oslo_concurrency.lockutils [None req-f5579827-8e9e-49d8-9717-4039d0d0bfc5 tempest-ServerAddressesNegativeTestJSON-1593054557 tempest-ServerAddressesNegativeTestJSON-1593054557-project-member] Acquired lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 886.069437] env[62070]: DEBUG oslo_concurrency.lockutils [None req-f5579827-8e9e-49d8-9717-4039d0d0bfc5 tempest-ServerAddressesNegativeTestJSON-1593054557 tempest-ServerAddressesNegativeTestJSON-1593054557-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 886.069725] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0d63d388-7e7f-4acb-b701-c6bef528dda5 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.075363] env[62070]: DEBUG oslo_vmware.api [None req-b245da95-b82c-4fb0-92b9-b49d402a2811 tempest-ServerRescueTestJSONUnderV235-557690179 tempest-ServerRescueTestJSONUnderV235-557690179-project-member] Task: {'id': task-1121988, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.632163} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 886.076058] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-b245da95-b82c-4fb0-92b9-b49d402a2811 tempest-ServerRescueTestJSONUnderV235-557690179 tempest-ServerRescueTestJSONUnderV235-557690179-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore2] 559eee5b-0834-4dcf-a436-5e58644c7a3b/559eee5b-0834-4dcf-a436-5e58644c7a3b.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 886.076333] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-b245da95-b82c-4fb0-92b9-b49d402a2811 tempest-ServerRescueTestJSONUnderV235-557690179 tempest-ServerRescueTestJSONUnderV235-557690179-project-member] [instance: 559eee5b-0834-4dcf-a436-5e58644c7a3b] Extending root virtual disk to 1048576 {{(pid=62070) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 886.079173] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3ebfbf89-d73d-4f8f-bfd5-9b359ae06b9c {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.083890] env[62070]: DEBUG oslo_vmware.api [None req-f5579827-8e9e-49d8-9717-4039d0d0bfc5 tempest-ServerAddressesNegativeTestJSON-1593054557 tempest-ServerAddressesNegativeTestJSON-1593054557-project-member] Waiting for the task: (returnval){ [ 886.083890] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]520c2544-d590-4cc0-96e2-cbddc361564f" [ 886.083890] env[62070]: _type = "Task" [ 886.083890] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 886.089084] env[62070]: DEBUG oslo_vmware.api [None req-b245da95-b82c-4fb0-92b9-b49d402a2811 tempest-ServerRescueTestJSONUnderV235-557690179 tempest-ServerRescueTestJSONUnderV235-557690179-project-member] Waiting for the task: (returnval){ [ 886.089084] env[62070]: value = "task-1121993" [ 886.089084] env[62070]: _type = "Task" [ 886.089084] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 886.096028] env[62070]: DEBUG oslo_vmware.api [None req-f5579827-8e9e-49d8-9717-4039d0d0bfc5 tempest-ServerAddressesNegativeTestJSON-1593054557 tempest-ServerAddressesNegativeTestJSON-1593054557-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]520c2544-d590-4cc0-96e2-cbddc361564f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 886.105675] env[62070]: DEBUG oslo_vmware.api [None req-b245da95-b82c-4fb0-92b9-b49d402a2811 tempest-ServerRescueTestJSONUnderV235-557690179 tempest-ServerRescueTestJSONUnderV235-557690179-project-member] Task: {'id': task-1121993, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 886.128111] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a26598b7-52e9-468a-9fa1-923840603074 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.138476] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7284fd91-4a7c-4ee0-a021-eced246aa71c {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.174456] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c67e2e74-bb86-43dc-959c-f08de157bc6c {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.181073] env[62070]: DEBUG nova.network.neutron [-] [instance: 61ab347d-1342-4f59-8955-10d575993b77] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 886.189615] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-895551aa-f465-444b-a934-b8c7b44adbef {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.207794] env[62070]: DEBUG nova.compute.provider_tree [None req-98c8c84f-4cf9-474f-aae7-fd7e5cc6d19c tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 886.265655] env[62070]: DEBUG oslo_concurrency.lockutils [req-bc0da6b4-b0c1-455f-ba05-827a2f82692a req-6e19f30a-2ed3-4fde-af97-5d5dc2729137 service nova] Releasing lock "refresh_cache-d2cfcfac-4f15-4b16-9046-76722ee2e39b" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 886.507318] env[62070]: DEBUG oslo_vmware.api [None req-24d48b36-01e1-4177-8824-76508bde5cc0 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Task: {'id': task-1121992, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 886.599693] env[62070]: DEBUG oslo_vmware.api [None req-f5579827-8e9e-49d8-9717-4039d0d0bfc5 tempest-ServerAddressesNegativeTestJSON-1593054557 tempest-ServerAddressesNegativeTestJSON-1593054557-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]520c2544-d590-4cc0-96e2-cbddc361564f, 'name': SearchDatastore_Task, 'duration_secs': 0.014826} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 886.600655] env[62070]: DEBUG oslo_concurrency.lockutils [None req-f5579827-8e9e-49d8-9717-4039d0d0bfc5 tempest-ServerAddressesNegativeTestJSON-1593054557 tempest-ServerAddressesNegativeTestJSON-1593054557-project-member] Releasing lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 886.602086] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-f5579827-8e9e-49d8-9717-4039d0d0bfc5 tempest-ServerAddressesNegativeTestJSON-1593054557 tempest-ServerAddressesNegativeTestJSON-1593054557-project-member] [instance: d2cfcfac-4f15-4b16-9046-76722ee2e39b] Processing image 43ea607c-7ece-4601-9b11-75c6a16aa7dd {{(pid=62070) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 886.602086] env[62070]: DEBUG oslo_concurrency.lockutils [None req-f5579827-8e9e-49d8-9717-4039d0d0bfc5 tempest-ServerAddressesNegativeTestJSON-1593054557 tempest-ServerAddressesNegativeTestJSON-1593054557-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 886.602086] env[62070]: DEBUG oslo_concurrency.lockutils [None req-f5579827-8e9e-49d8-9717-4039d0d0bfc5 tempest-ServerAddressesNegativeTestJSON-1593054557 tempest-ServerAddressesNegativeTestJSON-1593054557-project-member] Acquired lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 886.602086] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-f5579827-8e9e-49d8-9717-4039d0d0bfc5 tempest-ServerAddressesNegativeTestJSON-1593054557 tempest-ServerAddressesNegativeTestJSON-1593054557-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 886.606335] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-acd7efd8-7509-420b-a6cc-c802157c4d9d {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.608996] env[62070]: DEBUG oslo_vmware.api [None req-b245da95-b82c-4fb0-92b9-b49d402a2811 tempest-ServerRescueTestJSONUnderV235-557690179 tempest-ServerRescueTestJSONUnderV235-557690179-project-member] Task: {'id': task-1121993, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.110052} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 886.609348] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-b245da95-b82c-4fb0-92b9-b49d402a2811 tempest-ServerRescueTestJSONUnderV235-557690179 tempest-ServerRescueTestJSONUnderV235-557690179-project-member] [instance: 559eee5b-0834-4dcf-a436-5e58644c7a3b] Extended root virtual disk {{(pid=62070) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 886.610558] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90c5aa2d-d6b0-4260-a4dc-350c645b1f58 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.635728] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-b245da95-b82c-4fb0-92b9-b49d402a2811 tempest-ServerRescueTestJSONUnderV235-557690179 tempest-ServerRescueTestJSONUnderV235-557690179-project-member] [instance: 559eee5b-0834-4dcf-a436-5e58644c7a3b] Reconfiguring VM instance instance-0000004c to attach disk [datastore2] 559eee5b-0834-4dcf-a436-5e58644c7a3b/559eee5b-0834-4dcf-a436-5e58644c7a3b.vmdk or device None with type sparse {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 886.636665] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9ce5fa9f-036e-40f4-93ed-0e0cdf0b0e64 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.650970] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-f5579827-8e9e-49d8-9717-4039d0d0bfc5 tempest-ServerAddressesNegativeTestJSON-1593054557 tempest-ServerAddressesNegativeTestJSON-1593054557-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 886.651192] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-f5579827-8e9e-49d8-9717-4039d0d0bfc5 tempest-ServerAddressesNegativeTestJSON-1593054557 tempest-ServerAddressesNegativeTestJSON-1593054557-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62070) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 886.652145] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c8e15553-ce75-47de-9f4f-d233e9d7d0f5 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.658561] env[62070]: DEBUG oslo_vmware.api [None req-f5579827-8e9e-49d8-9717-4039d0d0bfc5 tempest-ServerAddressesNegativeTestJSON-1593054557 tempest-ServerAddressesNegativeTestJSON-1593054557-project-member] Waiting for the task: (returnval){ [ 886.658561] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]52115da6-bbba-e799-776c-92f5b5c2f34b" [ 886.658561] env[62070]: _type = "Task" [ 886.658561] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 886.663421] env[62070]: DEBUG oslo_vmware.api [None req-b245da95-b82c-4fb0-92b9-b49d402a2811 tempest-ServerRescueTestJSONUnderV235-557690179 tempest-ServerRescueTestJSONUnderV235-557690179-project-member] Waiting for the task: (returnval){ [ 886.663421] env[62070]: value = "task-1121994" [ 886.663421] env[62070]: _type = "Task" [ 886.663421] env[62070]: } to complete. 
{{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 886.672135] env[62070]: DEBUG oslo_vmware.api [None req-b245da95-b82c-4fb0-92b9-b49d402a2811 tempest-ServerRescueTestJSONUnderV235-557690179 tempest-ServerRescueTestJSONUnderV235-557690179-project-member] Task: {'id': task-1121994, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 886.675026] env[62070]: DEBUG oslo_vmware.api [None req-f5579827-8e9e-49d8-9717-4039d0d0bfc5 tempest-ServerAddressesNegativeTestJSON-1593054557 tempest-ServerAddressesNegativeTestJSON-1593054557-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52115da6-bbba-e799-776c-92f5b5c2f34b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 886.687813] env[62070]: INFO nova.compute.manager [-] [instance: 61ab347d-1342-4f59-8955-10d575993b77] Took 1.03 seconds to deallocate network for instance. [ 886.711085] env[62070]: DEBUG nova.scheduler.client.report [None req-98c8c84f-4cf9-474f-aae7-fd7e5cc6d19c tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 886.974775] env[62070]: DEBUG oslo_concurrency.lockutils [None req-39228318-05e4-467b-92ec-aaea15cc9613 tempest-ServerGroupTestJSON-951679134 tempest-ServerGroupTestJSON-951679134-project-member] Acquiring lock "71c98ac8-4149-448b-bf0c-3bfdcc8f50ef" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 886.975099] env[62070]: DEBUG oslo_concurrency.lockutils [None req-39228318-05e4-467b-92ec-aaea15cc9613 tempest-ServerGroupTestJSON-951679134 tempest-ServerGroupTestJSON-951679134-project-member] Lock "71c98ac8-4149-448b-bf0c-3bfdcc8f50ef" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 886.975313] env[62070]: DEBUG oslo_concurrency.lockutils [None req-39228318-05e4-467b-92ec-aaea15cc9613 tempest-ServerGroupTestJSON-951679134 tempest-ServerGroupTestJSON-951679134-project-member] Acquiring lock "71c98ac8-4149-448b-bf0c-3bfdcc8f50ef-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 886.975607] env[62070]: DEBUG oslo_concurrency.lockutils [None req-39228318-05e4-467b-92ec-aaea15cc9613 tempest-ServerGroupTestJSON-951679134 tempest-ServerGroupTestJSON-951679134-project-member] Lock "71c98ac8-4149-448b-bf0c-3bfdcc8f50ef-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 
0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 886.975844] env[62070]: DEBUG oslo_concurrency.lockutils [None req-39228318-05e4-467b-92ec-aaea15cc9613 tempest-ServerGroupTestJSON-951679134 tempest-ServerGroupTestJSON-951679134-project-member] Lock "71c98ac8-4149-448b-bf0c-3bfdcc8f50ef-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 886.978453] env[62070]: INFO nova.compute.manager [None req-39228318-05e4-467b-92ec-aaea15cc9613 tempest-ServerGroupTestJSON-951679134 tempest-ServerGroupTestJSON-951679134-project-member] [instance: 71c98ac8-4149-448b-bf0c-3bfdcc8f50ef] Terminating instance [ 886.980441] env[62070]: DEBUG nova.compute.manager [None req-39228318-05e4-467b-92ec-aaea15cc9613 tempest-ServerGroupTestJSON-951679134 tempest-ServerGroupTestJSON-951679134-project-member] [instance: 71c98ac8-4149-448b-bf0c-3bfdcc8f50ef] Start destroying the instance on the hypervisor. {{(pid=62070) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 886.980627] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-39228318-05e4-467b-92ec-aaea15cc9613 tempest-ServerGroupTestJSON-951679134 tempest-ServerGroupTestJSON-951679134-project-member] [instance: 71c98ac8-4149-448b-bf0c-3bfdcc8f50ef] Destroying instance {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 886.982165] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b3de6a8-a197-45c4-b4a4-bd998df72840 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.991630] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-39228318-05e4-467b-92ec-aaea15cc9613 tempest-ServerGroupTestJSON-951679134 tempest-ServerGroupTestJSON-951679134-project-member] [instance: 71c98ac8-4149-448b-bf0c-3bfdcc8f50ef] Powering off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 886.991926] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9e211129-6f0b-4469-8c8b-6d276583ebf5 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.002447] env[62070]: DEBUG oslo_vmware.api [None req-39228318-05e4-467b-92ec-aaea15cc9613 tempest-ServerGroupTestJSON-951679134 tempest-ServerGroupTestJSON-951679134-project-member] Waiting for the task: (returnval){ [ 887.002447] env[62070]: value = "task-1121995" [ 887.002447] env[62070]: _type = "Task" [ 887.002447] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 887.012597] env[62070]: DEBUG oslo_vmware.api [None req-24d48b36-01e1-4177-8824-76508bde5cc0 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Task: {'id': task-1121992, 'name': PowerOnVM_Task, 'duration_secs': 0.606828} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 887.013568] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-24d48b36-01e1-4177-8824-76508bde5cc0 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] [instance: 21bcb1a6-833b-48f3-8ee2-0e49c64a104f] Powered on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 887.013950] env[62070]: INFO nova.compute.manager [None req-24d48b36-01e1-4177-8824-76508bde5cc0 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] [instance: 21bcb1a6-833b-48f3-8ee2-0e49c64a104f] Took 8.49 seconds to spawn the instance on the hypervisor. [ 887.014375] env[62070]: DEBUG nova.compute.manager [None req-24d48b36-01e1-4177-8824-76508bde5cc0 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] [instance: 21bcb1a6-833b-48f3-8ee2-0e49c64a104f] Checking state {{(pid=62070) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 887.015457] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-746c53fa-b187-4cde-9919-81650839420e {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.022195] env[62070]: DEBUG oslo_vmware.api [None req-39228318-05e4-467b-92ec-aaea15cc9613 tempest-ServerGroupTestJSON-951679134 tempest-ServerGroupTestJSON-951679134-project-member] Task: {'id': task-1121995, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 887.170646] env[62070]: DEBUG oslo_vmware.api [None req-f5579827-8e9e-49d8-9717-4039d0d0bfc5 tempest-ServerAddressesNegativeTestJSON-1593054557 tempest-ServerAddressesNegativeTestJSON-1593054557-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52115da6-bbba-e799-776c-92f5b5c2f34b, 'name': SearchDatastore_Task, 'duration_secs': 0.024664} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 887.171777] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1408a422-84da-4f97-a0bd-6baafd1ca336 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.177023] env[62070]: DEBUG oslo_vmware.api [None req-b245da95-b82c-4fb0-92b9-b49d402a2811 tempest-ServerRescueTestJSONUnderV235-557690179 tempest-ServerRescueTestJSONUnderV235-557690179-project-member] Task: {'id': task-1121994, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 887.180223] env[62070]: DEBUG oslo_vmware.api [None req-f5579827-8e9e-49d8-9717-4039d0d0bfc5 tempest-ServerAddressesNegativeTestJSON-1593054557 tempest-ServerAddressesNegativeTestJSON-1593054557-project-member] Waiting for the task: (returnval){ [ 887.180223] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]5260fa4b-82e9-f311-166d-502ceff2ab29" [ 887.180223] env[62070]: _type = "Task" [ 887.180223] env[62070]: } to complete. 
{{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 887.190062] env[62070]: DEBUG oslo_vmware.api [None req-f5579827-8e9e-49d8-9717-4039d0d0bfc5 tempest-ServerAddressesNegativeTestJSON-1593054557 tempest-ServerAddressesNegativeTestJSON-1593054557-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]5260fa4b-82e9-f311-166d-502ceff2ab29, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 887.195940] env[62070]: DEBUG oslo_concurrency.lockutils [None req-ec72d549-7bd3-4594-9c1e-1e62a799962e tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 887.217064] env[62070]: DEBUG oslo_concurrency.lockutils [None req-98c8c84f-4cf9-474f-aae7-fd7e5cc6d19c tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.496s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 887.217676] env[62070]: DEBUG nova.compute.manager [None req-98c8c84f-4cf9-474f-aae7-fd7e5cc6d19c tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] [instance: 7dc27fe6-495f-498d-88fe-a99ddc19a21c] Start building networks asynchronously for instance. {{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 887.220516] env[62070]: DEBUG oslo_concurrency.lockutils [None req-70c50811-0af7-45a0-93fe-9c08c68c95af tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 26.889s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 887.220753] env[62070]: DEBUG nova.objects.instance [None req-70c50811-0af7-45a0-93fe-9c08c68c95af tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Lazy-loading 'resources' on Instance uuid e74fd58c-cfa8-45c4-8f02-96234b4a9192 {{(pid=62070) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 887.514284] env[62070]: DEBUG oslo_vmware.api [None req-39228318-05e4-467b-92ec-aaea15cc9613 tempest-ServerGroupTestJSON-951679134 tempest-ServerGroupTestJSON-951679134-project-member] Task: {'id': task-1121995, 'name': PowerOffVM_Task, 'duration_secs': 0.201137} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 887.514602] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-39228318-05e4-467b-92ec-aaea15cc9613 tempest-ServerGroupTestJSON-951679134 tempest-ServerGroupTestJSON-951679134-project-member] [instance: 71c98ac8-4149-448b-bf0c-3bfdcc8f50ef] Powered off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 887.514803] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-39228318-05e4-467b-92ec-aaea15cc9613 tempest-ServerGroupTestJSON-951679134 tempest-ServerGroupTestJSON-951679134-project-member] [instance: 71c98ac8-4149-448b-bf0c-3bfdcc8f50ef] Unregistering the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 887.515153] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-030c7e5f-af55-48f7-b4ca-734e88dc7368 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.539901] env[62070]: INFO nova.compute.manager [None req-24d48b36-01e1-4177-8824-76508bde5cc0 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] [instance: 21bcb1a6-833b-48f3-8ee2-0e49c64a104f] Took 35.22 seconds to build instance. [ 887.590735] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-39228318-05e4-467b-92ec-aaea15cc9613 tempest-ServerGroupTestJSON-951679134 tempest-ServerGroupTestJSON-951679134-project-member] [instance: 71c98ac8-4149-448b-bf0c-3bfdcc8f50ef] Unregistered the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 887.591052] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-39228318-05e4-467b-92ec-aaea15cc9613 tempest-ServerGroupTestJSON-951679134 tempest-ServerGroupTestJSON-951679134-project-member] [instance: 71c98ac8-4149-448b-bf0c-3bfdcc8f50ef] Deleting contents of the VM from datastore datastore2 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 887.591204] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-39228318-05e4-467b-92ec-aaea15cc9613 tempest-ServerGroupTestJSON-951679134 tempest-ServerGroupTestJSON-951679134-project-member] Deleting the datastore file [datastore2] 71c98ac8-4149-448b-bf0c-3bfdcc8f50ef {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 887.591511] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9d779712-b6a4-4b93-8f95-f294634652a6 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.599025] env[62070]: DEBUG oslo_vmware.api [None req-39228318-05e4-467b-92ec-aaea15cc9613 tempest-ServerGroupTestJSON-951679134 tempest-ServerGroupTestJSON-951679134-project-member] Waiting for the task: (returnval){ [ 887.599025] env[62070]: value = "task-1121997" [ 887.599025] env[62070]: _type = "Task" [ 887.599025] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 887.609733] env[62070]: DEBUG oslo_vmware.api [None req-39228318-05e4-467b-92ec-aaea15cc9613 tempest-ServerGroupTestJSON-951679134 tempest-ServerGroupTestJSON-951679134-project-member] Task: {'id': task-1121997, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 887.674929] env[62070]: DEBUG oslo_vmware.api [None req-b245da95-b82c-4fb0-92b9-b49d402a2811 tempest-ServerRescueTestJSONUnderV235-557690179 tempest-ServerRescueTestJSONUnderV235-557690179-project-member] Task: {'id': task-1121994, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 887.691583] env[62070]: DEBUG oslo_vmware.api [None req-f5579827-8e9e-49d8-9717-4039d0d0bfc5 tempest-ServerAddressesNegativeTestJSON-1593054557 tempest-ServerAddressesNegativeTestJSON-1593054557-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]5260fa4b-82e9-f311-166d-502ceff2ab29, 'name': SearchDatastore_Task, 'duration_secs': 0.012185} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 887.691902] env[62070]: DEBUG oslo_concurrency.lockutils [None req-f5579827-8e9e-49d8-9717-4039d0d0bfc5 tempest-ServerAddressesNegativeTestJSON-1593054557 tempest-ServerAddressesNegativeTestJSON-1593054557-project-member] Releasing lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 887.692212] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-f5579827-8e9e-49d8-9717-4039d0d0bfc5 tempest-ServerAddressesNegativeTestJSON-1593054557 tempest-ServerAddressesNegativeTestJSON-1593054557-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore2] d2cfcfac-4f15-4b16-9046-76722ee2e39b/d2cfcfac-4f15-4b16-9046-76722ee2e39b.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 887.692589] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-237ea8e5-be1f-4375-b058-8e66278e7257 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.700413] env[62070]: DEBUG oslo_vmware.api [None req-f5579827-8e9e-49d8-9717-4039d0d0bfc5 tempest-ServerAddressesNegativeTestJSON-1593054557 tempest-ServerAddressesNegativeTestJSON-1593054557-project-member] Waiting for the task: (returnval){ [ 887.700413] env[62070]: value = "task-1121998" [ 887.700413] env[62070]: _type = "Task" [ 887.700413] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 887.710209] env[62070]: DEBUG oslo_vmware.api [None req-f5579827-8e9e-49d8-9717-4039d0d0bfc5 tempest-ServerAddressesNegativeTestJSON-1593054557 tempest-ServerAddressesNegativeTestJSON-1593054557-project-member] Task: {'id': task-1121998, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 887.726770] env[62070]: DEBUG nova.compute.utils [None req-98c8c84f-4cf9-474f-aae7-fd7e5cc6d19c tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Using /dev/sd instead of None {{(pid=62070) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 887.728734] env[62070]: DEBUG nova.compute.manager [None req-98c8c84f-4cf9-474f-aae7-fd7e5cc6d19c tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] [instance: 7dc27fe6-495f-498d-88fe-a99ddc19a21c] Allocating IP information in the background. {{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 887.728931] env[62070]: DEBUG nova.network.neutron [None req-98c8c84f-4cf9-474f-aae7-fd7e5cc6d19c tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] [instance: 7dc27fe6-495f-498d-88fe-a99ddc19a21c] allocate_for_instance() {{(pid=62070) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 887.822458] env[62070]: DEBUG nova.policy [None req-98c8c84f-4cf9-474f-aae7-fd7e5cc6d19c tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '58b377e0d90a45a89966048bd20f609f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1a94db233e3a43dc9aa7cf887c6cb1f6', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62070) authorize /opt/stack/nova/nova/policy.py:201}} [ 888.041585] env[62070]: DEBUG oslo_concurrency.lockutils [None req-24d48b36-01e1-4177-8824-76508bde5cc0 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Lock "21bcb1a6-833b-48f3-8ee2-0e49c64a104f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 36.738s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 888.117027] env[62070]: DEBUG oslo_vmware.api [None req-39228318-05e4-467b-92ec-aaea15cc9613 tempest-ServerGroupTestJSON-951679134 tempest-ServerGroupTestJSON-951679134-project-member] Task: {'id': task-1121997, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.261324} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 888.117027] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-39228318-05e4-467b-92ec-aaea15cc9613 tempest-ServerGroupTestJSON-951679134 tempest-ServerGroupTestJSON-951679134-project-member] Deleted the datastore file {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 888.117027] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-39228318-05e4-467b-92ec-aaea15cc9613 tempest-ServerGroupTestJSON-951679134 tempest-ServerGroupTestJSON-951679134-project-member] [instance: 71c98ac8-4149-448b-bf0c-3bfdcc8f50ef] Deleted contents of the VM from datastore datastore2 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 888.117027] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-39228318-05e4-467b-92ec-aaea15cc9613 tempest-ServerGroupTestJSON-951679134 tempest-ServerGroupTestJSON-951679134-project-member] [instance: 71c98ac8-4149-448b-bf0c-3bfdcc8f50ef] Instance destroyed {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 888.117027] env[62070]: INFO nova.compute.manager [None req-39228318-05e4-467b-92ec-aaea15cc9613 tempest-ServerGroupTestJSON-951679134 tempest-ServerGroupTestJSON-951679134-project-member] [instance: 71c98ac8-4149-448b-bf0c-3bfdcc8f50ef] Took 1.13 seconds to destroy the instance on the hypervisor. [ 888.117027] env[62070]: DEBUG oslo.service.loopingcall [None req-39228318-05e4-467b-92ec-aaea15cc9613 tempest-ServerGroupTestJSON-951679134 tempest-ServerGroupTestJSON-951679134-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 888.117027] env[62070]: DEBUG nova.compute.manager [-] [instance: 71c98ac8-4149-448b-bf0c-3bfdcc8f50ef] Deallocating network for instance {{(pid=62070) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 888.117027] env[62070]: DEBUG nova.network.neutron [-] [instance: 71c98ac8-4149-448b-bf0c-3bfdcc8f50ef] deallocate_for_instance() {{(pid=62070) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 888.142381] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c142a64a-3f10-41c6-b8b6-73a1e4c787d0 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.156024] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9097cad-32ed-4ad9-85a4-c01096fa7f42 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.203314] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91d48047-1335-4523-a870-18e7e0c180eb {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.217288] env[62070]: DEBUG oslo_vmware.api [None req-b245da95-b82c-4fb0-92b9-b49d402a2811 tempest-ServerRescueTestJSONUnderV235-557690179 tempest-ServerRescueTestJSONUnderV235-557690179-project-member] Task: {'id': task-1121994, 'name': ReconfigVM_Task, 'duration_secs': 1.048129} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 888.223791] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-b245da95-b82c-4fb0-92b9-b49d402a2811 tempest-ServerRescueTestJSONUnderV235-557690179 tempest-ServerRescueTestJSONUnderV235-557690179-project-member] [instance: 559eee5b-0834-4dcf-a436-5e58644c7a3b] Reconfigured VM instance instance-0000004c to attach disk [datastore2] 559eee5b-0834-4dcf-a436-5e58644c7a3b/559eee5b-0834-4dcf-a436-5e58644c7a3b.vmdk or device None with type sparse {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 888.230034] env[62070]: DEBUG oslo_vmware.api [None req-f5579827-8e9e-49d8-9717-4039d0d0bfc5 tempest-ServerAddressesNegativeTestJSON-1593054557 tempest-ServerAddressesNegativeTestJSON-1593054557-project-member] Task: {'id': task-1121998, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 888.230034] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-52aad434-df7f-4b13-ad8f-9c8202981e50 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.231235] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77a26da6-58cd-41d7-a0c7-64f9597aee38 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.237790] env[62070]: DEBUG nova.compute.manager [None req-98c8c84f-4cf9-474f-aae7-fd7e5cc6d19c tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] [instance: 7dc27fe6-495f-498d-88fe-a99ddc19a21c] Start building block device mappings for instance. {{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 888.260381] env[62070]: DEBUG nova.compute.provider_tree [None req-70c50811-0af7-45a0-93fe-9c08c68c95af tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 888.261824] env[62070]: DEBUG nova.network.neutron [None req-98c8c84f-4cf9-474f-aae7-fd7e5cc6d19c tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] [instance: 7dc27fe6-495f-498d-88fe-a99ddc19a21c] Successfully created port: cac26624-11c7-45a9-acb3-3e86b7232ab2 {{(pid=62070) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 888.265850] env[62070]: DEBUG oslo_vmware.api [None req-b245da95-b82c-4fb0-92b9-b49d402a2811 tempest-ServerRescueTestJSONUnderV235-557690179 tempest-ServerRescueTestJSONUnderV235-557690179-project-member] Waiting for the task: (returnval){ [ 888.265850] env[62070]: value = "task-1121999" [ 888.265850] env[62070]: _type = "Task" [ 888.265850] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 888.278425] env[62070]: DEBUG oslo_vmware.api [None req-b245da95-b82c-4fb0-92b9-b49d402a2811 tempest-ServerRescueTestJSONUnderV235-557690179 tempest-ServerRescueTestJSONUnderV235-557690179-project-member] Task: {'id': task-1121999, 'name': Rename_Task} progress is 14%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 888.559806] env[62070]: DEBUG nova.compute.manager [req-b061fdde-c78e-4941-bfb3-cb6abfda74d9 req-63522f50-6cdc-4830-ab3b-8fcd8b88da2d service nova] [instance: 21bcb1a6-833b-48f3-8ee2-0e49c64a104f] Received event network-changed-45420f68-e309-4569-8dac-28e16d9417d7 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 888.560040] env[62070]: DEBUG nova.compute.manager [req-b061fdde-c78e-4941-bfb3-cb6abfda74d9 req-63522f50-6cdc-4830-ab3b-8fcd8b88da2d service nova] [instance: 21bcb1a6-833b-48f3-8ee2-0e49c64a104f] Refreshing instance network info cache due to event network-changed-45420f68-e309-4569-8dac-28e16d9417d7. {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 888.560468] env[62070]: DEBUG oslo_concurrency.lockutils [req-b061fdde-c78e-4941-bfb3-cb6abfda74d9 req-63522f50-6cdc-4830-ab3b-8fcd8b88da2d service nova] Acquiring lock "refresh_cache-21bcb1a6-833b-48f3-8ee2-0e49c64a104f" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 888.560941] env[62070]: DEBUG oslo_concurrency.lockutils [req-b061fdde-c78e-4941-bfb3-cb6abfda74d9 req-63522f50-6cdc-4830-ab3b-8fcd8b88da2d service nova] Acquired lock "refresh_cache-21bcb1a6-833b-48f3-8ee2-0e49c64a104f" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 888.560941] env[62070]: DEBUG nova.network.neutron [req-b061fdde-c78e-4941-bfb3-cb6abfda74d9 req-63522f50-6cdc-4830-ab3b-8fcd8b88da2d service nova] [instance: 21bcb1a6-833b-48f3-8ee2-0e49c64a104f] Refreshing network info cache for port 45420f68-e309-4569-8dac-28e16d9417d7 {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 888.716732] env[62070]: DEBUG oslo_vmware.api [None req-f5579827-8e9e-49d8-9717-4039d0d0bfc5 tempest-ServerAddressesNegativeTestJSON-1593054557 tempest-ServerAddressesNegativeTestJSON-1593054557-project-member] Task: {'id': task-1121998, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.688164} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 888.717314] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-f5579827-8e9e-49d8-9717-4039d0d0bfc5 tempest-ServerAddressesNegativeTestJSON-1593054557 tempest-ServerAddressesNegativeTestJSON-1593054557-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore2] d2cfcfac-4f15-4b16-9046-76722ee2e39b/d2cfcfac-4f15-4b16-9046-76722ee2e39b.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 888.717769] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-f5579827-8e9e-49d8-9717-4039d0d0bfc5 tempest-ServerAddressesNegativeTestJSON-1593054557 tempest-ServerAddressesNegativeTestJSON-1593054557-project-member] [instance: d2cfcfac-4f15-4b16-9046-76722ee2e39b] Extending root virtual disk to 1048576 {{(pid=62070) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 888.720305] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-644a675d-b79e-40b8-938d-307324768173 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.726327] env[62070]: DEBUG oslo_vmware.api [None req-f5579827-8e9e-49d8-9717-4039d0d0bfc5 tempest-ServerAddressesNegativeTestJSON-1593054557 tempest-ServerAddressesNegativeTestJSON-1593054557-project-member] Waiting for the task: (returnval){ [ 888.726327] env[62070]: value = "task-1122000" [ 888.726327] env[62070]: _type = "Task" [ 888.726327] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 888.737698] env[62070]: DEBUG oslo_vmware.api [None req-f5579827-8e9e-49d8-9717-4039d0d0bfc5 tempest-ServerAddressesNegativeTestJSON-1593054557 tempest-ServerAddressesNegativeTestJSON-1593054557-project-member] Task: {'id': task-1122000, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 888.770071] env[62070]: DEBUG nova.scheduler.client.report [None req-70c50811-0af7-45a0-93fe-9c08c68c95af tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 888.782951] env[62070]: DEBUG oslo_vmware.api [None req-b245da95-b82c-4fb0-92b9-b49d402a2811 tempest-ServerRescueTestJSONUnderV235-557690179 tempest-ServerRescueTestJSONUnderV235-557690179-project-member] Task: {'id': task-1121999, 'name': Rename_Task, 'duration_secs': 0.445118} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 888.783832] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-b245da95-b82c-4fb0-92b9-b49d402a2811 tempest-ServerRescueTestJSONUnderV235-557690179 tempest-ServerRescueTestJSONUnderV235-557690179-project-member] [instance: 559eee5b-0834-4dcf-a436-5e58644c7a3b] Powering on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 888.784135] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9ed7159a-449b-4add-addb-64dd68e0dd4c {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.793124] env[62070]: DEBUG oslo_vmware.api [None req-b245da95-b82c-4fb0-92b9-b49d402a2811 tempest-ServerRescueTestJSONUnderV235-557690179 tempest-ServerRescueTestJSONUnderV235-557690179-project-member] Waiting for the task: (returnval){ [ 888.793124] env[62070]: value = "task-1122001" [ 888.793124] env[62070]: _type = "Task" [ 888.793124] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 888.801929] env[62070]: DEBUG oslo_vmware.api [None req-b245da95-b82c-4fb0-92b9-b49d402a2811 tempest-ServerRescueTestJSONUnderV235-557690179 tempest-ServerRescueTestJSONUnderV235-557690179-project-member] Task: {'id': task-1122001, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 889.071932] env[62070]: DEBUG nova.network.neutron [-] [instance: 71c98ac8-4149-448b-bf0c-3bfdcc8f50ef] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 889.239405] env[62070]: DEBUG oslo_vmware.api [None req-f5579827-8e9e-49d8-9717-4039d0d0bfc5 tempest-ServerAddressesNegativeTestJSON-1593054557 tempest-ServerAddressesNegativeTestJSON-1593054557-project-member] Task: {'id': task-1122000, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.065188} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 889.239405] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-f5579827-8e9e-49d8-9717-4039d0d0bfc5 tempest-ServerAddressesNegativeTestJSON-1593054557 tempest-ServerAddressesNegativeTestJSON-1593054557-project-member] [instance: d2cfcfac-4f15-4b16-9046-76722ee2e39b] Extended root virtual disk {{(pid=62070) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 889.239405] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b00b2f9-6f1f-4ffa-8a91-8a4750fd48bf {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.255924] env[62070]: DEBUG nova.compute.manager [None req-98c8c84f-4cf9-474f-aae7-fd7e5cc6d19c tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] [instance: 7dc27fe6-495f-498d-88fe-a99ddc19a21c] Start spawning the instance on the hypervisor. 
{{(pid=62070) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 889.266743] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-f5579827-8e9e-49d8-9717-4039d0d0bfc5 tempest-ServerAddressesNegativeTestJSON-1593054557 tempest-ServerAddressesNegativeTestJSON-1593054557-project-member] [instance: d2cfcfac-4f15-4b16-9046-76722ee2e39b] Reconfiguring VM instance instance-0000004d to attach disk [datastore2] d2cfcfac-4f15-4b16-9046-76722ee2e39b/d2cfcfac-4f15-4b16-9046-76722ee2e39b.vmdk or device None with type sparse {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 889.269959] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f47c6dc1-2195-447e-87b2-32b3e992c3e5 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.288374] env[62070]: DEBUG oslo_concurrency.lockutils [None req-70c50811-0af7-45a0-93fe-9c08c68c95af tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.068s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 889.291057] env[62070]: DEBUG oslo_concurrency.lockutils [None req-ee096c9d-cf7f-41b7-a4e4-dadeeb65a307 tempest-ServersNegativeTestMultiTenantJSON-2128819833 tempest-ServersNegativeTestMultiTenantJSON-2128819833-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 19.406s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 889.291647] env[62070]: DEBUG nova.objects.instance [None req-ee096c9d-cf7f-41b7-a4e4-dadeeb65a307 tempest-ServersNegativeTestMultiTenantJSON-2128819833 tempest-ServersNegativeTestMultiTenantJSON-2128819833-project-member] Lazy-loading 'resources' on Instance uuid 4bba7448-69f7-4764-9ae6-eb6585f71515 {{(pid=62070) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 889.299572] env[62070]: DEBUG nova.virt.hardware [None req-98c8c84f-4cf9-474f-aae7-fd7e5cc6d19c tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T09:21:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T09:21:17Z,direct_url=,disk_format='vmdk',id=43ea607c-7ece-4601-9b11-75c6a16aa7dd,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9d42cb2bbadf40d6b35f237f71234611',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T09:21:18Z,virtual_size=,visibility=), allow threads: False {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 889.299856] env[62070]: DEBUG nova.virt.hardware [None req-98c8c84f-4cf9-474f-aae7-fd7e5cc6d19c tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Flavor limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:348}} [ 889.300066] env[62070]: DEBUG nova.virt.hardware [None req-98c8c84f-4cf9-474f-aae7-fd7e5cc6d19c tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Image limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 889.300325] env[62070]: DEBUG nova.virt.hardware [None req-98c8c84f-4cf9-474f-aae7-fd7e5cc6d19c tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Flavor pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 889.300516] env[62070]: DEBUG nova.virt.hardware [None req-98c8c84f-4cf9-474f-aae7-fd7e5cc6d19c tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Image pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 889.300679] env[62070]: DEBUG nova.virt.hardware [None req-98c8c84f-4cf9-474f-aae7-fd7e5cc6d19c tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 889.300897] env[62070]: DEBUG nova.virt.hardware [None req-98c8c84f-4cf9-474f-aae7-fd7e5cc6d19c tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 889.301075] env[62070]: DEBUG nova.virt.hardware [None req-98c8c84f-4cf9-474f-aae7-fd7e5cc6d19c tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 889.301430] env[62070]: DEBUG nova.virt.hardware [None req-98c8c84f-4cf9-474f-aae7-fd7e5cc6d19c tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Got 1 possible topologies {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 889.301669] env[62070]: DEBUG nova.virt.hardware [None req-98c8c84f-4cf9-474f-aae7-fd7e5cc6d19c tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 889.301886] env[62070]: DEBUG nova.virt.hardware [None req-98c8c84f-4cf9-474f-aae7-fd7e5cc6d19c tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 889.302840] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-006caffb-a300-4432-a7b2-064270cdcfa9 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.308992] env[62070]: DEBUG oslo_vmware.api [None req-f5579827-8e9e-49d8-9717-4039d0d0bfc5 
tempest-ServerAddressesNegativeTestJSON-1593054557 tempest-ServerAddressesNegativeTestJSON-1593054557-project-member] Waiting for the task: (returnval){ [ 889.308992] env[62070]: value = "task-1122002" [ 889.308992] env[62070]: _type = "Task" [ 889.308992] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 889.316369] env[62070]: INFO nova.scheduler.client.report [None req-70c50811-0af7-45a0-93fe-9c08c68c95af tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Deleted allocations for instance e74fd58c-cfa8-45c4-8f02-96234b4a9192 [ 889.321167] env[62070]: DEBUG oslo_vmware.api [None req-b245da95-b82c-4fb0-92b9-b49d402a2811 tempest-ServerRescueTestJSONUnderV235-557690179 tempest-ServerRescueTestJSONUnderV235-557690179-project-member] Task: {'id': task-1122001, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 889.325549] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc683335-ae51-421f-92cf-69f19fc1b396 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.337449] env[62070]: DEBUG oslo_vmware.api [None req-f5579827-8e9e-49d8-9717-4039d0d0bfc5 tempest-ServerAddressesNegativeTestJSON-1593054557 tempest-ServerAddressesNegativeTestJSON-1593054557-project-member] Task: {'id': task-1122002, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 889.397424] env[62070]: DEBUG nova.network.neutron [req-b061fdde-c78e-4941-bfb3-cb6abfda74d9 req-63522f50-6cdc-4830-ab3b-8fcd8b88da2d service nova] [instance: 21bcb1a6-833b-48f3-8ee2-0e49c64a104f] Updated VIF entry in instance network info cache for port 45420f68-e309-4569-8dac-28e16d9417d7. 
{{(pid=62070) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 889.398471] env[62070]: DEBUG nova.network.neutron [req-b061fdde-c78e-4941-bfb3-cb6abfda74d9 req-63522f50-6cdc-4830-ab3b-8fcd8b88da2d service nova] [instance: 21bcb1a6-833b-48f3-8ee2-0e49c64a104f] Updating instance_info_cache with network_info: [{"id": "45420f68-e309-4569-8dac-28e16d9417d7", "address": "fa:16:3e:08:c2:70", "network": {"id": "48dc51c7-cfa4-452e-9d72-2968d9a40dfa", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-274800531-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.203", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "925dff51764c4b56ae7ea05fbde2ecdd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2c7c1b46-cb81-45da-b5aa-7905d4da5854", "external-id": "nsx-vlan-transportzone-15", "segmentation_id": 15, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap45420f68-e3", "ovs_interfaceid": "45420f68-e309-4569-8dac-28e16d9417d7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 889.508638] env[62070]: DEBUG oslo_concurrency.lockutils [None req-8a0f2bb4-1ee9-407f-978d-94801eeae730 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Acquiring lock "cf52cee8-874e-44e8-a36e-49ac20f3e312" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 889.508924] env[62070]: DEBUG oslo_concurrency.lockutils [None req-8a0f2bb4-1ee9-407f-978d-94801eeae730 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Lock "cf52cee8-874e-44e8-a36e-49ac20f3e312" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 889.576424] env[62070]: INFO nova.compute.manager [-] [instance: 71c98ac8-4149-448b-bf0c-3bfdcc8f50ef] Took 1.46 seconds to deallocate network for instance. [ 889.807545] env[62070]: DEBUG oslo_vmware.api [None req-b245da95-b82c-4fb0-92b9-b49d402a2811 tempest-ServerRescueTestJSONUnderV235-557690179 tempest-ServerRescueTestJSONUnderV235-557690179-project-member] Task: {'id': task-1122001, 'name': PowerOnVM_Task, 'duration_secs': 0.677574} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 889.807842] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-b245da95-b82c-4fb0-92b9-b49d402a2811 tempest-ServerRescueTestJSONUnderV235-557690179 tempest-ServerRescueTestJSONUnderV235-557690179-project-member] [instance: 559eee5b-0834-4dcf-a436-5e58644c7a3b] Powered on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 889.808093] env[62070]: INFO nova.compute.manager [None req-b245da95-b82c-4fb0-92b9-b49d402a2811 tempest-ServerRescueTestJSONUnderV235-557690179 tempest-ServerRescueTestJSONUnderV235-557690179-project-member] [instance: 559eee5b-0834-4dcf-a436-5e58644c7a3b] Took 9.67 seconds to spawn the instance on the hypervisor. [ 889.808327] env[62070]: DEBUG nova.compute.manager [None req-b245da95-b82c-4fb0-92b9-b49d402a2811 tempest-ServerRescueTestJSONUnderV235-557690179 tempest-ServerRescueTestJSONUnderV235-557690179-project-member] [instance: 559eee5b-0834-4dcf-a436-5e58644c7a3b] Checking state {{(pid=62070) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 889.809199] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4166b8dd-3e69-4835-8e45-5c6cbc429413 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.819526] env[62070]: DEBUG oslo_vmware.api [None req-f5579827-8e9e-49d8-9717-4039d0d0bfc5 tempest-ServerAddressesNegativeTestJSON-1593054557 tempest-ServerAddressesNegativeTestJSON-1593054557-project-member] Task: {'id': task-1122002, 'name': ReconfigVM_Task, 'duration_secs': 0.308887} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 889.821274] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-f5579827-8e9e-49d8-9717-4039d0d0bfc5 tempest-ServerAddressesNegativeTestJSON-1593054557 tempest-ServerAddressesNegativeTestJSON-1593054557-project-member] [instance: d2cfcfac-4f15-4b16-9046-76722ee2e39b] Reconfigured VM instance instance-0000004d to attach disk [datastore2] d2cfcfac-4f15-4b16-9046-76722ee2e39b/d2cfcfac-4f15-4b16-9046-76722ee2e39b.vmdk or device None with type sparse {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 889.826569] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b64523ee-87ec-4432-b94d-44ad5830a32a {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.836936] env[62070]: DEBUG oslo_vmware.api [None req-f5579827-8e9e-49d8-9717-4039d0d0bfc5 tempest-ServerAddressesNegativeTestJSON-1593054557 tempest-ServerAddressesNegativeTestJSON-1593054557-project-member] Waiting for the task: (returnval){ [ 889.836936] env[62070]: value = "task-1122003" [ 889.836936] env[62070]: _type = "Task" [ 889.836936] env[62070]: } to complete. 
{{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 889.840605] env[62070]: DEBUG oslo_concurrency.lockutils [None req-70c50811-0af7-45a0-93fe-9c08c68c95af tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Lock "e74fd58c-cfa8-45c4-8f02-96234b4a9192" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 32.431s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 889.854548] env[62070]: DEBUG oslo_vmware.api [None req-f5579827-8e9e-49d8-9717-4039d0d0bfc5 tempest-ServerAddressesNegativeTestJSON-1593054557 tempest-ServerAddressesNegativeTestJSON-1593054557-project-member] Task: {'id': task-1122003, 'name': Rename_Task} progress is 6%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 889.861252] env[62070]: DEBUG nova.compute.manager [req-b0904f81-1490-4668-b0b6-ce2567f29458 req-ec1a1683-6778-4026-9fe9-822f784fa702 service nova] [instance: 7dc27fe6-495f-498d-88fe-a99ddc19a21c] Received event network-vif-plugged-cac26624-11c7-45a9-acb3-3e86b7232ab2 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 889.861553] env[62070]: DEBUG oslo_concurrency.lockutils [req-b0904f81-1490-4668-b0b6-ce2567f29458 req-ec1a1683-6778-4026-9fe9-822f784fa702 service nova] Acquiring lock "7dc27fe6-495f-498d-88fe-a99ddc19a21c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 889.861674] env[62070]: DEBUG oslo_concurrency.lockutils [req-b0904f81-1490-4668-b0b6-ce2567f29458 req-ec1a1683-6778-4026-9fe9-822f784fa702 service nova] Lock "7dc27fe6-495f-498d-88fe-a99ddc19a21c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 889.861944] env[62070]: DEBUG oslo_concurrency.lockutils [req-b0904f81-1490-4668-b0b6-ce2567f29458 req-ec1a1683-6778-4026-9fe9-822f784fa702 service nova] Lock "7dc27fe6-495f-498d-88fe-a99ddc19a21c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 889.862659] env[62070]: DEBUG nova.compute.manager [req-b0904f81-1490-4668-b0b6-ce2567f29458 req-ec1a1683-6778-4026-9fe9-822f784fa702 service nova] [instance: 7dc27fe6-495f-498d-88fe-a99ddc19a21c] No waiting events found dispatching network-vif-plugged-cac26624-11c7-45a9-acb3-3e86b7232ab2 {{(pid=62070) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 889.864356] env[62070]: WARNING nova.compute.manager [req-b0904f81-1490-4668-b0b6-ce2567f29458 req-ec1a1683-6778-4026-9fe9-822f784fa702 service nova] [instance: 7dc27fe6-495f-498d-88fe-a99ddc19a21c] Received unexpected event network-vif-plugged-cac26624-11c7-45a9-acb3-3e86b7232ab2 for instance with vm_state building and task_state spawning. 
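Editor's note: the "Acquiring lock ... / Lock ... acquired by ... :: waited Ns / Lock ... "released" by ... :: held Ns" lines throughout this log come from oslo.concurrency's lockutils (the lockutils.py:310/313/331 and 402/407/421 call sites cited above). The sketch below is a minimal illustration of that pattern, not Nova's actual code; the lock names are taken from the log and the worker bodies are placeholders.

```python
# Minimal sketch of the oslo.concurrency locking pattern seen in the log.
from oslo_concurrency import lockutils

# Decorator form: serializes work under a named lock and emits the
# 'acquired by ... :: waited Ns' / '"released" by ... :: held Ns' DEBUG lines.
@lockutils.synchronized('21bcb1a6-833b-48f3-8ee2-0e49c64a104f')
def _locked_do_build_and_run_instance():
    ...  # placeholder: build the instance while holding the per-instance lock

# Context-manager form, as used for the "refresh_cache-<uuid>" locks; this is
# what produces the 'Acquiring lock' / 'Acquired lock' / 'Releasing lock' lines.
with lockutils.lock('refresh_cache-21bcb1a6-833b-48f3-8ee2-0e49c64a104f'):
    pass  # placeholder: refresh the instance network info cache under the lock
```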
[ 889.901260] env[62070]: DEBUG oslo_concurrency.lockutils [req-b061fdde-c78e-4941-bfb3-cb6abfda74d9 req-63522f50-6cdc-4830-ab3b-8fcd8b88da2d service nova] Releasing lock "refresh_cache-21bcb1a6-833b-48f3-8ee2-0e49c64a104f" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 889.936415] env[62070]: DEBUG nova.network.neutron [None req-98c8c84f-4cf9-474f-aae7-fd7e5cc6d19c tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] [instance: 7dc27fe6-495f-498d-88fe-a99ddc19a21c] Successfully updated port: cac26624-11c7-45a9-acb3-3e86b7232ab2 {{(pid=62070) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 890.000944] env[62070]: DEBUG oslo_concurrency.lockutils [None req-80bd56b8-16e3-42ee-acd0-234370ddd048 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Acquiring lock "963feecc-ff58-4cbb-8d6f-3f9035337087" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 890.001293] env[62070]: DEBUG oslo_concurrency.lockutils [None req-80bd56b8-16e3-42ee-acd0-234370ddd048 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Lock "963feecc-ff58-4cbb-8d6f-3f9035337087" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 890.001977] env[62070]: DEBUG oslo_concurrency.lockutils [None req-80bd56b8-16e3-42ee-acd0-234370ddd048 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Acquiring lock "963feecc-ff58-4cbb-8d6f-3f9035337087-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 890.001977] env[62070]: DEBUG oslo_concurrency.lockutils [None req-80bd56b8-16e3-42ee-acd0-234370ddd048 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Lock "963feecc-ff58-4cbb-8d6f-3f9035337087-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 890.002165] env[62070]: DEBUG oslo_concurrency.lockutils [None req-80bd56b8-16e3-42ee-acd0-234370ddd048 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Lock "963feecc-ff58-4cbb-8d6f-3f9035337087-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 890.004499] env[62070]: INFO nova.compute.manager [None req-80bd56b8-16e3-42ee-acd0-234370ddd048 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] [instance: 963feecc-ff58-4cbb-8d6f-3f9035337087] Terminating instance [ 890.006619] env[62070]: DEBUG nova.compute.manager [None req-80bd56b8-16e3-42ee-acd0-234370ddd048 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] [instance: 963feecc-ff58-4cbb-8d6f-3f9035337087] Start destroying the instance on the 
hypervisor. {{(pid=62070) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 890.006906] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-80bd56b8-16e3-42ee-acd0-234370ddd048 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] [instance: 963feecc-ff58-4cbb-8d6f-3f9035337087] Destroying instance {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 890.007923] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00cc7920-7a82-490e-9b2d-dbfca67631c8 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.013010] env[62070]: DEBUG nova.compute.manager [None req-8a0f2bb4-1ee9-407f-978d-94801eeae730 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] [instance: cf52cee8-874e-44e8-a36e-49ac20f3e312] Starting instance... {{(pid=62070) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 890.021853] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-80bd56b8-16e3-42ee-acd0-234370ddd048 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] [instance: 963feecc-ff58-4cbb-8d6f-3f9035337087] Powering off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 890.022114] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a9f77f63-209a-45a8-9b51-84900b06781c {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.029433] env[62070]: DEBUG oslo_vmware.api [None req-80bd56b8-16e3-42ee-acd0-234370ddd048 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Waiting for the task: (returnval){ [ 890.029433] env[62070]: value = "task-1122004" [ 890.029433] env[62070]: _type = "Task" [ 890.029433] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 890.037470] env[62070]: DEBUG oslo_vmware.api [None req-80bd56b8-16e3-42ee-acd0-234370ddd048 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Task: {'id': task-1122004, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 890.085248] env[62070]: DEBUG oslo_concurrency.lockutils [None req-39228318-05e4-467b-92ec-aaea15cc9613 tempest-ServerGroupTestJSON-951679134 tempest-ServerGroupTestJSON-951679134-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 890.161312] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebac7aca-6ec6-415e-8b1c-fab0d16d3135 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.169920] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b607b8ea-95a1-42fa-9618-4308331f1fd2 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.204035] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1046bef8-595b-48ce-9ee0-238f8a0be7dd {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.212812] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2f6da18-232a-4ba9-94e5-3e17e96678f2 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.229192] env[62070]: DEBUG nova.compute.provider_tree [None req-ee096c9d-cf7f-41b7-a4e4-dadeeb65a307 tempest-ServersNegativeTestMultiTenantJSON-2128819833 tempest-ServersNegativeTestMultiTenantJSON-2128819833-project-member] Updating inventory in ProviderTree for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 890.334953] env[62070]: INFO nova.compute.manager [None req-b245da95-b82c-4fb0-92b9-b49d402a2811 tempest-ServerRescueTestJSONUnderV235-557690179 tempest-ServerRescueTestJSONUnderV235-557690179-project-member] [instance: 559eee5b-0834-4dcf-a436-5e58644c7a3b] Took 37.93 seconds to build instance. [ 890.352198] env[62070]: DEBUG oslo_vmware.api [None req-f5579827-8e9e-49d8-9717-4039d0d0bfc5 tempest-ServerAddressesNegativeTestJSON-1593054557 tempest-ServerAddressesNegativeTestJSON-1593054557-project-member] Task: {'id': task-1122003, 'name': Rename_Task, 'duration_secs': 0.162515} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 890.353150] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-f5579827-8e9e-49d8-9717-4039d0d0bfc5 tempest-ServerAddressesNegativeTestJSON-1593054557 tempest-ServerAddressesNegativeTestJSON-1593054557-project-member] [instance: d2cfcfac-4f15-4b16-9046-76722ee2e39b] Powering on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 890.353413] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-116f28ed-521c-4139-9368-9499563541cb {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.360960] env[62070]: DEBUG oslo_vmware.api [None req-f5579827-8e9e-49d8-9717-4039d0d0bfc5 tempest-ServerAddressesNegativeTestJSON-1593054557 tempest-ServerAddressesNegativeTestJSON-1593054557-project-member] Waiting for the task: (returnval){ [ 890.360960] env[62070]: value = "task-1122005" [ 890.360960] env[62070]: _type = "Task" [ 890.360960] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 890.370540] env[62070]: DEBUG oslo_vmware.api [None req-f5579827-8e9e-49d8-9717-4039d0d0bfc5 tempest-ServerAddressesNegativeTestJSON-1593054557 tempest-ServerAddressesNegativeTestJSON-1593054557-project-member] Task: {'id': task-1122005, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 890.438169] env[62070]: DEBUG oslo_concurrency.lockutils [None req-98c8c84f-4cf9-474f-aae7-fd7e5cc6d19c tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Acquiring lock "refresh_cache-7dc27fe6-495f-498d-88fe-a99ddc19a21c" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 890.438333] env[62070]: DEBUG oslo_concurrency.lockutils [None req-98c8c84f-4cf9-474f-aae7-fd7e5cc6d19c tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Acquired lock "refresh_cache-7dc27fe6-495f-498d-88fe-a99ddc19a21c" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 890.439026] env[62070]: DEBUG nova.network.neutron [None req-98c8c84f-4cf9-474f-aae7-fd7e5cc6d19c tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] [instance: 7dc27fe6-495f-498d-88fe-a99ddc19a21c] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 890.534202] env[62070]: DEBUG oslo_concurrency.lockutils [None req-8a0f2bb4-1ee9-407f-978d-94801eeae730 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 890.540728] env[62070]: DEBUG oslo_vmware.api [None req-80bd56b8-16e3-42ee-acd0-234370ddd048 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Task: {'id': task-1122004, 'name': PowerOffVM_Task} progress is 100%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 890.623445] env[62070]: DEBUG nova.compute.manager [req-bc2e27a3-fa68-4efc-bfdf-2a515f4a5cda req-5cb86e37-ee62-405c-a94d-ce678df0d885 service nova] [instance: 71c98ac8-4149-448b-bf0c-3bfdcc8f50ef] Received event network-vif-deleted-344dfad7-d2a7-4532-b01b-71b3a8e2b88a {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 890.752456] env[62070]: ERROR nova.scheduler.client.report [None req-ee096c9d-cf7f-41b7-a4e4-dadeeb65a307 tempest-ServersNegativeTestMultiTenantJSON-2128819833 tempest-ServersNegativeTestMultiTenantJSON-2128819833-project-member] [req-876d94cd-b62c-4c89-bfb5-02cfcd1e9735] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 21c7c111-1b69-4468-b2c4-5dd96014fbd6. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-876d94cd-b62c-4c89-bfb5-02cfcd1e9735"}]} [ 890.771008] env[62070]: DEBUG nova.scheduler.client.report [None req-ee096c9d-cf7f-41b7-a4e4-dadeeb65a307 tempest-ServersNegativeTestMultiTenantJSON-2128819833 tempest-ServersNegativeTestMultiTenantJSON-2128819833-project-member] Refreshing inventories for resource provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 890.788795] env[62070]: DEBUG nova.scheduler.client.report [None req-ee096c9d-cf7f-41b7-a4e4-dadeeb65a307 tempest-ServersNegativeTestMultiTenantJSON-2128819833 tempest-ServersNegativeTestMultiTenantJSON-2128819833-project-member] Updating ProviderTree inventory for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 890.789177] env[62070]: DEBUG nova.compute.provider_tree [None req-ee096c9d-cf7f-41b7-a4e4-dadeeb65a307 tempest-ServersNegativeTestMultiTenantJSON-2128819833 tempest-ServersNegativeTestMultiTenantJSON-2128819833-project-member] Updating inventory in ProviderTree for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 890.803015] env[62070]: DEBUG nova.scheduler.client.report [None req-ee096c9d-cf7f-41b7-a4e4-dadeeb65a307 
tempest-ServersNegativeTestMultiTenantJSON-2128819833 tempest-ServersNegativeTestMultiTenantJSON-2128819833-project-member] Refreshing aggregate associations for resource provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6, aggregates: None {{(pid=62070) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 890.826951] env[62070]: DEBUG nova.scheduler.client.report [None req-ee096c9d-cf7f-41b7-a4e4-dadeeb65a307 tempest-ServersNegativeTestMultiTenantJSON-2128819833 tempest-ServersNegativeTestMultiTenantJSON-2128819833-project-member] Refreshing trait associations for resource provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6, traits: COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO {{(pid=62070) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 890.836899] env[62070]: DEBUG oslo_concurrency.lockutils [None req-b245da95-b82c-4fb0-92b9-b49d402a2811 tempest-ServerRescueTestJSONUnderV235-557690179 tempest-ServerRescueTestJSONUnderV235-557690179-project-member] Lock "559eee5b-0834-4dcf-a436-5e58644c7a3b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 39.456s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 890.871972] env[62070]: DEBUG oslo_vmware.api [None req-f5579827-8e9e-49d8-9717-4039d0d0bfc5 tempest-ServerAddressesNegativeTestJSON-1593054557 tempest-ServerAddressesNegativeTestJSON-1593054557-project-member] Task: {'id': task-1122005, 'name': PowerOnVM_Task, 'duration_secs': 0.480338} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 890.874564] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-f5579827-8e9e-49d8-9717-4039d0d0bfc5 tempest-ServerAddressesNegativeTestJSON-1593054557 tempest-ServerAddressesNegativeTestJSON-1593054557-project-member] [instance: d2cfcfac-4f15-4b16-9046-76722ee2e39b] Powered on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 890.874787] env[62070]: INFO nova.compute.manager [None req-f5579827-8e9e-49d8-9717-4039d0d0bfc5 tempest-ServerAddressesNegativeTestJSON-1593054557 tempest-ServerAddressesNegativeTestJSON-1593054557-project-member] [instance: d2cfcfac-4f15-4b16-9046-76722ee2e39b] Took 8.18 seconds to spawn the instance on the hypervisor. 
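Editor's note: the recurring "Invoking VirtualMachine.PowerOnVM_Task with opID=...", "Task: {...} progress is N%" and "completed successfully" lines are produced by oslo.vmware's request handler and task poller. The sketch below shows that invoke-then-wait pattern in isolation; it is not the nova.virt.vmwareapi driver itself, and the vCenter host, credentials and vm_ref are placeholders.

```python
# Minimal sketch of the oslo.vmware task pattern behind the log lines above.
from oslo_vmware import api

# Placeholder endpoint and credentials; a real session targets the vCenter
# listed at the top of this log.
session = api.VMwareAPISession(
    'vc1.example.test', 'username', 'password',
    api_retry_count=10, task_poll_interval=0.5)

def power_on(vm_ref):
    # invoke_api() sends the SOAP request (the "Invoking ..._Task with opID"
    # lines); a *_Task method returns a task reference immediately.
    task_ref = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    # wait_for_task() polls the task, logging "Task: {...} progress is N%"
    # until it reaches 'success' (or raising on error), then returns the
    # task info, at which point "completed successfully" is logged.
    return session.wait_for_task(task_ref)
```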
[ 890.874967] env[62070]: DEBUG nova.compute.manager [None req-f5579827-8e9e-49d8-9717-4039d0d0bfc5 tempest-ServerAddressesNegativeTestJSON-1593054557 tempest-ServerAddressesNegativeTestJSON-1593054557-project-member] [instance: d2cfcfac-4f15-4b16-9046-76722ee2e39b] Checking state {{(pid=62070) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 890.875948] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59f74555-5eef-4779-a85d-3f11b9a7ec97 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.947283] env[62070]: INFO nova.compute.manager [None req-4e113b76-ae40-4d13-b36b-369b6e3bb728 tempest-ServerRescueTestJSONUnderV235-557690179 tempest-ServerRescueTestJSONUnderV235-557690179-project-member] [instance: 559eee5b-0834-4dcf-a436-5e58644c7a3b] Rescuing [ 890.947341] env[62070]: DEBUG oslo_concurrency.lockutils [None req-4e113b76-ae40-4d13-b36b-369b6e3bb728 tempest-ServerRescueTestJSONUnderV235-557690179 tempest-ServerRescueTestJSONUnderV235-557690179-project-member] Acquiring lock "refresh_cache-559eee5b-0834-4dcf-a436-5e58644c7a3b" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 890.947478] env[62070]: DEBUG oslo_concurrency.lockutils [None req-4e113b76-ae40-4d13-b36b-369b6e3bb728 tempest-ServerRescueTestJSONUnderV235-557690179 tempest-ServerRescueTestJSONUnderV235-557690179-project-member] Acquired lock "refresh_cache-559eee5b-0834-4dcf-a436-5e58644c7a3b" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 890.947871] env[62070]: DEBUG nova.network.neutron [None req-4e113b76-ae40-4d13-b36b-369b6e3bb728 tempest-ServerRescueTestJSONUnderV235-557690179 tempest-ServerRescueTestJSONUnderV235-557690179-project-member] [instance: 559eee5b-0834-4dcf-a436-5e58644c7a3b] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 890.979171] env[62070]: DEBUG nova.network.neutron [None req-98c8c84f-4cf9-474f-aae7-fd7e5cc6d19c tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] [instance: 7dc27fe6-495f-498d-88fe-a99ddc19a21c] Instance cache missing network info. {{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 891.039976] env[62070]: DEBUG oslo_vmware.api [None req-80bd56b8-16e3-42ee-acd0-234370ddd048 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Task: {'id': task-1122004, 'name': PowerOffVM_Task, 'duration_secs': 0.543061} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 891.040689] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-80bd56b8-16e3-42ee-acd0-234370ddd048 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] [instance: 963feecc-ff58-4cbb-8d6f-3f9035337087] Powered off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 891.040689] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-80bd56b8-16e3-42ee-acd0-234370ddd048 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] [instance: 963feecc-ff58-4cbb-8d6f-3f9035337087] Unregistering the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 891.040689] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ba2495c6-aa65-42de-a743-4fb7a64818e6 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.148520] env[62070]: DEBUG nova.network.neutron [None req-98c8c84f-4cf9-474f-aae7-fd7e5cc6d19c tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] [instance: 7dc27fe6-495f-498d-88fe-a99ddc19a21c] Updating instance_info_cache with network_info: [{"id": "cac26624-11c7-45a9-acb3-3e86b7232ab2", "address": "fa:16:3e:8c:68:96", "network": {"id": "754f4ec8-0bc6-4726-8b88-1a4e1a326699", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-293486644-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1a94db233e3a43dc9aa7cf887c6cb1f6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d2742ba-c3af-4412-877d-c2811dfeba46", "external-id": "nsx-vlan-transportzone-390", "segmentation_id": 390, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcac26624-11", "ovs_interfaceid": "cac26624-11c7-45a9-acb3-3e86b7232ab2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 891.164621] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ba83cb2-53ad-44da-ab11-1fdd44eb56f0 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.173746] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d9e0f79-5439-4049-8939-76f9839a52cc {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.205687] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72978558-3898-4525-99b3-55ad0b72a438 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.214284] env[62070]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6c083f3-0ebc-4538-a800-56fff50f38cb {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.228698] env[62070]: DEBUG nova.compute.provider_tree [None req-ee096c9d-cf7f-41b7-a4e4-dadeeb65a307 tempest-ServersNegativeTestMultiTenantJSON-2128819833 tempest-ServersNegativeTestMultiTenantJSON-2128819833-project-member] Updating inventory in ProviderTree for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 891.397011] env[62070]: INFO nova.compute.manager [None req-f5579827-8e9e-49d8-9717-4039d0d0bfc5 tempest-ServerAddressesNegativeTestJSON-1593054557 tempest-ServerAddressesNegativeTestJSON-1593054557-project-member] [instance: d2cfcfac-4f15-4b16-9046-76722ee2e39b] Took 35.45 seconds to build instance. [ 891.630497] env[62070]: DEBUG nova.network.neutron [None req-4e113b76-ae40-4d13-b36b-369b6e3bb728 tempest-ServerRescueTestJSONUnderV235-557690179 tempest-ServerRescueTestJSONUnderV235-557690179-project-member] [instance: 559eee5b-0834-4dcf-a436-5e58644c7a3b] Updating instance_info_cache with network_info: [{"id": "c24d842f-4fc0-417f-a913-acda1bd7c41b", "address": "fa:16:3e:9d:d6:f1", "network": {"id": "f4af58a1-85b4-4c02-814f-2896adf35801", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-859480877-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "647582b6f4a048aea74c761f4e136a34", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "27138a4c-60c9-45fb-bf37-4c2f765315a3", "external-id": "nsx-vlan-transportzone-736", "segmentation_id": 736, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc24d842f-4f", "ovs_interfaceid": "c24d842f-4fc0-417f-a913-acda1bd7c41b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 891.651539] env[62070]: DEBUG oslo_concurrency.lockutils [None req-98c8c84f-4cf9-474f-aae7-fd7e5cc6d19c tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Releasing lock "refresh_cache-7dc27fe6-495f-498d-88fe-a99ddc19a21c" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 891.651896] env[62070]: DEBUG nova.compute.manager [None req-98c8c84f-4cf9-474f-aae7-fd7e5cc6d19c tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] [instance: 7dc27fe6-495f-498d-88fe-a99ddc19a21c] Instance network_info: |[{"id": 
"cac26624-11c7-45a9-acb3-3e86b7232ab2", "address": "fa:16:3e:8c:68:96", "network": {"id": "754f4ec8-0bc6-4726-8b88-1a4e1a326699", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-293486644-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1a94db233e3a43dc9aa7cf887c6cb1f6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d2742ba-c3af-4412-877d-c2811dfeba46", "external-id": "nsx-vlan-transportzone-390", "segmentation_id": 390, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcac26624-11", "ovs_interfaceid": "cac26624-11c7-45a9-acb3-3e86b7232ab2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 891.652691] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-98c8c84f-4cf9-474f-aae7-fd7e5cc6d19c tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] [instance: 7dc27fe6-495f-498d-88fe-a99ddc19a21c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:8c:68:96', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2d2742ba-c3af-4412-877d-c2811dfeba46', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'cac26624-11c7-45a9-acb3-3e86b7232ab2', 'vif_model': 'vmxnet3'}] {{(pid=62070) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 891.662428] env[62070]: DEBUG oslo.service.loopingcall [None req-98c8c84f-4cf9-474f-aae7-fd7e5cc6d19c tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 891.662975] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7dc27fe6-495f-498d-88fe-a99ddc19a21c] Creating VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 891.663280] env[62070]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-39bace11-d332-499c-aded-c03a70ee4ae5 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.684645] env[62070]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 891.684645] env[62070]: value = "task-1122007" [ 891.684645] env[62070]: _type = "Task" [ 891.684645] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 891.693019] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1122007, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.763667] env[62070]: DEBUG nova.scheduler.client.report [None req-ee096c9d-cf7f-41b7-a4e4-dadeeb65a307 tempest-ServersNegativeTestMultiTenantJSON-2128819833 tempest-ServersNegativeTestMultiTenantJSON-2128819833-project-member] Updated inventory for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 with generation 104 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 891.763989] env[62070]: DEBUG nova.compute.provider_tree [None req-ee096c9d-cf7f-41b7-a4e4-dadeeb65a307 tempest-ServersNegativeTestMultiTenantJSON-2128819833 tempest-ServersNegativeTestMultiTenantJSON-2128819833-project-member] Updating resource provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 generation from 104 to 105 during operation: update_inventory {{(pid=62070) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 891.764203] env[62070]: DEBUG nova.compute.provider_tree [None req-ee096c9d-cf7f-41b7-a4e4-dadeeb65a307 tempest-ServersNegativeTestMultiTenantJSON-2128819833 tempest-ServersNegativeTestMultiTenantJSON-2128819833-project-member] Updating inventory in ProviderTree for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 891.898813] env[62070]: DEBUG oslo_concurrency.lockutils [None req-f5579827-8e9e-49d8-9717-4039d0d0bfc5 tempest-ServerAddressesNegativeTestJSON-1593054557 tempest-ServerAddressesNegativeTestJSON-1593054557-project-member] Lock "d2cfcfac-4f15-4b16-9046-76722ee2e39b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 36.969s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 891.951208] env[62070]: DEBUG nova.compute.manager [req-96bde5b3-05cf-4664-a93e-89cad334555a req-d46ef856-0932-496b-97fa-3f861142ce5e service nova] [instance: 7dc27fe6-495f-498d-88fe-a99ddc19a21c] Received event network-changed-cac26624-11c7-45a9-acb3-3e86b7232ab2 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 891.951486] env[62070]: DEBUG nova.compute.manager [req-96bde5b3-05cf-4664-a93e-89cad334555a req-d46ef856-0932-496b-97fa-3f861142ce5e service nova] [instance: 7dc27fe6-495f-498d-88fe-a99ddc19a21c] Refreshing instance network info cache due to event network-changed-cac26624-11c7-45a9-acb3-3e86b7232ab2. 
{{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 891.951765] env[62070]: DEBUG oslo_concurrency.lockutils [req-96bde5b3-05cf-4664-a93e-89cad334555a req-d46ef856-0932-496b-97fa-3f861142ce5e service nova] Acquiring lock "refresh_cache-7dc27fe6-495f-498d-88fe-a99ddc19a21c" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 891.951979] env[62070]: DEBUG oslo_concurrency.lockutils [req-96bde5b3-05cf-4664-a93e-89cad334555a req-d46ef856-0932-496b-97fa-3f861142ce5e service nova] Acquired lock "refresh_cache-7dc27fe6-495f-498d-88fe-a99ddc19a21c" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 891.952263] env[62070]: DEBUG nova.network.neutron [req-96bde5b3-05cf-4664-a93e-89cad334555a req-d46ef856-0932-496b-97fa-3f861142ce5e service nova] [instance: 7dc27fe6-495f-498d-88fe-a99ddc19a21c] Refreshing network info cache for port cac26624-11c7-45a9-acb3-3e86b7232ab2 {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 892.133400] env[62070]: DEBUG oslo_concurrency.lockutils [None req-4e113b76-ae40-4d13-b36b-369b6e3bb728 tempest-ServerRescueTestJSONUnderV235-557690179 tempest-ServerRescueTestJSONUnderV235-557690179-project-member] Releasing lock "refresh_cache-559eee5b-0834-4dcf-a436-5e58644c7a3b" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 892.195618] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1122007, 'name': CreateVM_Task} progress is 25%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 892.201175] env[62070]: DEBUG oslo_service.periodic_task [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62070) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 892.201490] env[62070]: DEBUG oslo_service.periodic_task [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62070) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 892.269471] env[62070]: DEBUG oslo_concurrency.lockutils [None req-ee096c9d-cf7f-41b7-a4e4-dadeeb65a307 tempest-ServersNegativeTestMultiTenantJSON-2128819833 tempest-ServersNegativeTestMultiTenantJSON-2128819833-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.978s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 892.272642] env[62070]: DEBUG oslo_concurrency.lockutils [None req-e9c130d3-ff3e-4741-9c51-ba262ee7aeaa tempest-ServerPasswordTestJSON-1580526241 tempest-ServerPasswordTestJSON-1580526241-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 22.347s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 892.275053] env[62070]: INFO nova.compute.claims [None req-e9c130d3-ff3e-4741-9c51-ba262ee7aeaa tempest-ServerPasswordTestJSON-1580526241 tempest-ServerPasswordTestJSON-1580526241-project-member] [instance: e51d0146-502a-4ace-856e-b0dbcc11edea] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 892.294719] env[62070]: INFO 
nova.scheduler.client.report [None req-ee096c9d-cf7f-41b7-a4e4-dadeeb65a307 tempest-ServersNegativeTestMultiTenantJSON-2128819833 tempest-ServersNegativeTestMultiTenantJSON-2128819833-project-member] Deleted allocations for instance 4bba7448-69f7-4764-9ae6-eb6585f71515 [ 892.533518] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6c7a189b-a3d7-40da-a908-7213e68deb41 tempest-ServerAddressesNegativeTestJSON-1593054557 tempest-ServerAddressesNegativeTestJSON-1593054557-project-member] Acquiring lock "d2cfcfac-4f15-4b16-9046-76722ee2e39b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 892.533745] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6c7a189b-a3d7-40da-a908-7213e68deb41 tempest-ServerAddressesNegativeTestJSON-1593054557 tempest-ServerAddressesNegativeTestJSON-1593054557-project-member] Lock "d2cfcfac-4f15-4b16-9046-76722ee2e39b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 892.533945] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6c7a189b-a3d7-40da-a908-7213e68deb41 tempest-ServerAddressesNegativeTestJSON-1593054557 tempest-ServerAddressesNegativeTestJSON-1593054557-project-member] Acquiring lock "d2cfcfac-4f15-4b16-9046-76722ee2e39b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 892.534174] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6c7a189b-a3d7-40da-a908-7213e68deb41 tempest-ServerAddressesNegativeTestJSON-1593054557 tempest-ServerAddressesNegativeTestJSON-1593054557-project-member] Lock "d2cfcfac-4f15-4b16-9046-76722ee2e39b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 892.534357] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6c7a189b-a3d7-40da-a908-7213e68deb41 tempest-ServerAddressesNegativeTestJSON-1593054557 tempest-ServerAddressesNegativeTestJSON-1593054557-project-member] Lock "d2cfcfac-4f15-4b16-9046-76722ee2e39b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 892.536567] env[62070]: INFO nova.compute.manager [None req-6c7a189b-a3d7-40da-a908-7213e68deb41 tempest-ServerAddressesNegativeTestJSON-1593054557 tempest-ServerAddressesNegativeTestJSON-1593054557-project-member] [instance: d2cfcfac-4f15-4b16-9046-76722ee2e39b] Terminating instance [ 892.538424] env[62070]: DEBUG nova.compute.manager [None req-6c7a189b-a3d7-40da-a908-7213e68deb41 tempest-ServerAddressesNegativeTestJSON-1593054557 tempest-ServerAddressesNegativeTestJSON-1593054557-project-member] [instance: d2cfcfac-4f15-4b16-9046-76722ee2e39b] Start destroying the instance on the hypervisor. 
{{(pid=62070) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 892.538733] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-6c7a189b-a3d7-40da-a908-7213e68deb41 tempest-ServerAddressesNegativeTestJSON-1593054557 tempest-ServerAddressesNegativeTestJSON-1593054557-project-member] [instance: d2cfcfac-4f15-4b16-9046-76722ee2e39b] Destroying instance {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 892.539797] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08a35802-7725-4dff-b5c3-f8ed9f90445e {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.548417] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-6c7a189b-a3d7-40da-a908-7213e68deb41 tempest-ServerAddressesNegativeTestJSON-1593054557 tempest-ServerAddressesNegativeTestJSON-1593054557-project-member] [instance: d2cfcfac-4f15-4b16-9046-76722ee2e39b] Powering off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 892.548713] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-89d14d43-2bfb-4196-b279-2d2d01fdc172 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.559144] env[62070]: DEBUG oslo_vmware.api [None req-6c7a189b-a3d7-40da-a908-7213e68deb41 tempest-ServerAddressesNegativeTestJSON-1593054557 tempest-ServerAddressesNegativeTestJSON-1593054557-project-member] Waiting for the task: (returnval){ [ 892.559144] env[62070]: value = "task-1122008" [ 892.559144] env[62070]: _type = "Task" [ 892.559144] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 892.566140] env[62070]: DEBUG oslo_vmware.api [None req-6c7a189b-a3d7-40da-a908-7213e68deb41 tempest-ServerAddressesNegativeTestJSON-1593054557 tempest-ServerAddressesNegativeTestJSON-1593054557-project-member] Task: {'id': task-1122008, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 892.671383] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-4e113b76-ae40-4d13-b36b-369b6e3bb728 tempest-ServerRescueTestJSONUnderV235-557690179 tempest-ServerRescueTestJSONUnderV235-557690179-project-member] [instance: 559eee5b-0834-4dcf-a436-5e58644c7a3b] Powering off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 892.671645] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-024ae0d4-b490-46ee-9a01-06ecae86bc01 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.679825] env[62070]: DEBUG oslo_vmware.api [None req-4e113b76-ae40-4d13-b36b-369b6e3bb728 tempest-ServerRescueTestJSONUnderV235-557690179 tempest-ServerRescueTestJSONUnderV235-557690179-project-member] Waiting for the task: (returnval){ [ 892.679825] env[62070]: value = "task-1122009" [ 892.679825] env[62070]: _type = "Task" [ 892.679825] env[62070]: } to complete. 
{{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 892.689350] env[62070]: DEBUG oslo_vmware.api [None req-4e113b76-ae40-4d13-b36b-369b6e3bb728 tempest-ServerRescueTestJSONUnderV235-557690179 tempest-ServerRescueTestJSONUnderV235-557690179-project-member] Task: {'id': task-1122009, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 892.698318] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1122007, 'name': CreateVM_Task} progress is 25%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 892.709261] env[62070]: DEBUG oslo_service.periodic_task [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62070) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 892.709458] env[62070]: DEBUG nova.compute.manager [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Starting heal instance info cache {{(pid=62070) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}} [ 892.732687] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-80bd56b8-16e3-42ee-acd0-234370ddd048 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] [instance: 963feecc-ff58-4cbb-8d6f-3f9035337087] Unregistered the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 892.732948] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-80bd56b8-16e3-42ee-acd0-234370ddd048 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] [instance: 963feecc-ff58-4cbb-8d6f-3f9035337087] Deleting contents of the VM from datastore datastore1 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 892.733190] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-80bd56b8-16e3-42ee-acd0-234370ddd048 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Deleting the datastore file [datastore1] 963feecc-ff58-4cbb-8d6f-3f9035337087 {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 892.733483] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3d121317-2dda-400b-88b8-2e11d5838884 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.743336] env[62070]: DEBUG oslo_vmware.api [None req-80bd56b8-16e3-42ee-acd0-234370ddd048 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Waiting for the task: (returnval){ [ 892.743336] env[62070]: value = "task-1122010" [ 892.743336] env[62070]: _type = "Task" [ 892.743336] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 892.756111] env[62070]: DEBUG oslo_vmware.api [None req-80bd56b8-16e3-42ee-acd0-234370ddd048 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Task: {'id': task-1122010, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 892.804320] env[62070]: DEBUG oslo_concurrency.lockutils [None req-ee096c9d-cf7f-41b7-a4e4-dadeeb65a307 tempest-ServersNegativeTestMultiTenantJSON-2128819833 tempest-ServersNegativeTestMultiTenantJSON-2128819833-project-member] Lock "4bba7448-69f7-4764-9ae6-eb6585f71515" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 26.696s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 892.810207] env[62070]: DEBUG nova.network.neutron [req-96bde5b3-05cf-4664-a93e-89cad334555a req-d46ef856-0932-496b-97fa-3f861142ce5e service nova] [instance: 7dc27fe6-495f-498d-88fe-a99ddc19a21c] Updated VIF entry in instance network info cache for port cac26624-11c7-45a9-acb3-3e86b7232ab2. {{(pid=62070) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 892.810207] env[62070]: DEBUG nova.network.neutron [req-96bde5b3-05cf-4664-a93e-89cad334555a req-d46ef856-0932-496b-97fa-3f861142ce5e service nova] [instance: 7dc27fe6-495f-498d-88fe-a99ddc19a21c] Updating instance_info_cache with network_info: [{"id": "cac26624-11c7-45a9-acb3-3e86b7232ab2", "address": "fa:16:3e:8c:68:96", "network": {"id": "754f4ec8-0bc6-4726-8b88-1a4e1a326699", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-293486644-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1a94db233e3a43dc9aa7cf887c6cb1f6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d2742ba-c3af-4412-877d-c2811dfeba46", "external-id": "nsx-vlan-transportzone-390", "segmentation_id": 390, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcac26624-11", "ovs_interfaceid": "cac26624-11c7-45a9-acb3-3e86b7232ab2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 893.067807] env[62070]: DEBUG oslo_vmware.api [None req-6c7a189b-a3d7-40da-a908-7213e68deb41 tempest-ServerAddressesNegativeTestJSON-1593054557 tempest-ServerAddressesNegativeTestJSON-1593054557-project-member] Task: {'id': task-1122008, 'name': PowerOffVM_Task, 'duration_secs': 0.227245} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 893.068246] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-6c7a189b-a3d7-40da-a908-7213e68deb41 tempest-ServerAddressesNegativeTestJSON-1593054557 tempest-ServerAddressesNegativeTestJSON-1593054557-project-member] [instance: d2cfcfac-4f15-4b16-9046-76722ee2e39b] Powered off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 893.068525] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-6c7a189b-a3d7-40da-a908-7213e68deb41 tempest-ServerAddressesNegativeTestJSON-1593054557 tempest-ServerAddressesNegativeTestJSON-1593054557-project-member] [instance: d2cfcfac-4f15-4b16-9046-76722ee2e39b] Unregistering the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 893.068882] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ddbe78e8-78f2-46de-a167-7834931c8a84 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.144467] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-6c7a189b-a3d7-40da-a908-7213e68deb41 tempest-ServerAddressesNegativeTestJSON-1593054557 tempest-ServerAddressesNegativeTestJSON-1593054557-project-member] [instance: d2cfcfac-4f15-4b16-9046-76722ee2e39b] Unregistered the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 893.144797] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-6c7a189b-a3d7-40da-a908-7213e68deb41 tempest-ServerAddressesNegativeTestJSON-1593054557 tempest-ServerAddressesNegativeTestJSON-1593054557-project-member] [instance: d2cfcfac-4f15-4b16-9046-76722ee2e39b] Deleting contents of the VM from datastore datastore2 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 893.145116] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-6c7a189b-a3d7-40da-a908-7213e68deb41 tempest-ServerAddressesNegativeTestJSON-1593054557 tempest-ServerAddressesNegativeTestJSON-1593054557-project-member] Deleting the datastore file [datastore2] d2cfcfac-4f15-4b16-9046-76722ee2e39b {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 893.145492] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-05b7cecf-aa89-4b5e-875f-da44b40aac8d {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.157254] env[62070]: DEBUG oslo_vmware.api [None req-6c7a189b-a3d7-40da-a908-7213e68deb41 tempest-ServerAddressesNegativeTestJSON-1593054557 tempest-ServerAddressesNegativeTestJSON-1593054557-project-member] Waiting for the task: (returnval){ [ 893.157254] env[62070]: value = "task-1122012" [ 893.157254] env[62070]: _type = "Task" [ 893.157254] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 893.166702] env[62070]: DEBUG oslo_vmware.api [None req-6c7a189b-a3d7-40da-a908-7213e68deb41 tempest-ServerAddressesNegativeTestJSON-1593054557 tempest-ServerAddressesNegativeTestJSON-1593054557-project-member] Task: {'id': task-1122012, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 893.190736] env[62070]: DEBUG oslo_vmware.api [None req-4e113b76-ae40-4d13-b36b-369b6e3bb728 tempest-ServerRescueTestJSONUnderV235-557690179 tempest-ServerRescueTestJSONUnderV235-557690179-project-member] Task: {'id': task-1122009, 'name': PowerOffVM_Task, 'duration_secs': 0.247426} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 893.195114] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-4e113b76-ae40-4d13-b36b-369b6e3bb728 tempest-ServerRescueTestJSONUnderV235-557690179 tempest-ServerRescueTestJSONUnderV235-557690179-project-member] [instance: 559eee5b-0834-4dcf-a436-5e58644c7a3b] Powered off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 893.195634] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19a5eab2-b8c7-400c-9766-cf7873506f2e {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.203707] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1122007, 'name': CreateVM_Task, 'duration_secs': 1.300834} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 893.218020] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7dc27fe6-495f-498d-88fe-a99ddc19a21c] Created VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 893.220576] env[62070]: DEBUG oslo_concurrency.lockutils [None req-98c8c84f-4cf9-474f-aae7-fd7e5cc6d19c tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 893.220783] env[62070]: DEBUG oslo_concurrency.lockutils [None req-98c8c84f-4cf9-474f-aae7-fd7e5cc6d19c tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Acquired lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 893.221108] env[62070]: DEBUG oslo_concurrency.lockutils [None req-98c8c84f-4cf9-474f-aae7-fd7e5cc6d19c tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 893.221947] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ad9a995-258b-40c2-8fb8-e598c70e58b8 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.225238] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-240e6689-c657-4f63-a750-b41e2da90270 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.230896] env[62070]: DEBUG oslo_vmware.api [None req-98c8c84f-4cf9-474f-aae7-fd7e5cc6d19c 
tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Waiting for the task: (returnval){ [ 893.230896] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]5268585c-cdeb-ad5f-7bd2-9f336c0b61df" [ 893.230896] env[62070]: _type = "Task" [ 893.230896] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 893.246626] env[62070]: DEBUG oslo_vmware.api [None req-98c8c84f-4cf9-474f-aae7-fd7e5cc6d19c tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]5268585c-cdeb-ad5f-7bd2-9f336c0b61df, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 893.255094] env[62070]: DEBUG oslo_vmware.api [None req-80bd56b8-16e3-42ee-acd0-234370ddd048 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Task: {'id': task-1122010, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.332444} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 893.255621] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-80bd56b8-16e3-42ee-acd0-234370ddd048 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Deleted the datastore file {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 893.255946] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-80bd56b8-16e3-42ee-acd0-234370ddd048 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] [instance: 963feecc-ff58-4cbb-8d6f-3f9035337087] Deleted contents of the VM from datastore datastore1 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 893.256138] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-80bd56b8-16e3-42ee-acd0-234370ddd048 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] [instance: 963feecc-ff58-4cbb-8d6f-3f9035337087] Instance destroyed {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 893.256471] env[62070]: INFO nova.compute.manager [None req-80bd56b8-16e3-42ee-acd0-234370ddd048 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] [instance: 963feecc-ff58-4cbb-8d6f-3f9035337087] Took 3.25 seconds to destroy the instance on the hypervisor. [ 893.256890] env[62070]: DEBUG oslo.service.loopingcall [None req-80bd56b8-16e3-42ee-acd0-234370ddd048 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 893.258888] env[62070]: DEBUG nova.compute.manager [-] [instance: 963feecc-ff58-4cbb-8d6f-3f9035337087] Deallocating network for instance {{(pid=62070) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 893.259097] env[62070]: DEBUG nova.network.neutron [-] [instance: 963feecc-ff58-4cbb-8d6f-3f9035337087] deallocate_for_instance() {{(pid=62070) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 893.271131] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-4e113b76-ae40-4d13-b36b-369b6e3bb728 tempest-ServerRescueTestJSONUnderV235-557690179 tempest-ServerRescueTestJSONUnderV235-557690179-project-member] [instance: 559eee5b-0834-4dcf-a436-5e58644c7a3b] Powering off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 893.271551] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f11c5dfd-e25f-4e96-9589-36b7909e17d7 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.280361] env[62070]: DEBUG oslo_vmware.api [None req-4e113b76-ae40-4d13-b36b-369b6e3bb728 tempest-ServerRescueTestJSONUnderV235-557690179 tempest-ServerRescueTestJSONUnderV235-557690179-project-member] Waiting for the task: (returnval){ [ 893.280361] env[62070]: value = "task-1122013" [ 893.280361] env[62070]: _type = "Task" [ 893.280361] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 893.293308] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-4e113b76-ae40-4d13-b36b-369b6e3bb728 tempest-ServerRescueTestJSONUnderV235-557690179 tempest-ServerRescueTestJSONUnderV235-557690179-project-member] [instance: 559eee5b-0834-4dcf-a436-5e58644c7a3b] VM already powered off {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1509}} [ 893.293308] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-4e113b76-ae40-4d13-b36b-369b6e3bb728 tempest-ServerRescueTestJSONUnderV235-557690179 tempest-ServerRescueTestJSONUnderV235-557690179-project-member] [instance: 559eee5b-0834-4dcf-a436-5e58644c7a3b] Processing image 43ea607c-7ece-4601-9b11-75c6a16aa7dd {{(pid=62070) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 893.293308] env[62070]: DEBUG oslo_concurrency.lockutils [None req-4e113b76-ae40-4d13-b36b-369b6e3bb728 tempest-ServerRescueTestJSONUnderV235-557690179 tempest-ServerRescueTestJSONUnderV235-557690179-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 893.293473] env[62070]: DEBUG oslo_concurrency.lockutils [None req-4e113b76-ae40-4d13-b36b-369b6e3bb728 tempest-ServerRescueTestJSONUnderV235-557690179 tempest-ServerRescueTestJSONUnderV235-557690179-project-member] Acquired lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 893.293800] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-4e113b76-ae40-4d13-b36b-369b6e3bb728 tempest-ServerRescueTestJSONUnderV235-557690179 
tempest-ServerRescueTestJSONUnderV235-557690179-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 893.293887] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-792a9161-bb03-4ead-92e6-64cc0aad6d3b {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.303250] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-4e113b76-ae40-4d13-b36b-369b6e3bb728 tempest-ServerRescueTestJSONUnderV235-557690179 tempest-ServerRescueTestJSONUnderV235-557690179-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 893.303438] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-4e113b76-ae40-4d13-b36b-369b6e3bb728 tempest-ServerRescueTestJSONUnderV235-557690179 tempest-ServerRescueTestJSONUnderV235-557690179-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62070) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 893.304295] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8cfe1264-0158-4bff-8a84-77caec6532bd {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.310452] env[62070]: DEBUG oslo_vmware.api [None req-4e113b76-ae40-4d13-b36b-369b6e3bb728 tempest-ServerRescueTestJSONUnderV235-557690179 tempest-ServerRescueTestJSONUnderV235-557690179-project-member] Waiting for the task: (returnval){ [ 893.310452] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]5295d3e9-db36-40c0-0cb8-618b1a42764f" [ 893.310452] env[62070]: _type = "Task" [ 893.310452] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 893.316818] env[62070]: DEBUG oslo_concurrency.lockutils [req-96bde5b3-05cf-4664-a93e-89cad334555a req-d46ef856-0932-496b-97fa-3f861142ce5e service nova] Releasing lock "refresh_cache-7dc27fe6-495f-498d-88fe-a99ddc19a21c" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 893.323680] env[62070]: DEBUG oslo_vmware.api [None req-4e113b76-ae40-4d13-b36b-369b6e3bb728 tempest-ServerRescueTestJSONUnderV235-557690179 tempest-ServerRescueTestJSONUnderV235-557690179-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]5295d3e9-db36-40c0-0cb8-618b1a42764f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 893.669843] env[62070]: DEBUG oslo_vmware.api [None req-6c7a189b-a3d7-40da-a908-7213e68deb41 tempest-ServerAddressesNegativeTestJSON-1593054557 tempest-ServerAddressesNegativeTestJSON-1593054557-project-member] Task: {'id': task-1122012, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.485084} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 893.669843] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-6c7a189b-a3d7-40da-a908-7213e68deb41 tempest-ServerAddressesNegativeTestJSON-1593054557 tempest-ServerAddressesNegativeTestJSON-1593054557-project-member] Deleted the datastore file {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 893.670081] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-6c7a189b-a3d7-40da-a908-7213e68deb41 tempest-ServerAddressesNegativeTestJSON-1593054557 tempest-ServerAddressesNegativeTestJSON-1593054557-project-member] [instance: d2cfcfac-4f15-4b16-9046-76722ee2e39b] Deleted contents of the VM from datastore datastore2 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 893.670226] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-6c7a189b-a3d7-40da-a908-7213e68deb41 tempest-ServerAddressesNegativeTestJSON-1593054557 tempest-ServerAddressesNegativeTestJSON-1593054557-project-member] [instance: d2cfcfac-4f15-4b16-9046-76722ee2e39b] Instance destroyed {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 893.670401] env[62070]: INFO nova.compute.manager [None req-6c7a189b-a3d7-40da-a908-7213e68deb41 tempest-ServerAddressesNegativeTestJSON-1593054557 tempest-ServerAddressesNegativeTestJSON-1593054557-project-member] [instance: d2cfcfac-4f15-4b16-9046-76722ee2e39b] Took 1.13 seconds to destroy the instance on the hypervisor. [ 893.670630] env[62070]: DEBUG oslo.service.loopingcall [None req-6c7a189b-a3d7-40da-a908-7213e68deb41 tempest-ServerAddressesNegativeTestJSON-1593054557 tempest-ServerAddressesNegativeTestJSON-1593054557-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 893.671366] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03eadab5-4dc2-4f15-8df4-71a009e3439a {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.673953] env[62070]: DEBUG nova.compute.manager [-] [instance: d2cfcfac-4f15-4b16-9046-76722ee2e39b] Deallocating network for instance {{(pid=62070) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 893.674085] env[62070]: DEBUG nova.network.neutron [-] [instance: d2cfcfac-4f15-4b16-9046-76722ee2e39b] deallocate_for_instance() {{(pid=62070) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 893.681817] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f11eb2d7-2c1b-4355-923c-6aa2efb5ed8c {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.720069] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a8d2e6f-f4d1-4699-8518-a2f170a2c992 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.732020] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e5e27f3-7334-49c5-8f86-2cb47da4931d {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.754624] env[62070]: DEBUG oslo_vmware.api [None req-98c8c84f-4cf9-474f-aae7-fd7e5cc6d19c tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]5268585c-cdeb-ad5f-7bd2-9f336c0b61df, 'name': SearchDatastore_Task, 'duration_secs': 0.017523} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 893.755128] env[62070]: DEBUG nova.compute.provider_tree [None req-e9c130d3-ff3e-4741-9c51-ba262ee7aeaa tempest-ServerPasswordTestJSON-1580526241 tempest-ServerPasswordTestJSON-1580526241-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 893.756592] env[62070]: DEBUG oslo_concurrency.lockutils [None req-98c8c84f-4cf9-474f-aae7-fd7e5cc6d19c tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Releasing lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 893.760018] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-98c8c84f-4cf9-474f-aae7-fd7e5cc6d19c tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] [instance: 7dc27fe6-495f-498d-88fe-a99ddc19a21c] Processing image 43ea607c-7ece-4601-9b11-75c6a16aa7dd {{(pid=62070) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 893.760018] env[62070]: DEBUG oslo_concurrency.lockutils [None req-98c8c84f-4cf9-474f-aae7-fd7e5cc6d19c tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 893.797949] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Acquiring lock "refresh_cache-71aead12-a182-40a7-b5a9-91c01271b800" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 893.797949] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Acquired lock "refresh_cache-71aead12-a182-40a7-b5a9-91c01271b800" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 893.798072] env[62070]: DEBUG nova.network.neutron [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] [instance: 71aead12-a182-40a7-b5a9-91c01271b800] Forcefully refreshing network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2004}} [ 893.823103] env[62070]: DEBUG oslo_vmware.api [None req-4e113b76-ae40-4d13-b36b-369b6e3bb728 tempest-ServerRescueTestJSONUnderV235-557690179 tempest-ServerRescueTestJSONUnderV235-557690179-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]5295d3e9-db36-40c0-0cb8-618b1a42764f, 'name': SearchDatastore_Task, 'duration_secs': 0.032029} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 893.823958] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6f7ad476-7d8d-4c0c-8412-76d6cc18e3f6 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.830266] env[62070]: DEBUG oslo_vmware.api [None req-4e113b76-ae40-4d13-b36b-369b6e3bb728 tempest-ServerRescueTestJSONUnderV235-557690179 tempest-ServerRescueTestJSONUnderV235-557690179-project-member] Waiting for the task: (returnval){ [ 893.830266] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]523361d4-690c-b0e4-4d6a-ba62ce91c7ad" [ 893.830266] env[62070]: _type = "Task" [ 893.830266] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 893.839393] env[62070]: DEBUG oslo_vmware.api [None req-4e113b76-ae40-4d13-b36b-369b6e3bb728 tempest-ServerRescueTestJSONUnderV235-557690179 tempest-ServerRescueTestJSONUnderV235-557690179-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]523361d4-690c-b0e4-4d6a-ba62ce91c7ad, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 893.998839] env[62070]: DEBUG nova.network.neutron [-] [instance: 963feecc-ff58-4cbb-8d6f-3f9035337087] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 894.135063] env[62070]: DEBUG nova.compute.manager [req-7b8aaefe-d8bd-4b3a-ab53-08df0205fe72 req-179463e4-1ab1-4108-9106-5bb677a1a5b0 service nova] [instance: 963feecc-ff58-4cbb-8d6f-3f9035337087] Received event network-vif-deleted-eaa2c81a-41e9-4fe8-b2b0-9977156d1505 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 894.259479] env[62070]: DEBUG nova.scheduler.client.report [None req-e9c130d3-ff3e-4741-9c51-ba262ee7aeaa tempest-ServerPasswordTestJSON-1580526241 tempest-ServerPasswordTestJSON-1580526241-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 894.355853] env[62070]: DEBUG oslo_vmware.api [None req-4e113b76-ae40-4d13-b36b-369b6e3bb728 tempest-ServerRescueTestJSONUnderV235-557690179 tempest-ServerRescueTestJSONUnderV235-557690179-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]523361d4-690c-b0e4-4d6a-ba62ce91c7ad, 'name': SearchDatastore_Task, 'duration_secs': 0.041154} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 894.356488] env[62070]: DEBUG oslo_concurrency.lockutils [None req-4e113b76-ae40-4d13-b36b-369b6e3bb728 tempest-ServerRescueTestJSONUnderV235-557690179 tempest-ServerRescueTestJSONUnderV235-557690179-project-member] Releasing lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 894.356992] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-4e113b76-ae40-4d13-b36b-369b6e3bb728 tempest-ServerRescueTestJSONUnderV235-557690179 tempest-ServerRescueTestJSONUnderV235-557690179-project-member] Copying virtual disk from [datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore2] 559eee5b-0834-4dcf-a436-5e58644c7a3b/43ea607c-7ece-4601-9b11-75c6a16aa7dd-rescue.vmdk. {{(pid=62070) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 894.357393] env[62070]: DEBUG oslo_concurrency.lockutils [None req-98c8c84f-4cf9-474f-aae7-fd7e5cc6d19c tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Acquired lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 894.357699] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-98c8c84f-4cf9-474f-aae7-fd7e5cc6d19c tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 894.358029] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-305fa013-6665-4602-a727-2a2e26a1fd1d {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.361579] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-39ce5138-45e8-4cb3-9e27-31f0c79a51f6 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.371829] env[62070]: DEBUG oslo_vmware.api [None req-4e113b76-ae40-4d13-b36b-369b6e3bb728 tempest-ServerRescueTestJSONUnderV235-557690179 tempest-ServerRescueTestJSONUnderV235-557690179-project-member] Waiting for the task: (returnval){ [ 894.371829] env[62070]: value = "task-1122014" [ 894.371829] env[62070]: _type = "Task" [ 894.371829] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 894.374271] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-98c8c84f-4cf9-474f-aae7-fd7e5cc6d19c tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 894.374604] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-98c8c84f-4cf9-474f-aae7-fd7e5cc6d19c tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62070) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 894.378796] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ac2bf371-6c65-4bab-b696-b49f59778ff0 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.388378] env[62070]: DEBUG oslo_vmware.api [None req-4e113b76-ae40-4d13-b36b-369b6e3bb728 tempest-ServerRescueTestJSONUnderV235-557690179 tempest-ServerRescueTestJSONUnderV235-557690179-project-member] Task: {'id': task-1122014, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 894.388703] env[62070]: DEBUG oslo_vmware.api [None req-98c8c84f-4cf9-474f-aae7-fd7e5cc6d19c tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Waiting for the task: (returnval){ [ 894.388703] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]5244541c-9fe7-4eb4-fbaf-69ab9aabb475" [ 894.388703] env[62070]: _type = "Task" [ 894.388703] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 894.398497] env[62070]: DEBUG oslo_vmware.api [None req-98c8c84f-4cf9-474f-aae7-fd7e5cc6d19c tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]5244541c-9fe7-4eb4-fbaf-69ab9aabb475, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 894.502712] env[62070]: INFO nova.compute.manager [-] [instance: 963feecc-ff58-4cbb-8d6f-3f9035337087] Took 1.24 seconds to deallocate network for instance. [ 894.529956] env[62070]: DEBUG nova.network.neutron [-] [instance: d2cfcfac-4f15-4b16-9046-76722ee2e39b] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 894.765797] env[62070]: DEBUG oslo_concurrency.lockutils [None req-e9c130d3-ff3e-4741-9c51-ba262ee7aeaa tempest-ServerPasswordTestJSON-1580526241 tempest-ServerPasswordTestJSON-1580526241-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.494s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 894.766407] env[62070]: DEBUG nova.compute.manager [None req-e9c130d3-ff3e-4741-9c51-ba262ee7aeaa tempest-ServerPasswordTestJSON-1580526241 tempest-ServerPasswordTestJSON-1580526241-project-member] [instance: e51d0146-502a-4ace-856e-b0dbcc11edea] Start building networks asynchronously for instance. 
{{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 894.769356] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6adc601d-ffcb-4677-99aa-bd0495d3c046 tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 23.974s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 894.769594] env[62070]: DEBUG nova.objects.instance [None req-6adc601d-ffcb-4677-99aa-bd0495d3c046 tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Lazy-loading 'resources' on Instance uuid 2368b649-f931-454c-92cc-971df4155d90 {{(pid=62070) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 894.885960] env[62070]: DEBUG oslo_vmware.api [None req-4e113b76-ae40-4d13-b36b-369b6e3bb728 tempest-ServerRescueTestJSONUnderV235-557690179 tempest-ServerRescueTestJSONUnderV235-557690179-project-member] Task: {'id': task-1122014, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 894.905721] env[62070]: DEBUG oslo_vmware.api [None req-98c8c84f-4cf9-474f-aae7-fd7e5cc6d19c tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]5244541c-9fe7-4eb4-fbaf-69ab9aabb475, 'name': SearchDatastore_Task, 'duration_secs': 0.016687} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 894.907222] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-788ba3b9-846d-4ea4-8bc1-8249a919280e {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.915347] env[62070]: DEBUG oslo_vmware.api [None req-98c8c84f-4cf9-474f-aae7-fd7e5cc6d19c tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Waiting for the task: (returnval){ [ 894.915347] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]52066075-58ef-8cd5-9cb1-57706ab2490f" [ 894.915347] env[62070]: _type = "Task" [ 894.915347] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 894.925388] env[62070]: DEBUG oslo_vmware.api [None req-98c8c84f-4cf9-474f-aae7-fd7e5cc6d19c tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52066075-58ef-8cd5-9cb1-57706ab2490f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 895.012244] env[62070]: DEBUG oslo_concurrency.lockutils [None req-80bd56b8-16e3-42ee-acd0-234370ddd048 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 895.033077] env[62070]: INFO nova.compute.manager [-] [instance: d2cfcfac-4f15-4b16-9046-76722ee2e39b] Took 1.36 seconds to deallocate network for instance. [ 895.274212] env[62070]: DEBUG nova.compute.utils [None req-e9c130d3-ff3e-4741-9c51-ba262ee7aeaa tempest-ServerPasswordTestJSON-1580526241 tempest-ServerPasswordTestJSON-1580526241-project-member] Using /dev/sd instead of None {{(pid=62070) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 895.284807] env[62070]: DEBUG nova.compute.manager [None req-e9c130d3-ff3e-4741-9c51-ba262ee7aeaa tempest-ServerPasswordTestJSON-1580526241 tempest-ServerPasswordTestJSON-1580526241-project-member] [instance: e51d0146-502a-4ace-856e-b0dbcc11edea] Allocating IP information in the background. {{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 895.285027] env[62070]: DEBUG nova.network.neutron [None req-e9c130d3-ff3e-4741-9c51-ba262ee7aeaa tempest-ServerPasswordTestJSON-1580526241 tempest-ServerPasswordTestJSON-1580526241-project-member] [instance: e51d0146-502a-4ace-856e-b0dbcc11edea] allocate_for_instance() {{(pid=62070) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 895.356522] env[62070]: DEBUG nova.policy [None req-e9c130d3-ff3e-4741-9c51-ba262ee7aeaa tempest-ServerPasswordTestJSON-1580526241 tempest-ServerPasswordTestJSON-1580526241-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'fea934b415be47049cead35f1f9889b2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '636ad204169642dba3c8f24d4dba3d12', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62070) authorize /opt/stack/nova/nova/policy.py:201}} [ 895.384752] env[62070]: DEBUG oslo_vmware.api [None req-4e113b76-ae40-4d13-b36b-369b6e3bb728 tempest-ServerRescueTestJSONUnderV235-557690179 tempest-ServerRescueTestJSONUnderV235-557690179-project-member] Task: {'id': task-1122014, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.765414} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 895.385862] env[62070]: INFO nova.virt.vmwareapi.ds_util [None req-4e113b76-ae40-4d13-b36b-369b6e3bb728 tempest-ServerRescueTestJSONUnderV235-557690179 tempest-ServerRescueTestJSONUnderV235-557690179-project-member] Copied virtual disk from [datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore2] 559eee5b-0834-4dcf-a436-5e58644c7a3b/43ea607c-7ece-4601-9b11-75c6a16aa7dd-rescue.vmdk. 
[ 895.390664] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ad0ade7-5e01-48b3-9237-63fe6a852b6c {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.420940] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-4e113b76-ae40-4d13-b36b-369b6e3bb728 tempest-ServerRescueTestJSONUnderV235-557690179 tempest-ServerRescueTestJSONUnderV235-557690179-project-member] [instance: 559eee5b-0834-4dcf-a436-5e58644c7a3b] Reconfiguring VM instance instance-0000004c to attach disk [datastore2] 559eee5b-0834-4dcf-a436-5e58644c7a3b/43ea607c-7ece-4601-9b11-75c6a16aa7dd-rescue.vmdk or device None with type thin {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 895.424336] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-23991461-ae67-4dec-86ea-bb41d7eb7e1d {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.452314] env[62070]: DEBUG oslo_vmware.api [None req-98c8c84f-4cf9-474f-aae7-fd7e5cc6d19c tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52066075-58ef-8cd5-9cb1-57706ab2490f, 'name': SearchDatastore_Task, 'duration_secs': 0.062895} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 895.455913] env[62070]: DEBUG oslo_concurrency.lockutils [None req-98c8c84f-4cf9-474f-aae7-fd7e5cc6d19c tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Releasing lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 895.456441] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-98c8c84f-4cf9-474f-aae7-fd7e5cc6d19c tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore2] 7dc27fe6-495f-498d-88fe-a99ddc19a21c/7dc27fe6-495f-498d-88fe-a99ddc19a21c.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 895.456565] env[62070]: DEBUG oslo_vmware.api [None req-4e113b76-ae40-4d13-b36b-369b6e3bb728 tempest-ServerRescueTestJSONUnderV235-557690179 tempest-ServerRescueTestJSONUnderV235-557690179-project-member] Waiting for the task: (returnval){ [ 895.456565] env[62070]: value = "task-1122015" [ 895.456565] env[62070]: _type = "Task" [ 895.456565] env[62070]: } to complete. 
{{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 895.456973] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0ebe810b-e649-4076-9aab-95c951c4c82a {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.467608] env[62070]: DEBUG oslo_vmware.api [None req-4e113b76-ae40-4d13-b36b-369b6e3bb728 tempest-ServerRescueTestJSONUnderV235-557690179 tempest-ServerRescueTestJSONUnderV235-557690179-project-member] Task: {'id': task-1122015, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 895.469028] env[62070]: DEBUG oslo_vmware.api [None req-98c8c84f-4cf9-474f-aae7-fd7e5cc6d19c tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Waiting for the task: (returnval){ [ 895.469028] env[62070]: value = "task-1122016" [ 895.469028] env[62070]: _type = "Task" [ 895.469028] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 895.482544] env[62070]: DEBUG oslo_vmware.api [None req-98c8c84f-4cf9-474f-aae7-fd7e5cc6d19c tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Task: {'id': task-1122016, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 895.527940] env[62070]: DEBUG nova.network.neutron [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] [instance: 71aead12-a182-40a7-b5a9-91c01271b800] Updating instance_info_cache with network_info: [{"id": "a3ed0957-14c2-4144-8d45-f4a0e5cb45ab", "address": "fa:16:3e:3c:6a:3d", "network": {"id": "1ca75409-ae3f-4837-a2b6-ed2445253336", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1883034081-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.208", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f29ac48ab6544ec0bd1d210aec05dbc5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1559ce49-7345-443f-bf02-4bfeb88356ef", "external-id": "nsx-vlan-transportzone-670", "segmentation_id": 670, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa3ed0957-14", "ovs_interfaceid": "a3ed0957-14c2-4144-8d45-f4a0e5cb45ab", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 895.540849] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6c7a189b-a3d7-40da-a908-7213e68deb41 tempest-ServerAddressesNegativeTestJSON-1593054557 tempest-ServerAddressesNegativeTestJSON-1593054557-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62070) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 895.662471] env[62070]: DEBUG nova.network.neutron [None req-e9c130d3-ff3e-4741-9c51-ba262ee7aeaa tempest-ServerPasswordTestJSON-1580526241 tempest-ServerPasswordTestJSON-1580526241-project-member] [instance: e51d0146-502a-4ace-856e-b0dbcc11edea] Successfully created port: 2aadf794-6fd0-4b81-a924-27e730f9c52c {{(pid=62070) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 895.720857] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa8540d6-98e6-4bbb-af38-8b912b92dc3a {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.731906] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a6b9436-d934-47c6-bbc4-1700e4194457 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.765736] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05c43ca6-05da-4cb1-b855-eb91f493c9b3 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.777472] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e775c79d-1d2b-41a6-be19-171bd076af73 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.783089] env[62070]: DEBUG nova.compute.manager [None req-e9c130d3-ff3e-4741-9c51-ba262ee7aeaa tempest-ServerPasswordTestJSON-1580526241 tempest-ServerPasswordTestJSON-1580526241-project-member] [instance: e51d0146-502a-4ace-856e-b0dbcc11edea] Start building block device mappings for instance. {{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 895.796939] env[62070]: DEBUG nova.compute.provider_tree [None req-6adc601d-ffcb-4677-99aa-bd0495d3c046 tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Updating inventory in ProviderTree for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 895.970371] env[62070]: DEBUG oslo_vmware.api [None req-4e113b76-ae40-4d13-b36b-369b6e3bb728 tempest-ServerRescueTestJSONUnderV235-557690179 tempest-ServerRescueTestJSONUnderV235-557690179-project-member] Task: {'id': task-1122015, 'name': ReconfigVM_Task, 'duration_secs': 0.497064} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 895.974847] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-4e113b76-ae40-4d13-b36b-369b6e3bb728 tempest-ServerRescueTestJSONUnderV235-557690179 tempest-ServerRescueTestJSONUnderV235-557690179-project-member] [instance: 559eee5b-0834-4dcf-a436-5e58644c7a3b] Reconfigured VM instance instance-0000004c to attach disk [datastore2] 559eee5b-0834-4dcf-a436-5e58644c7a3b/43ea607c-7ece-4601-9b11-75c6a16aa7dd-rescue.vmdk or device None with type thin {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 895.975903] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67bea0ab-aab3-453e-8e37-24325658c8fa {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.986463] env[62070]: DEBUG oslo_vmware.api [None req-98c8c84f-4cf9-474f-aae7-fd7e5cc6d19c tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Task: {'id': task-1122016, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 896.012198] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5c3dc705-a88a-4276-9812-64618ebbc7c4 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.031444] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Releasing lock "refresh_cache-71aead12-a182-40a7-b5a9-91c01271b800" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 896.031774] env[62070]: DEBUG nova.compute.manager [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] [instance: 71aead12-a182-40a7-b5a9-91c01271b800] Updated the network info_cache for instance {{(pid=62070) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9999}} [ 896.033594] env[62070]: DEBUG oslo_service.periodic_task [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62070) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 896.034067] env[62070]: DEBUG oslo_vmware.api [None req-4e113b76-ae40-4d13-b36b-369b6e3bb728 tempest-ServerRescueTestJSONUnderV235-557690179 tempest-ServerRescueTestJSONUnderV235-557690179-project-member] Waiting for the task: (returnval){ [ 896.034067] env[62070]: value = "task-1122017" [ 896.034067] env[62070]: _type = "Task" [ 896.034067] env[62070]: } to complete. 
{{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 896.034378] env[62070]: DEBUG oslo_service.periodic_task [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62070) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 896.034528] env[62070]: DEBUG oslo_service.periodic_task [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62070) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 896.039859] env[62070]: DEBUG oslo_service.periodic_task [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62070) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 896.040145] env[62070]: DEBUG oslo_service.periodic_task [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62070) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 896.040879] env[62070]: DEBUG oslo_service.periodic_task [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62070) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 896.041458] env[62070]: DEBUG nova.compute.manager [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62070) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}} [ 896.041699] env[62070]: DEBUG oslo_service.periodic_task [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62070) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 896.048275] env[62070]: DEBUG oslo_vmware.api [None req-4e113b76-ae40-4d13-b36b-369b6e3bb728 tempest-ServerRescueTestJSONUnderV235-557690179 tempest-ServerRescueTestJSONUnderV235-557690179-project-member] Task: {'id': task-1122017, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 896.188439] env[62070]: DEBUG nova.compute.manager [req-e2cdb5e8-da26-4bb1-ac08-b941bb0fea7a req-5bcbbc13-b064-47ad-81de-be6b2f633735 service nova] [instance: d2cfcfac-4f15-4b16-9046-76722ee2e39b] Received event network-vif-deleted-4488d696-fdbf-44e5-9b57-3915f167f3ad {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 896.339452] env[62070]: DEBUG nova.scheduler.client.report [None req-6adc601d-ffcb-4677-99aa-bd0495d3c046 tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Updated inventory for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 with generation 105 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 896.339749] env[62070]: DEBUG nova.compute.provider_tree [None req-6adc601d-ffcb-4677-99aa-bd0495d3c046 tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Updating resource provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 generation from 105 to 106 during operation: update_inventory {{(pid=62070) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 896.339936] env[62070]: DEBUG nova.compute.provider_tree [None req-6adc601d-ffcb-4677-99aa-bd0495d3c046 tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Updating inventory in ProviderTree for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 896.479879] env[62070]: DEBUG oslo_vmware.api [None req-98c8c84f-4cf9-474f-aae7-fd7e5cc6d19c tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Task: {'id': task-1122016, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.520389} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 896.480211] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-98c8c84f-4cf9-474f-aae7-fd7e5cc6d19c tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore2] 7dc27fe6-495f-498d-88fe-a99ddc19a21c/7dc27fe6-495f-498d-88fe-a99ddc19a21c.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 896.480440] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-98c8c84f-4cf9-474f-aae7-fd7e5cc6d19c tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] [instance: 7dc27fe6-495f-498d-88fe-a99ddc19a21c] Extending root virtual disk to 1048576 {{(pid=62070) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 896.480707] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-bc14abdc-79a3-47a3-8d20-6738fefda883 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.491162] env[62070]: DEBUG oslo_vmware.api [None req-98c8c84f-4cf9-474f-aae7-fd7e5cc6d19c tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Waiting for the task: (returnval){ [ 896.491162] env[62070]: value = "task-1122018" [ 896.491162] env[62070]: _type = "Task" [ 896.491162] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 896.497573] env[62070]: DEBUG oslo_vmware.api [None req-98c8c84f-4cf9-474f-aae7-fd7e5cc6d19c tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Task: {'id': task-1122018, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 896.545018] env[62070]: DEBUG oslo_vmware.api [None req-4e113b76-ae40-4d13-b36b-369b6e3bb728 tempest-ServerRescueTestJSONUnderV235-557690179 tempest-ServerRescueTestJSONUnderV235-557690179-project-member] Task: {'id': task-1122017, 'name': ReconfigVM_Task, 'duration_secs': 0.47105} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 896.546579] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-4e113b76-ae40-4d13-b36b-369b6e3bb728 tempest-ServerRescueTestJSONUnderV235-557690179 tempest-ServerRescueTestJSONUnderV235-557690179-project-member] [instance: 559eee5b-0834-4dcf-a436-5e58644c7a3b] Powering on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 896.548209] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0ce88248-454c-412a-b679-3a1d94f1f113 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.552360] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 896.561909] env[62070]: DEBUG oslo_vmware.api [None req-4e113b76-ae40-4d13-b36b-369b6e3bb728 tempest-ServerRescueTestJSONUnderV235-557690179 tempest-ServerRescueTestJSONUnderV235-557690179-project-member] Waiting for the task: (returnval){ [ 896.561909] env[62070]: value = "task-1122019" [ 896.561909] env[62070]: _type = "Task" [ 896.561909] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 896.572620] env[62070]: DEBUG oslo_vmware.api [None req-4e113b76-ae40-4d13-b36b-369b6e3bb728 tempest-ServerRescueTestJSONUnderV235-557690179 tempest-ServerRescueTestJSONUnderV235-557690179-project-member] Task: {'id': task-1122019, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 896.808371] env[62070]: DEBUG nova.compute.manager [None req-e9c130d3-ff3e-4741-9c51-ba262ee7aeaa tempest-ServerPasswordTestJSON-1580526241 tempest-ServerPasswordTestJSON-1580526241-project-member] [instance: e51d0146-502a-4ace-856e-b0dbcc11edea] Start spawning the instance on the hypervisor. 
{{(pid=62070) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 896.835906] env[62070]: DEBUG nova.virt.hardware [None req-e9c130d3-ff3e-4741-9c51-ba262ee7aeaa tempest-ServerPasswordTestJSON-1580526241 tempest-ServerPasswordTestJSON-1580526241-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T09:21:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T09:21:17Z,direct_url=,disk_format='vmdk',id=43ea607c-7ece-4601-9b11-75c6a16aa7dd,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9d42cb2bbadf40d6b35f237f71234611',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T09:21:18Z,virtual_size=,visibility=), allow threads: False {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 896.836277] env[62070]: DEBUG nova.virt.hardware [None req-e9c130d3-ff3e-4741-9c51-ba262ee7aeaa tempest-ServerPasswordTestJSON-1580526241 tempest-ServerPasswordTestJSON-1580526241-project-member] Flavor limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 896.836510] env[62070]: DEBUG nova.virt.hardware [None req-e9c130d3-ff3e-4741-9c51-ba262ee7aeaa tempest-ServerPasswordTestJSON-1580526241 tempest-ServerPasswordTestJSON-1580526241-project-member] Image limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 896.836807] env[62070]: DEBUG nova.virt.hardware [None req-e9c130d3-ff3e-4741-9c51-ba262ee7aeaa tempest-ServerPasswordTestJSON-1580526241 tempest-ServerPasswordTestJSON-1580526241-project-member] Flavor pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 896.837051] env[62070]: DEBUG nova.virt.hardware [None req-e9c130d3-ff3e-4741-9c51-ba262ee7aeaa tempest-ServerPasswordTestJSON-1580526241 tempest-ServerPasswordTestJSON-1580526241-project-member] Image pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 896.837302] env[62070]: DEBUG nova.virt.hardware [None req-e9c130d3-ff3e-4741-9c51-ba262ee7aeaa tempest-ServerPasswordTestJSON-1580526241 tempest-ServerPasswordTestJSON-1580526241-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 896.837601] env[62070]: DEBUG nova.virt.hardware [None req-e9c130d3-ff3e-4741-9c51-ba262ee7aeaa tempest-ServerPasswordTestJSON-1580526241 tempest-ServerPasswordTestJSON-1580526241-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 896.837833] env[62070]: DEBUG nova.virt.hardware [None req-e9c130d3-ff3e-4741-9c51-ba262ee7aeaa tempest-ServerPasswordTestJSON-1580526241 tempest-ServerPasswordTestJSON-1580526241-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 896.838122] env[62070]: DEBUG nova.virt.hardware [None 
req-e9c130d3-ff3e-4741-9c51-ba262ee7aeaa tempest-ServerPasswordTestJSON-1580526241 tempest-ServerPasswordTestJSON-1580526241-project-member] Got 1 possible topologies {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 896.838372] env[62070]: DEBUG nova.virt.hardware [None req-e9c130d3-ff3e-4741-9c51-ba262ee7aeaa tempest-ServerPasswordTestJSON-1580526241 tempest-ServerPasswordTestJSON-1580526241-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 896.838628] env[62070]: DEBUG nova.virt.hardware [None req-e9c130d3-ff3e-4741-9c51-ba262ee7aeaa tempest-ServerPasswordTestJSON-1580526241 tempest-ServerPasswordTestJSON-1580526241-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 896.839829] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8d3fd50-5fb5-4041-a993-986f06e4622f {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.845765] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6adc601d-ffcb-4677-99aa-bd0495d3c046 tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.076s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 896.853248] env[62070]: DEBUG oslo_concurrency.lockutils [None req-5285cf1a-979a-4750-b882-909847338032 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 25.415s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 896.853248] env[62070]: DEBUG nova.objects.instance [None req-5285cf1a-979a-4750-b882-909847338032 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: c16d175c-0b23-4f72-bdb0-844c6f80fd32] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62070) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 896.856730] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-608ddf48-a521-46f6-abd7-61e0f4055b9d {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.880941] env[62070]: INFO nova.scheduler.client.report [None req-6adc601d-ffcb-4677-99aa-bd0495d3c046 tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Deleted allocations for instance 2368b649-f931-454c-92cc-971df4155d90 [ 897.001021] env[62070]: DEBUG oslo_vmware.api [None req-98c8c84f-4cf9-474f-aae7-fd7e5cc6d19c tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Task: {'id': task-1122018, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.07211} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 897.001364] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-98c8c84f-4cf9-474f-aae7-fd7e5cc6d19c tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] [instance: 7dc27fe6-495f-498d-88fe-a99ddc19a21c] Extended root virtual disk {{(pid=62070) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 897.002607] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ba026b6-10b9-44a2-8624-b93107e0b4df {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.027620] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-98c8c84f-4cf9-474f-aae7-fd7e5cc6d19c tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] [instance: 7dc27fe6-495f-498d-88fe-a99ddc19a21c] Reconfiguring VM instance instance-0000004e to attach disk [datastore2] 7dc27fe6-495f-498d-88fe-a99ddc19a21c/7dc27fe6-495f-498d-88fe-a99ddc19a21c.vmdk or device None with type sparse {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 897.029074] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0294865c-d6aa-4080-abb4-5e22778f4dff {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.053441] env[62070]: DEBUG oslo_vmware.api [None req-98c8c84f-4cf9-474f-aae7-fd7e5cc6d19c tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Waiting for the task: (returnval){ [ 897.053441] env[62070]: value = "task-1122020" [ 897.053441] env[62070]: _type = "Task" [ 897.053441] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 897.063739] env[62070]: DEBUG oslo_vmware.api [None req-98c8c84f-4cf9-474f-aae7-fd7e5cc6d19c tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Task: {'id': task-1122020, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 897.072750] env[62070]: DEBUG oslo_vmware.api [None req-4e113b76-ae40-4d13-b36b-369b6e3bb728 tempest-ServerRescueTestJSONUnderV235-557690179 tempest-ServerRescueTestJSONUnderV235-557690179-project-member] Task: {'id': task-1122019, 'name': PowerOnVM_Task, 'duration_secs': 0.463505} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 897.073038] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-4e113b76-ae40-4d13-b36b-369b6e3bb728 tempest-ServerRescueTestJSONUnderV235-557690179 tempest-ServerRescueTestJSONUnderV235-557690179-project-member] [instance: 559eee5b-0834-4dcf-a436-5e58644c7a3b] Powered on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 897.076195] env[62070]: DEBUG nova.compute.manager [None req-4e113b76-ae40-4d13-b36b-369b6e3bb728 tempest-ServerRescueTestJSONUnderV235-557690179 tempest-ServerRescueTestJSONUnderV235-557690179-project-member] [instance: 559eee5b-0834-4dcf-a436-5e58644c7a3b] Checking state {{(pid=62070) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 897.077418] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4de39f8b-e8c7-4b1c-9747-e09ef70d57a5 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.393191] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6adc601d-ffcb-4677-99aa-bd0495d3c046 tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Lock "2368b649-f931-454c-92cc-971df4155d90" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 30.299s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 897.495245] env[62070]: DEBUG nova.compute.manager [req-37af9c32-ca66-426d-8f43-edc7c346210f req-3d5ca11b-c9eb-4287-a4c5-a2265d1e51b1 service nova] [instance: e51d0146-502a-4ace-856e-b0dbcc11edea] Received event network-vif-plugged-2aadf794-6fd0-4b81-a924-27e730f9c52c {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 897.495502] env[62070]: DEBUG oslo_concurrency.lockutils [req-37af9c32-ca66-426d-8f43-edc7c346210f req-3d5ca11b-c9eb-4287-a4c5-a2265d1e51b1 service nova] Acquiring lock "e51d0146-502a-4ace-856e-b0dbcc11edea-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 897.495732] env[62070]: DEBUG oslo_concurrency.lockutils [req-37af9c32-ca66-426d-8f43-edc7c346210f req-3d5ca11b-c9eb-4287-a4c5-a2265d1e51b1 service nova] Lock "e51d0146-502a-4ace-856e-b0dbcc11edea-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 897.495906] env[62070]: DEBUG oslo_concurrency.lockutils [req-37af9c32-ca66-426d-8f43-edc7c346210f req-3d5ca11b-c9eb-4287-a4c5-a2265d1e51b1 service nova] Lock "e51d0146-502a-4ace-856e-b0dbcc11edea-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 897.498323] env[62070]: DEBUG nova.compute.manager [req-37af9c32-ca66-426d-8f43-edc7c346210f req-3d5ca11b-c9eb-4287-a4c5-a2265d1e51b1 service nova] [instance: e51d0146-502a-4ace-856e-b0dbcc11edea] No waiting events found dispatching network-vif-plugged-2aadf794-6fd0-4b81-a924-27e730f9c52c {{(pid=62070) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 897.498559] env[62070]: WARNING 
nova.compute.manager [req-37af9c32-ca66-426d-8f43-edc7c346210f req-3d5ca11b-c9eb-4287-a4c5-a2265d1e51b1 service nova] [instance: e51d0146-502a-4ace-856e-b0dbcc11edea] Received unexpected event network-vif-plugged-2aadf794-6fd0-4b81-a924-27e730f9c52c for instance with vm_state building and task_state spawning. [ 897.547837] env[62070]: DEBUG oslo_concurrency.lockutils [None req-bcbfa2d3-8518-4c21-a4ab-0c0fd684eb49 tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Acquiring lock "10672096-00ba-4481-8ab3-085a185076db" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 897.548265] env[62070]: DEBUG oslo_concurrency.lockutils [None req-bcbfa2d3-8518-4c21-a4ab-0c0fd684eb49 tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Lock "10672096-00ba-4481-8ab3-085a185076db" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 897.548530] env[62070]: DEBUG oslo_concurrency.lockutils [None req-bcbfa2d3-8518-4c21-a4ab-0c0fd684eb49 tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Acquiring lock "10672096-00ba-4481-8ab3-085a185076db-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 897.548747] env[62070]: DEBUG oslo_concurrency.lockutils [None req-bcbfa2d3-8518-4c21-a4ab-0c0fd684eb49 tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Lock "10672096-00ba-4481-8ab3-085a185076db-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 897.548926] env[62070]: DEBUG oslo_concurrency.lockutils [None req-bcbfa2d3-8518-4c21-a4ab-0c0fd684eb49 tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Lock "10672096-00ba-4481-8ab3-085a185076db-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 897.551184] env[62070]: INFO nova.compute.manager [None req-bcbfa2d3-8518-4c21-a4ab-0c0fd684eb49 tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] [instance: 10672096-00ba-4481-8ab3-085a185076db] Terminating instance [ 897.553175] env[62070]: DEBUG nova.compute.manager [None req-bcbfa2d3-8518-4c21-a4ab-0c0fd684eb49 tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] [instance: 10672096-00ba-4481-8ab3-085a185076db] Start destroying the instance on the hypervisor. 
{{(pid=62070) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 897.553602] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-bcbfa2d3-8518-4c21-a4ab-0c0fd684eb49 tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] [instance: 10672096-00ba-4481-8ab3-085a185076db] Destroying instance {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 897.555012] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0a4ab76-fe70-499e-9cef-ee87bd9d7449 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.569840] env[62070]: DEBUG oslo_vmware.api [None req-98c8c84f-4cf9-474f-aae7-fd7e5cc6d19c tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Task: {'id': task-1122020, 'name': ReconfigVM_Task, 'duration_secs': 0.294056} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 897.570598] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-98c8c84f-4cf9-474f-aae7-fd7e5cc6d19c tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] [instance: 7dc27fe6-495f-498d-88fe-a99ddc19a21c] Reconfigured VM instance instance-0000004e to attach disk [datastore2] 7dc27fe6-495f-498d-88fe-a99ddc19a21c/7dc27fe6-495f-498d-88fe-a99ddc19a21c.vmdk or device None with type sparse {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 897.571301] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-bcbfa2d3-8518-4c21-a4ab-0c0fd684eb49 tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] [instance: 10672096-00ba-4481-8ab3-085a185076db] Powering off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 897.571516] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-fb463ff6-07b7-404c-8a70-b0b2fd6630a8 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.574058] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a68994b6-c9c9-492b-a288-5e4ae9f7f34c {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.581179] env[62070]: DEBUG oslo_vmware.api [None req-98c8c84f-4cf9-474f-aae7-fd7e5cc6d19c tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Waiting for the task: (returnval){ [ 897.581179] env[62070]: value = "task-1122021" [ 897.581179] env[62070]: _type = "Task" [ 897.581179] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 897.582647] env[62070]: DEBUG oslo_vmware.api [None req-bcbfa2d3-8518-4c21-a4ab-0c0fd684eb49 tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Waiting for the task: (returnval){ [ 897.582647] env[62070]: value = "task-1122022" [ 897.582647] env[62070]: _type = "Task" [ 897.582647] env[62070]: } to complete. 
{{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 897.601309] env[62070]: DEBUG oslo_vmware.api [None req-98c8c84f-4cf9-474f-aae7-fd7e5cc6d19c tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Task: {'id': task-1122021, 'name': Rename_Task} progress is 6%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 897.608029] env[62070]: DEBUG oslo_vmware.api [None req-bcbfa2d3-8518-4c21-a4ab-0c0fd684eb49 tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Task: {'id': task-1122022, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 897.868764] env[62070]: DEBUG oslo_concurrency.lockutils [None req-5285cf1a-979a-4750-b882-909847338032 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.016s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 897.870011] env[62070]: DEBUG oslo_concurrency.lockutils [None req-3079b572-a07c-4d35-8725-af61f5219860 tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 26.393s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 897.870234] env[62070]: DEBUG nova.objects.instance [None req-3079b572-a07c-4d35-8725-af61f5219860 tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] [instance: 61ab347d-1342-4f59-8955-10d575993b77] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62070) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 898.053242] env[62070]: DEBUG nova.network.neutron [None req-e9c130d3-ff3e-4741-9c51-ba262ee7aeaa tempest-ServerPasswordTestJSON-1580526241 tempest-ServerPasswordTestJSON-1580526241-project-member] [instance: e51d0146-502a-4ace-856e-b0dbcc11edea] Successfully updated port: 2aadf794-6fd0-4b81-a924-27e730f9c52c {{(pid=62070) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 898.100008] env[62070]: DEBUG oslo_vmware.api [None req-bcbfa2d3-8518-4c21-a4ab-0c0fd684eb49 tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Task: {'id': task-1122022, 'name': PowerOffVM_Task, 'duration_secs': 0.218234} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 898.103011] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-bcbfa2d3-8518-4c21-a4ab-0c0fd684eb49 tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] [instance: 10672096-00ba-4481-8ab3-085a185076db] Powered off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 898.103274] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-bcbfa2d3-8518-4c21-a4ab-0c0fd684eb49 tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] [instance: 10672096-00ba-4481-8ab3-085a185076db] Unregistering the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 898.103585] env[62070]: DEBUG oslo_vmware.api [None req-98c8c84f-4cf9-474f-aae7-fd7e5cc6d19c tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Task: {'id': task-1122021, 'name': Rename_Task, 'duration_secs': 0.157265} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 898.104089] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a9832582-c9aa-4318-8441-d0bbe881eb37 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.105631] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-98c8c84f-4cf9-474f-aae7-fd7e5cc6d19c tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] [instance: 7dc27fe6-495f-498d-88fe-a99ddc19a21c] Powering on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 898.106720] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-cb2ee8d6-27e1-4007-9ecc-33e5fbee9cac {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.113486] env[62070]: DEBUG nova.compute.manager [req-90db99e4-9266-4084-b8c9-03626bdd0cec req-93098e99-2615-44bd-bf7a-d86483212784 service nova] [instance: e51d0146-502a-4ace-856e-b0dbcc11edea] Received event network-changed-2aadf794-6fd0-4b81-a924-27e730f9c52c {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 898.113486] env[62070]: DEBUG nova.compute.manager [req-90db99e4-9266-4084-b8c9-03626bdd0cec req-93098e99-2615-44bd-bf7a-d86483212784 service nova] [instance: e51d0146-502a-4ace-856e-b0dbcc11edea] Refreshing instance network info cache due to event network-changed-2aadf794-6fd0-4b81-a924-27e730f9c52c. 
{{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 898.113486] env[62070]: DEBUG oslo_concurrency.lockutils [req-90db99e4-9266-4084-b8c9-03626bdd0cec req-93098e99-2615-44bd-bf7a-d86483212784 service nova] Acquiring lock "refresh_cache-e51d0146-502a-4ace-856e-b0dbcc11edea" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 898.113486] env[62070]: DEBUG oslo_concurrency.lockutils [req-90db99e4-9266-4084-b8c9-03626bdd0cec req-93098e99-2615-44bd-bf7a-d86483212784 service nova] Acquired lock "refresh_cache-e51d0146-502a-4ace-856e-b0dbcc11edea" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 898.113486] env[62070]: DEBUG nova.network.neutron [req-90db99e4-9266-4084-b8c9-03626bdd0cec req-93098e99-2615-44bd-bf7a-d86483212784 service nova] [instance: e51d0146-502a-4ace-856e-b0dbcc11edea] Refreshing network info cache for port 2aadf794-6fd0-4b81-a924-27e730f9c52c {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 898.117136] env[62070]: DEBUG oslo_vmware.api [None req-98c8c84f-4cf9-474f-aae7-fd7e5cc6d19c tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Waiting for the task: (returnval){ [ 898.117136] env[62070]: value = "task-1122024" [ 898.117136] env[62070]: _type = "Task" [ 898.117136] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 898.126809] env[62070]: DEBUG oslo_vmware.api [None req-98c8c84f-4cf9-474f-aae7-fd7e5cc6d19c tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Task: {'id': task-1122024, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 898.190517] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-bcbfa2d3-8518-4c21-a4ab-0c0fd684eb49 tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] [instance: 10672096-00ba-4481-8ab3-085a185076db] Unregistered the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 898.190747] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-bcbfa2d3-8518-4c21-a4ab-0c0fd684eb49 tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] [instance: 10672096-00ba-4481-8ab3-085a185076db] Deleting contents of the VM from datastore datastore1 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 898.190940] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-bcbfa2d3-8518-4c21-a4ab-0c0fd684eb49 tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Deleting the datastore file [datastore1] 10672096-00ba-4481-8ab3-085a185076db {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 898.191243] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e314686b-238d-441c-8802-f3a5b1167aaa {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.198597] env[62070]: DEBUG oslo_vmware.api [None req-bcbfa2d3-8518-4c21-a4ab-0c0fd684eb49 tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Waiting for the task: (returnval){ [ 898.198597] env[62070]: value = "task-1122025" [ 898.198597] env[62070]: _type = "Task" [ 898.198597] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 898.207202] env[62070]: DEBUG oslo_vmware.api [None req-bcbfa2d3-8518-4c21-a4ab-0c0fd684eb49 tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Task: {'id': task-1122025, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 898.540203] env[62070]: DEBUG nova.compute.manager [req-82c7ad91-1f91-4771-87bf-8aae653d20c6 req-6d262c33-e708-4abe-95fe-7e0db6631822 service nova] [instance: 559eee5b-0834-4dcf-a436-5e58644c7a3b] Received event network-changed-c24d842f-4fc0-417f-a913-acda1bd7c41b {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 898.540466] env[62070]: DEBUG nova.compute.manager [req-82c7ad91-1f91-4771-87bf-8aae653d20c6 req-6d262c33-e708-4abe-95fe-7e0db6631822 service nova] [instance: 559eee5b-0834-4dcf-a436-5e58644c7a3b] Refreshing instance network info cache due to event network-changed-c24d842f-4fc0-417f-a913-acda1bd7c41b. 
{{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 898.540870] env[62070]: DEBUG oslo_concurrency.lockutils [req-82c7ad91-1f91-4771-87bf-8aae653d20c6 req-6d262c33-e708-4abe-95fe-7e0db6631822 service nova] Acquiring lock "refresh_cache-559eee5b-0834-4dcf-a436-5e58644c7a3b" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 898.541048] env[62070]: DEBUG oslo_concurrency.lockutils [req-82c7ad91-1f91-4771-87bf-8aae653d20c6 req-6d262c33-e708-4abe-95fe-7e0db6631822 service nova] Acquired lock "refresh_cache-559eee5b-0834-4dcf-a436-5e58644c7a3b" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 898.541231] env[62070]: DEBUG nova.network.neutron [req-82c7ad91-1f91-4771-87bf-8aae653d20c6 req-6d262c33-e708-4abe-95fe-7e0db6631822 service nova] [instance: 559eee5b-0834-4dcf-a436-5e58644c7a3b] Refreshing network info cache for port c24d842f-4fc0-417f-a913-acda1bd7c41b {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 898.559218] env[62070]: DEBUG oslo_concurrency.lockutils [None req-e9c130d3-ff3e-4741-9c51-ba262ee7aeaa tempest-ServerPasswordTestJSON-1580526241 tempest-ServerPasswordTestJSON-1580526241-project-member] Acquiring lock "refresh_cache-e51d0146-502a-4ace-856e-b0dbcc11edea" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 898.627272] env[62070]: DEBUG oslo_vmware.api [None req-98c8c84f-4cf9-474f-aae7-fd7e5cc6d19c tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Task: {'id': task-1122024, 'name': PowerOnVM_Task, 'duration_secs': 0.498696} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 898.627560] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-98c8c84f-4cf9-474f-aae7-fd7e5cc6d19c tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] [instance: 7dc27fe6-495f-498d-88fe-a99ddc19a21c] Powered on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 898.627780] env[62070]: INFO nova.compute.manager [None req-98c8c84f-4cf9-474f-aae7-fd7e5cc6d19c tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] [instance: 7dc27fe6-495f-498d-88fe-a99ddc19a21c] Took 9.37 seconds to spawn the instance on the hypervisor. [ 898.627964] env[62070]: DEBUG nova.compute.manager [None req-98c8c84f-4cf9-474f-aae7-fd7e5cc6d19c tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] [instance: 7dc27fe6-495f-498d-88fe-a99ddc19a21c] Checking state {{(pid=62070) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 898.628852] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-237660a3-a2be-47f3-9b75-aa5bb42fd145 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.659021] env[62070]: DEBUG nova.network.neutron [req-90db99e4-9266-4084-b8c9-03626bdd0cec req-93098e99-2615-44bd-bf7a-d86483212784 service nova] [instance: e51d0146-502a-4ace-856e-b0dbcc11edea] Instance cache missing network info. 
{{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 898.709027] env[62070]: DEBUG oslo_vmware.api [None req-bcbfa2d3-8518-4c21-a4ab-0c0fd684eb49 tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Task: {'id': task-1122025, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.21155} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 898.710347] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-bcbfa2d3-8518-4c21-a4ab-0c0fd684eb49 tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Deleted the datastore file {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 898.710624] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-bcbfa2d3-8518-4c21-a4ab-0c0fd684eb49 tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] [instance: 10672096-00ba-4481-8ab3-085a185076db] Deleted contents of the VM from datastore datastore1 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 898.710725] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-bcbfa2d3-8518-4c21-a4ab-0c0fd684eb49 tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] [instance: 10672096-00ba-4481-8ab3-085a185076db] Instance destroyed {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 898.710950] env[62070]: INFO nova.compute.manager [None req-bcbfa2d3-8518-4c21-a4ab-0c0fd684eb49 tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] [instance: 10672096-00ba-4481-8ab3-085a185076db] Took 1.16 seconds to destroy the instance on the hypervisor. [ 898.711203] env[62070]: DEBUG oslo.service.loopingcall [None req-bcbfa2d3-8518-4c21-a4ab-0c0fd684eb49 tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 898.711487] env[62070]: DEBUG nova.compute.manager [-] [instance: 10672096-00ba-4481-8ab3-085a185076db] Deallocating network for instance {{(pid=62070) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 898.711487] env[62070]: DEBUG nova.network.neutron [-] [instance: 10672096-00ba-4481-8ab3-085a185076db] deallocate_for_instance() {{(pid=62070) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 898.778046] env[62070]: DEBUG nova.network.neutron [req-90db99e4-9266-4084-b8c9-03626bdd0cec req-93098e99-2615-44bd-bf7a-d86483212784 service nova] [instance: e51d0146-502a-4ace-856e-b0dbcc11edea] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 898.880071] env[62070]: DEBUG oslo_concurrency.lockutils [None req-3079b572-a07c-4d35-8725-af61f5219860 tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.009s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 898.881272] env[62070]: DEBUG oslo_concurrency.lockutils [None req-3a0a24c1-b1ea-482f-9c7a-723bd1e98ca4 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 26.536s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 898.881518] env[62070]: DEBUG nova.objects.instance [None req-3a0a24c1-b1ea-482f-9c7a-723bd1e98ca4 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Lazy-loading 'resources' on Instance uuid 359ae9f2-a907-459e-99b9-3e043d5d015f {{(pid=62070) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 899.148194] env[62070]: INFO nova.compute.manager [None req-98c8c84f-4cf9-474f-aae7-fd7e5cc6d19c tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] [instance: 7dc27fe6-495f-498d-88fe-a99ddc19a21c] Took 40.05 seconds to build instance. 
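The records in this capture appear to follow a fixed layout: a monotonic timestamp in brackets, an env[...] tag, a log level, a logger name, a request/project context in brackets, an optional [instance: <uuid>] marker, the message itself, and usually a trailing {{(pid=...) <function> <file>:<line>}} location hint. The sketch below is a minimal, illustrative Python parser for that layout, inferred only from the surrounding entries; the regular expression and field names are assumptions drawn from this capture, not an official log-format specification.

import re

# Minimal sketch of a parser for the nova-compute records in this capture.
# The layout (timestamp, env tag, level, logger, request context, optional
# instance uuid, message, optional {{(pid=...) function path:line}} suffix)
# is inferred from the surrounding entries and is an assumption, not a spec.
RECORD = re.compile(
    r"\[\s*(?P<ts>\d+\.\d+)\]\s+"                    # e.g. [ 899.148194]
    r"env\[(?P<env>\d+)\]:\s+"                       # e.g. env[62070]:
    r"(?P<level>DEBUG|INFO|WARNING|ERROR)\s+"
    r"(?P<logger>\S+)\s+"                            # e.g. nova.compute.manager
    r"\[(?P<context>[^\]]*)\]\s+"                    # request / tenant context
    r"(?:\[instance:\s*(?P<instance>[0-9a-f-]+)\]\s+)?"
    r"(?P<message>.*?)"
    r"(?:\s+\{\{\(pid=(?P<pid>\d+)\)\s+(?P<func>\S+)\s+(?P<location>\S+)\}\})?\s*$"
)

def parse_record(line):
    """Return a dict of fields for one record, or None if the line does not match."""
    match = RECORD.match(line.strip())
    return match.groupdict() if match else None

if __name__ == "__main__":
    sample = ("[ 899.148194] env[62070]: INFO nova.compute.manager "
              "[None req-98c8c84f-4cf9-474f-aae7-fd7e5cc6d19c "
              "tempest-ServerRescueNegativeTestJSON-883366746 "
              "tempest-ServerRescueNegativeTestJSON-883366746-project-member] "
              "[instance: 7dc27fe6-495f-498d-88fe-a99ddc19a21c] "
              "Took 40.05 seconds to build instance.")
    print(parse_record(sample))

Once continuation fragments (the bare {{(pid=...)}} tails and wrapped message pieces) are rejoined to their records, feeding each record through parse_record makes it straightforward to group entries by request id or instance uuid when tracing a single tempest test, such as the build, rescue, and terminate flows recorded below, through this log.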
[ 899.281193] env[62070]: DEBUG oslo_concurrency.lockutils [req-90db99e4-9266-4084-b8c9-03626bdd0cec req-93098e99-2615-44bd-bf7a-d86483212784 service nova] Releasing lock "refresh_cache-e51d0146-502a-4ace-856e-b0dbcc11edea" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 899.281603] env[62070]: DEBUG oslo_concurrency.lockutils [None req-e9c130d3-ff3e-4741-9c51-ba262ee7aeaa tempest-ServerPasswordTestJSON-1580526241 tempest-ServerPasswordTestJSON-1580526241-project-member] Acquired lock "refresh_cache-e51d0146-502a-4ace-856e-b0dbcc11edea" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 899.281806] env[62070]: DEBUG nova.network.neutron [None req-e9c130d3-ff3e-4741-9c51-ba262ee7aeaa tempest-ServerPasswordTestJSON-1580526241 tempest-ServerPasswordTestJSON-1580526241-project-member] [instance: e51d0146-502a-4ace-856e-b0dbcc11edea] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 899.313662] env[62070]: DEBUG nova.network.neutron [req-82c7ad91-1f91-4771-87bf-8aae653d20c6 req-6d262c33-e708-4abe-95fe-7e0db6631822 service nova] [instance: 559eee5b-0834-4dcf-a436-5e58644c7a3b] Updated VIF entry in instance network info cache for port c24d842f-4fc0-417f-a913-acda1bd7c41b. {{(pid=62070) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 899.314051] env[62070]: DEBUG nova.network.neutron [req-82c7ad91-1f91-4771-87bf-8aae653d20c6 req-6d262c33-e708-4abe-95fe-7e0db6631822 service nova] [instance: 559eee5b-0834-4dcf-a436-5e58644c7a3b] Updating instance_info_cache with network_info: [{"id": "c24d842f-4fc0-417f-a913-acda1bd7c41b", "address": "fa:16:3e:9d:d6:f1", "network": {"id": "f4af58a1-85b4-4c02-814f-2896adf35801", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-859480877-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "647582b6f4a048aea74c761f4e136a34", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "27138a4c-60c9-45fb-bf37-4c2f765315a3", "external-id": "nsx-vlan-transportzone-736", "segmentation_id": 736, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc24d842f-4f", "ovs_interfaceid": "c24d842f-4fc0-417f-a913-acda1bd7c41b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 899.572591] env[62070]: DEBUG nova.network.neutron [-] [instance: 10672096-00ba-4481-8ab3-085a185076db] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 899.650425] env[62070]: DEBUG oslo_concurrency.lockutils [None req-98c8c84f-4cf9-474f-aae7-fd7e5cc6d19c tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Lock "7dc27fe6-495f-498d-88fe-a99ddc19a21c" "released" by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 41.569s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 899.707433] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f93c0809-7314-4214-9a68-6e226b28614a {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.715573] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f374e1a4-874e-46c0-91a2-ca9eb5df2d23 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.746073] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ccb0834-2fea-4d80-ad19-c13486eec5fd {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.754404] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb703427-c03c-419f-97b3-619ffea1bcc5 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.770441] env[62070]: DEBUG nova.compute.provider_tree [None req-3a0a24c1-b1ea-482f-9c7a-723bd1e98ca4 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 899.816886] env[62070]: DEBUG oslo_concurrency.lockutils [req-82c7ad91-1f91-4771-87bf-8aae653d20c6 req-6d262c33-e708-4abe-95fe-7e0db6631822 service nova] Releasing lock "refresh_cache-559eee5b-0834-4dcf-a436-5e58644c7a3b" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 899.820833] env[62070]: DEBUG nova.network.neutron [None req-e9c130d3-ff3e-4741-9c51-ba262ee7aeaa tempest-ServerPasswordTestJSON-1580526241 tempest-ServerPasswordTestJSON-1580526241-project-member] [instance: e51d0146-502a-4ace-856e-b0dbcc11edea] Instance cache missing network info. 
{{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 900.056421] env[62070]: DEBUG nova.network.neutron [None req-e9c130d3-ff3e-4741-9c51-ba262ee7aeaa tempest-ServerPasswordTestJSON-1580526241 tempest-ServerPasswordTestJSON-1580526241-project-member] [instance: e51d0146-502a-4ace-856e-b0dbcc11edea] Updating instance_info_cache with network_info: [{"id": "2aadf794-6fd0-4b81-a924-27e730f9c52c", "address": "fa:16:3e:f2:74:54", "network": {"id": "457ecf9f-e982-4d57-8e43-1646309a5a73", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-56531768-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "636ad204169642dba3c8f24d4dba3d12", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aaf99dce-c773-48db-a2d9-00b8d0a7c75d", "external-id": "nsx-vlan-transportzone-248", "segmentation_id": 248, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2aadf794-6f", "ovs_interfaceid": "2aadf794-6fd0-4b81-a924-27e730f9c52c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 900.078253] env[62070]: INFO nova.compute.manager [-] [instance: 10672096-00ba-4481-8ab3-085a185076db] Took 1.37 seconds to deallocate network for instance. [ 900.273450] env[62070]: DEBUG nova.scheduler.client.report [None req-3a0a24c1-b1ea-482f-9c7a-723bd1e98ca4 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 900.297832] env[62070]: DEBUG nova.compute.manager [req-f0b849d5-ab42-4ba5-bdc3-27cd23508eb8 req-6d559fae-a61e-4af6-869f-8ca4813301e0 service nova] [instance: 559eee5b-0834-4dcf-a436-5e58644c7a3b] Received event network-changed-c24d842f-4fc0-417f-a913-acda1bd7c41b {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 900.298084] env[62070]: DEBUG nova.compute.manager [req-f0b849d5-ab42-4ba5-bdc3-27cd23508eb8 req-6d559fae-a61e-4af6-869f-8ca4813301e0 service nova] [instance: 559eee5b-0834-4dcf-a436-5e58644c7a3b] Refreshing instance network info cache due to event network-changed-c24d842f-4fc0-417f-a913-acda1bd7c41b. 
{{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 900.298319] env[62070]: DEBUG oslo_concurrency.lockutils [req-f0b849d5-ab42-4ba5-bdc3-27cd23508eb8 req-6d559fae-a61e-4af6-869f-8ca4813301e0 service nova] Acquiring lock "refresh_cache-559eee5b-0834-4dcf-a436-5e58644c7a3b" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 900.298511] env[62070]: DEBUG oslo_concurrency.lockutils [req-f0b849d5-ab42-4ba5-bdc3-27cd23508eb8 req-6d559fae-a61e-4af6-869f-8ca4813301e0 service nova] Acquired lock "refresh_cache-559eee5b-0834-4dcf-a436-5e58644c7a3b" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 900.298639] env[62070]: DEBUG nova.network.neutron [req-f0b849d5-ab42-4ba5-bdc3-27cd23508eb8 req-6d559fae-a61e-4af6-869f-8ca4813301e0 service nova] [instance: 559eee5b-0834-4dcf-a436-5e58644c7a3b] Refreshing network info cache for port c24d842f-4fc0-417f-a913-acda1bd7c41b {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 900.558957] env[62070]: DEBUG oslo_concurrency.lockutils [None req-e9c130d3-ff3e-4741-9c51-ba262ee7aeaa tempest-ServerPasswordTestJSON-1580526241 tempest-ServerPasswordTestJSON-1580526241-project-member] Releasing lock "refresh_cache-e51d0146-502a-4ace-856e-b0dbcc11edea" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 900.559827] env[62070]: DEBUG nova.compute.manager [None req-e9c130d3-ff3e-4741-9c51-ba262ee7aeaa tempest-ServerPasswordTestJSON-1580526241 tempest-ServerPasswordTestJSON-1580526241-project-member] [instance: e51d0146-502a-4ace-856e-b0dbcc11edea] Instance network_info: |[{"id": "2aadf794-6fd0-4b81-a924-27e730f9c52c", "address": "fa:16:3e:f2:74:54", "network": {"id": "457ecf9f-e982-4d57-8e43-1646309a5a73", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-56531768-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "636ad204169642dba3c8f24d4dba3d12", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aaf99dce-c773-48db-a2d9-00b8d0a7c75d", "external-id": "nsx-vlan-transportzone-248", "segmentation_id": 248, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2aadf794-6f", "ovs_interfaceid": "2aadf794-6fd0-4b81-a924-27e730f9c52c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 900.560270] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-e9c130d3-ff3e-4741-9c51-ba262ee7aeaa tempest-ServerPasswordTestJSON-1580526241 tempest-ServerPasswordTestJSON-1580526241-project-member] [instance: e51d0146-502a-4ace-856e-b0dbcc11edea] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f2:74:54', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'aaf99dce-c773-48db-a2d9-00b8d0a7c75d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 
'2aadf794-6fd0-4b81-a924-27e730f9c52c', 'vif_model': 'vmxnet3'}] {{(pid=62070) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 900.568535] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-e9c130d3-ff3e-4741-9c51-ba262ee7aeaa tempest-ServerPasswordTestJSON-1580526241 tempest-ServerPasswordTestJSON-1580526241-project-member] Creating folder: Project (636ad204169642dba3c8f24d4dba3d12). Parent ref: group-v245319. {{(pid=62070) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 900.569164] env[62070]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4fce5c10-2815-458b-b120-e47e51ef98e8 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.582830] env[62070]: INFO nova.virt.vmwareapi.vm_util [None req-e9c130d3-ff3e-4741-9c51-ba262ee7aeaa tempest-ServerPasswordTestJSON-1580526241 tempest-ServerPasswordTestJSON-1580526241-project-member] Created folder: Project (636ad204169642dba3c8f24d4dba3d12) in parent group-v245319. [ 900.583113] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-e9c130d3-ff3e-4741-9c51-ba262ee7aeaa tempest-ServerPasswordTestJSON-1580526241 tempest-ServerPasswordTestJSON-1580526241-project-member] Creating folder: Instances. Parent ref: group-v245452. {{(pid=62070) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 900.583342] env[62070]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0b59cfb2-77bc-4899-8f79-75937ed119ea {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.587488] env[62070]: DEBUG oslo_concurrency.lockutils [None req-bcbfa2d3-8518-4c21-a4ab-0c0fd684eb49 tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 900.598211] env[62070]: INFO nova.virt.vmwareapi.vm_util [None req-e9c130d3-ff3e-4741-9c51-ba262ee7aeaa tempest-ServerPasswordTestJSON-1580526241 tempest-ServerPasswordTestJSON-1580526241-project-member] Created folder: Instances in parent group-v245452. [ 900.598211] env[62070]: DEBUG oslo.service.loopingcall [None req-e9c130d3-ff3e-4741-9c51-ba262ee7aeaa tempest-ServerPasswordTestJSON-1580526241 tempest-ServerPasswordTestJSON-1580526241-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 900.598211] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e51d0146-502a-4ace-856e-b0dbcc11edea] Creating VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 900.598211] env[62070]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e51278f0-3910-4f84-a0ee-dc0e9cc79cf4 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.622022] env[62070]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 900.622022] env[62070]: value = "task-1122028" [ 900.622022] env[62070]: _type = "Task" [ 900.622022] env[62070]: } to complete. 
{{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 900.628943] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1122028, 'name': CreateVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 900.632695] env[62070]: DEBUG nova.compute.manager [req-0a16980d-5b47-4f1c-a5a9-1bb6f473ebca req-c7f4be80-4ed3-4d5a-9edb-91f48e8074d8 service nova] [instance: 10672096-00ba-4481-8ab3-085a185076db] Received event network-vif-deleted-d7ba9e32-c151-4f86-90ea-ba3a9dc7ea10 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 900.633044] env[62070]: DEBUG nova.compute.manager [req-0a16980d-5b47-4f1c-a5a9-1bb6f473ebca req-c7f4be80-4ed3-4d5a-9edb-91f48e8074d8 service nova] [instance: 7dc27fe6-495f-498d-88fe-a99ddc19a21c] Received event network-changed-cac26624-11c7-45a9-acb3-3e86b7232ab2 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 900.633388] env[62070]: DEBUG nova.compute.manager [req-0a16980d-5b47-4f1c-a5a9-1bb6f473ebca req-c7f4be80-4ed3-4d5a-9edb-91f48e8074d8 service nova] [instance: 7dc27fe6-495f-498d-88fe-a99ddc19a21c] Refreshing instance network info cache due to event network-changed-cac26624-11c7-45a9-acb3-3e86b7232ab2. {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 900.633712] env[62070]: DEBUG oslo_concurrency.lockutils [req-0a16980d-5b47-4f1c-a5a9-1bb6f473ebca req-c7f4be80-4ed3-4d5a-9edb-91f48e8074d8 service nova] Acquiring lock "refresh_cache-7dc27fe6-495f-498d-88fe-a99ddc19a21c" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 900.633989] env[62070]: DEBUG oslo_concurrency.lockutils [req-0a16980d-5b47-4f1c-a5a9-1bb6f473ebca req-c7f4be80-4ed3-4d5a-9edb-91f48e8074d8 service nova] Acquired lock "refresh_cache-7dc27fe6-495f-498d-88fe-a99ddc19a21c" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 900.634279] env[62070]: DEBUG nova.network.neutron [req-0a16980d-5b47-4f1c-a5a9-1bb6f473ebca req-c7f4be80-4ed3-4d5a-9edb-91f48e8074d8 service nova] [instance: 7dc27fe6-495f-498d-88fe-a99ddc19a21c] Refreshing network info cache for port cac26624-11c7-45a9-acb3-3e86b7232ab2 {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 900.783919] env[62070]: DEBUG oslo_concurrency.lockutils [None req-3a0a24c1-b1ea-482f-9c7a-723bd1e98ca4 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.900s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 900.784519] env[62070]: DEBUG oslo_concurrency.lockutils [None req-95a98976-70b0-4bbd-a6bf-2286c5bf4562 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 28.160s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 900.810975] env[62070]: INFO nova.scheduler.client.report [None req-3a0a24c1-b1ea-482f-9c7a-723bd1e98ca4 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Deleted allocations for instance 
359ae9f2-a907-459e-99b9-3e043d5d015f [ 901.027118] env[62070]: DEBUG nova.network.neutron [req-f0b849d5-ab42-4ba5-bdc3-27cd23508eb8 req-6d559fae-a61e-4af6-869f-8ca4813301e0 service nova] [instance: 559eee5b-0834-4dcf-a436-5e58644c7a3b] Updated VIF entry in instance network info cache for port c24d842f-4fc0-417f-a913-acda1bd7c41b. {{(pid=62070) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 901.027573] env[62070]: DEBUG nova.network.neutron [req-f0b849d5-ab42-4ba5-bdc3-27cd23508eb8 req-6d559fae-a61e-4af6-869f-8ca4813301e0 service nova] [instance: 559eee5b-0834-4dcf-a436-5e58644c7a3b] Updating instance_info_cache with network_info: [{"id": "c24d842f-4fc0-417f-a913-acda1bd7c41b", "address": "fa:16:3e:9d:d6:f1", "network": {"id": "f4af58a1-85b4-4c02-814f-2896adf35801", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-859480877-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.207", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "647582b6f4a048aea74c761f4e136a34", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "27138a4c-60c9-45fb-bf37-4c2f765315a3", "external-id": "nsx-vlan-transportzone-736", "segmentation_id": 736, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc24d842f-4f", "ovs_interfaceid": "c24d842f-4fc0-417f-a913-acda1bd7c41b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 901.131354] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1122028, 'name': CreateVM_Task, 'duration_secs': 0.4181} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 901.131539] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e51d0146-502a-4ace-856e-b0dbcc11edea] Created VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 901.132242] env[62070]: DEBUG oslo_concurrency.lockutils [None req-e9c130d3-ff3e-4741-9c51-ba262ee7aeaa tempest-ServerPasswordTestJSON-1580526241 tempest-ServerPasswordTestJSON-1580526241-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 901.132457] env[62070]: DEBUG oslo_concurrency.lockutils [None req-e9c130d3-ff3e-4741-9c51-ba262ee7aeaa tempest-ServerPasswordTestJSON-1580526241 tempest-ServerPasswordTestJSON-1580526241-project-member] Acquired lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 901.132800] env[62070]: DEBUG oslo_concurrency.lockutils [None req-e9c130d3-ff3e-4741-9c51-ba262ee7aeaa tempest-ServerPasswordTestJSON-1580526241 tempest-ServerPasswordTestJSON-1580526241-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 901.134034] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-05662eba-6124-4859-9d85-a37f6abf1972 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.139382] env[62070]: DEBUG oslo_vmware.api [None req-e9c130d3-ff3e-4741-9c51-ba262ee7aeaa tempest-ServerPasswordTestJSON-1580526241 tempest-ServerPasswordTestJSON-1580526241-project-member] Waiting for the task: (returnval){ [ 901.139382] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]520796f7-6bca-67d6-26a5-6c367b45678d" [ 901.139382] env[62070]: _type = "Task" [ 901.139382] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 901.150226] env[62070]: DEBUG oslo_vmware.api [None req-e9c130d3-ff3e-4741-9c51-ba262ee7aeaa tempest-ServerPasswordTestJSON-1580526241 tempest-ServerPasswordTestJSON-1580526241-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]520796f7-6bca-67d6-26a5-6c367b45678d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 901.290980] env[62070]: INFO nova.compute.claims [None req-95a98976-70b0-4bbd-a6bf-2286c5bf4562 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: fb054a32-c1aa-4884-a087-da5ad34cf3c4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 901.318351] env[62070]: DEBUG oslo_concurrency.lockutils [None req-3a0a24c1-b1ea-482f-9c7a-723bd1e98ca4 tempest-ListImageFiltersTestJSON-300603416 tempest-ListImageFiltersTestJSON-300603416-project-member] Lock "359ae9f2-a907-459e-99b9-3e043d5d015f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 31.925s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 901.528931] env[62070]: DEBUG nova.network.neutron [req-0a16980d-5b47-4f1c-a5a9-1bb6f473ebca req-c7f4be80-4ed3-4d5a-9edb-91f48e8074d8 service nova] [instance: 7dc27fe6-495f-498d-88fe-a99ddc19a21c] Updated VIF entry in instance network info cache for port cac26624-11c7-45a9-acb3-3e86b7232ab2. {{(pid=62070) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 901.528931] env[62070]: DEBUG nova.network.neutron [req-0a16980d-5b47-4f1c-a5a9-1bb6f473ebca req-c7f4be80-4ed3-4d5a-9edb-91f48e8074d8 service nova] [instance: 7dc27fe6-495f-498d-88fe-a99ddc19a21c] Updating instance_info_cache with network_info: [{"id": "cac26624-11c7-45a9-acb3-3e86b7232ab2", "address": "fa:16:3e:8c:68:96", "network": {"id": "754f4ec8-0bc6-4726-8b88-1a4e1a326699", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-293486644-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.209", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1a94db233e3a43dc9aa7cf887c6cb1f6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d2742ba-c3af-4412-877d-c2811dfeba46", "external-id": "nsx-vlan-transportzone-390", "segmentation_id": 390, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcac26624-11", "ovs_interfaceid": "cac26624-11c7-45a9-acb3-3e86b7232ab2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 901.533121] env[62070]: DEBUG oslo_concurrency.lockutils [req-f0b849d5-ab42-4ba5-bdc3-27cd23508eb8 req-6d559fae-a61e-4af6-869f-8ca4813301e0 service nova] Releasing lock "refresh_cache-559eee5b-0834-4dcf-a436-5e58644c7a3b" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 901.533121] env[62070]: DEBUG nova.compute.manager [req-f0b849d5-ab42-4ba5-bdc3-27cd23508eb8 req-6d559fae-a61e-4af6-869f-8ca4813301e0 service nova] [instance: 559eee5b-0834-4dcf-a436-5e58644c7a3b] Received event network-changed-c24d842f-4fc0-417f-a913-acda1bd7c41b {{(pid=62070) external_instance_event 
/opt/stack/nova/nova/compute/manager.py:11131}} [ 901.533121] env[62070]: DEBUG nova.compute.manager [req-f0b849d5-ab42-4ba5-bdc3-27cd23508eb8 req-6d559fae-a61e-4af6-869f-8ca4813301e0 service nova] [instance: 559eee5b-0834-4dcf-a436-5e58644c7a3b] Refreshing instance network info cache due to event network-changed-c24d842f-4fc0-417f-a913-acda1bd7c41b. {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 901.533121] env[62070]: DEBUG oslo_concurrency.lockutils [req-f0b849d5-ab42-4ba5-bdc3-27cd23508eb8 req-6d559fae-a61e-4af6-869f-8ca4813301e0 service nova] Acquiring lock "refresh_cache-559eee5b-0834-4dcf-a436-5e58644c7a3b" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 901.533121] env[62070]: DEBUG oslo_concurrency.lockutils [req-f0b849d5-ab42-4ba5-bdc3-27cd23508eb8 req-6d559fae-a61e-4af6-869f-8ca4813301e0 service nova] Acquired lock "refresh_cache-559eee5b-0834-4dcf-a436-5e58644c7a3b" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 901.533121] env[62070]: DEBUG nova.network.neutron [req-f0b849d5-ab42-4ba5-bdc3-27cd23508eb8 req-6d559fae-a61e-4af6-869f-8ca4813301e0 service nova] [instance: 559eee5b-0834-4dcf-a436-5e58644c7a3b] Refreshing network info cache for port c24d842f-4fc0-417f-a913-acda1bd7c41b {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 901.654774] env[62070]: DEBUG oslo_vmware.api [None req-e9c130d3-ff3e-4741-9c51-ba262ee7aeaa tempest-ServerPasswordTestJSON-1580526241 tempest-ServerPasswordTestJSON-1580526241-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]520796f7-6bca-67d6-26a5-6c367b45678d, 'name': SearchDatastore_Task, 'duration_secs': 0.009778} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 901.659280] env[62070]: DEBUG oslo_concurrency.lockutils [None req-e9c130d3-ff3e-4741-9c51-ba262ee7aeaa tempest-ServerPasswordTestJSON-1580526241 tempest-ServerPasswordTestJSON-1580526241-project-member] Releasing lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 901.659598] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-e9c130d3-ff3e-4741-9c51-ba262ee7aeaa tempest-ServerPasswordTestJSON-1580526241 tempest-ServerPasswordTestJSON-1580526241-project-member] [instance: e51d0146-502a-4ace-856e-b0dbcc11edea] Processing image 43ea607c-7ece-4601-9b11-75c6a16aa7dd {{(pid=62070) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 901.659855] env[62070]: DEBUG oslo_concurrency.lockutils [None req-e9c130d3-ff3e-4741-9c51-ba262ee7aeaa tempest-ServerPasswordTestJSON-1580526241 tempest-ServerPasswordTestJSON-1580526241-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 901.660087] env[62070]: DEBUG oslo_concurrency.lockutils [None req-e9c130d3-ff3e-4741-9c51-ba262ee7aeaa tempest-ServerPasswordTestJSON-1580526241 tempest-ServerPasswordTestJSON-1580526241-project-member] Acquired lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 901.660370] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-e9c130d3-ff3e-4741-9c51-ba262ee7aeaa tempest-ServerPasswordTestJSON-1580526241 tempest-ServerPasswordTestJSON-1580526241-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 901.660672] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f337be7c-6de9-4b68-bcbb-a35abd62e363 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.670387] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-e9c130d3-ff3e-4741-9c51-ba262ee7aeaa tempest-ServerPasswordTestJSON-1580526241 tempest-ServerPasswordTestJSON-1580526241-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 901.671140] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-e9c130d3-ff3e-4741-9c51-ba262ee7aeaa tempest-ServerPasswordTestJSON-1580526241 tempest-ServerPasswordTestJSON-1580526241-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62070) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 901.671937] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-df764049-4897-43c2-8107-f47fa33af7aa {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.678432] env[62070]: DEBUG oslo_vmware.api [None req-e9c130d3-ff3e-4741-9c51-ba262ee7aeaa tempest-ServerPasswordTestJSON-1580526241 tempest-ServerPasswordTestJSON-1580526241-project-member] Waiting for the task: (returnval){ [ 901.678432] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]528581aa-560c-f740-ab4f-ca7ebb184c7a" [ 901.678432] env[62070]: _type = "Task" [ 901.678432] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 901.687615] env[62070]: DEBUG oslo_vmware.api [None req-e9c130d3-ff3e-4741-9c51-ba262ee7aeaa tempest-ServerPasswordTestJSON-1580526241 tempest-ServerPasswordTestJSON-1580526241-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]528581aa-560c-f740-ab4f-ca7ebb184c7a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 901.799013] env[62070]: INFO nova.compute.resource_tracker [None req-95a98976-70b0-4bbd-a6bf-2286c5bf4562 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: fb054a32-c1aa-4884-a087-da5ad34cf3c4] Updating resource usage from migration 06237253-febe-4e76-9585-3c95ef9cd7fe [ 902.035562] env[62070]: DEBUG oslo_concurrency.lockutils [req-0a16980d-5b47-4f1c-a5a9-1bb6f473ebca req-c7f4be80-4ed3-4d5a-9edb-91f48e8074d8 service nova] Releasing lock "refresh_cache-7dc27fe6-495f-498d-88fe-a99ddc19a21c" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 902.139992] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6c839f8-b43d-4907-b5ba-b2945040c060 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.152235] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-461ce550-266f-4901-b728-a14c33f9c078 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.190282] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e76ba6a1-9262-4629-84d3-8f92b453b443 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.202173] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a914b5fd-129d-4e75-a026-9392a9e83f88 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.207179] env[62070]: DEBUG oslo_vmware.api [None req-e9c130d3-ff3e-4741-9c51-ba262ee7aeaa tempest-ServerPasswordTestJSON-1580526241 tempest-ServerPasswordTestJSON-1580526241-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]528581aa-560c-f740-ab4f-ca7ebb184c7a, 'name': SearchDatastore_Task, 'duration_secs': 0.009667} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 902.208558] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bd4ca9cb-11ff-42f9-9e5c-ddac4152892f {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.220391] env[62070]: DEBUG nova.compute.provider_tree [None req-95a98976-70b0-4bbd-a6bf-2286c5bf4562 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 902.228637] env[62070]: DEBUG oslo_vmware.api [None req-e9c130d3-ff3e-4741-9c51-ba262ee7aeaa tempest-ServerPasswordTestJSON-1580526241 tempest-ServerPasswordTestJSON-1580526241-project-member] Waiting for the task: (returnval){ [ 902.228637] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]52e08d63-6ffc-4b38-a137-76af66811623" [ 902.228637] env[62070]: _type = "Task" [ 902.228637] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 902.238335] env[62070]: DEBUG oslo_vmware.api [None req-e9c130d3-ff3e-4741-9c51-ba262ee7aeaa tempest-ServerPasswordTestJSON-1580526241 tempest-ServerPasswordTestJSON-1580526241-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52e08d63-6ffc-4b38-a137-76af66811623, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 902.359768] env[62070]: DEBUG nova.network.neutron [req-f0b849d5-ab42-4ba5-bdc3-27cd23508eb8 req-6d559fae-a61e-4af6-869f-8ca4813301e0 service nova] [instance: 559eee5b-0834-4dcf-a436-5e58644c7a3b] Updated VIF entry in instance network info cache for port c24d842f-4fc0-417f-a913-acda1bd7c41b. 
{{(pid=62070) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 902.360184] env[62070]: DEBUG nova.network.neutron [req-f0b849d5-ab42-4ba5-bdc3-27cd23508eb8 req-6d559fae-a61e-4af6-869f-8ca4813301e0 service nova] [instance: 559eee5b-0834-4dcf-a436-5e58644c7a3b] Updating instance_info_cache with network_info: [{"id": "c24d842f-4fc0-417f-a913-acda1bd7c41b", "address": "fa:16:3e:9d:d6:f1", "network": {"id": "f4af58a1-85b4-4c02-814f-2896adf35801", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-859480877-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "647582b6f4a048aea74c761f4e136a34", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "27138a4c-60c9-45fb-bf37-4c2f765315a3", "external-id": "nsx-vlan-transportzone-736", "segmentation_id": 736, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc24d842f-4f", "ovs_interfaceid": "c24d842f-4fc0-417f-a913-acda1bd7c41b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 902.421399] env[62070]: DEBUG nova.compute.manager [req-07e217cd-5158-4b11-a859-cd7af736cc54 req-a9ac2022-9feb-4f45-990a-4b486149db5f service nova] [instance: 559eee5b-0834-4dcf-a436-5e58644c7a3b] Received event network-changed-c24d842f-4fc0-417f-a913-acda1bd7c41b {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 902.421710] env[62070]: DEBUG nova.compute.manager [req-07e217cd-5158-4b11-a859-cd7af736cc54 req-a9ac2022-9feb-4f45-990a-4b486149db5f service nova] [instance: 559eee5b-0834-4dcf-a436-5e58644c7a3b] Refreshing instance network info cache due to event network-changed-c24d842f-4fc0-417f-a913-acda1bd7c41b. 
{{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 902.421788] env[62070]: DEBUG oslo_concurrency.lockutils [req-07e217cd-5158-4b11-a859-cd7af736cc54 req-a9ac2022-9feb-4f45-990a-4b486149db5f service nova] Acquiring lock "refresh_cache-559eee5b-0834-4dcf-a436-5e58644c7a3b" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 902.724388] env[62070]: DEBUG nova.scheduler.client.report [None req-95a98976-70b0-4bbd-a6bf-2286c5bf4562 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 902.741083] env[62070]: DEBUG oslo_vmware.api [None req-e9c130d3-ff3e-4741-9c51-ba262ee7aeaa tempest-ServerPasswordTestJSON-1580526241 tempest-ServerPasswordTestJSON-1580526241-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52e08d63-6ffc-4b38-a137-76af66811623, 'name': SearchDatastore_Task, 'duration_secs': 0.038911} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 902.741682] env[62070]: DEBUG oslo_concurrency.lockutils [None req-e9c130d3-ff3e-4741-9c51-ba262ee7aeaa tempest-ServerPasswordTestJSON-1580526241 tempest-ServerPasswordTestJSON-1580526241-project-member] Releasing lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 902.741834] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-e9c130d3-ff3e-4741-9c51-ba262ee7aeaa tempest-ServerPasswordTestJSON-1580526241 tempest-ServerPasswordTestJSON-1580526241-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore2] e51d0146-502a-4ace-856e-b0dbcc11edea/e51d0146-502a-4ace-856e-b0dbcc11edea.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 902.742166] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b94b3a74-81a9-4078-b6fe-554f7c06868e {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.750011] env[62070]: DEBUG oslo_vmware.api [None req-e9c130d3-ff3e-4741-9c51-ba262ee7aeaa tempest-ServerPasswordTestJSON-1580526241 tempest-ServerPasswordTestJSON-1580526241-project-member] Waiting for the task: (returnval){ [ 902.750011] env[62070]: value = "task-1122029" [ 902.750011] env[62070]: _type = "Task" [ 902.750011] env[62070]: } to complete. 
{{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 902.759593] env[62070]: DEBUG oslo_vmware.api [None req-e9c130d3-ff3e-4741-9c51-ba262ee7aeaa tempest-ServerPasswordTestJSON-1580526241 tempest-ServerPasswordTestJSON-1580526241-project-member] Task: {'id': task-1122029, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 902.862706] env[62070]: DEBUG oslo_concurrency.lockutils [req-f0b849d5-ab42-4ba5-bdc3-27cd23508eb8 req-6d559fae-a61e-4af6-869f-8ca4813301e0 service nova] Releasing lock "refresh_cache-559eee5b-0834-4dcf-a436-5e58644c7a3b" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 902.863215] env[62070]: DEBUG oslo_concurrency.lockutils [req-07e217cd-5158-4b11-a859-cd7af736cc54 req-a9ac2022-9feb-4f45-990a-4b486149db5f service nova] Acquired lock "refresh_cache-559eee5b-0834-4dcf-a436-5e58644c7a3b" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 902.863435] env[62070]: DEBUG nova.network.neutron [req-07e217cd-5158-4b11-a859-cd7af736cc54 req-a9ac2022-9feb-4f45-990a-4b486149db5f service nova] [instance: 559eee5b-0834-4dcf-a436-5e58644c7a3b] Refreshing network info cache for port c24d842f-4fc0-417f-a913-acda1bd7c41b {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 903.205297] env[62070]: DEBUG oslo_concurrency.lockutils [None req-a559e031-119e-4aae-aec9-40a6a2a17343 tempest-ServerRescueTestJSONUnderV235-557690179 tempest-ServerRescueTestJSONUnderV235-557690179-project-member] Acquiring lock "559eee5b-0834-4dcf-a436-5e58644c7a3b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 903.205608] env[62070]: DEBUG oslo_concurrency.lockutils [None req-a559e031-119e-4aae-aec9-40a6a2a17343 tempest-ServerRescueTestJSONUnderV235-557690179 tempest-ServerRescueTestJSONUnderV235-557690179-project-member] Lock "559eee5b-0834-4dcf-a436-5e58644c7a3b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 903.205862] env[62070]: DEBUG oslo_concurrency.lockutils [None req-a559e031-119e-4aae-aec9-40a6a2a17343 tempest-ServerRescueTestJSONUnderV235-557690179 tempest-ServerRescueTestJSONUnderV235-557690179-project-member] Acquiring lock "559eee5b-0834-4dcf-a436-5e58644c7a3b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 903.206083] env[62070]: DEBUG oslo_concurrency.lockutils [None req-a559e031-119e-4aae-aec9-40a6a2a17343 tempest-ServerRescueTestJSONUnderV235-557690179 tempest-ServerRescueTestJSONUnderV235-557690179-project-member] Lock "559eee5b-0834-4dcf-a436-5e58644c7a3b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 903.206273] env[62070]: DEBUG oslo_concurrency.lockutils [None req-a559e031-119e-4aae-aec9-40a6a2a17343 tempest-ServerRescueTestJSONUnderV235-557690179 
tempest-ServerRescueTestJSONUnderV235-557690179-project-member] Lock "559eee5b-0834-4dcf-a436-5e58644c7a3b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 903.209026] env[62070]: INFO nova.compute.manager [None req-a559e031-119e-4aae-aec9-40a6a2a17343 tempest-ServerRescueTestJSONUnderV235-557690179 tempest-ServerRescueTestJSONUnderV235-557690179-project-member] [instance: 559eee5b-0834-4dcf-a436-5e58644c7a3b] Terminating instance [ 903.211501] env[62070]: DEBUG nova.compute.manager [None req-a559e031-119e-4aae-aec9-40a6a2a17343 tempest-ServerRescueTestJSONUnderV235-557690179 tempest-ServerRescueTestJSONUnderV235-557690179-project-member] [instance: 559eee5b-0834-4dcf-a436-5e58644c7a3b] Start destroying the instance on the hypervisor. {{(pid=62070) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 903.211725] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-a559e031-119e-4aae-aec9-40a6a2a17343 tempest-ServerRescueTestJSONUnderV235-557690179 tempest-ServerRescueTestJSONUnderV235-557690179-project-member] [instance: 559eee5b-0834-4dcf-a436-5e58644c7a3b] Destroying instance {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 903.212571] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-320f3f76-92dd-4ee7-aa3a-af38a5893b19 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.221635] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-a559e031-119e-4aae-aec9-40a6a2a17343 tempest-ServerRescueTestJSONUnderV235-557690179 tempest-ServerRescueTestJSONUnderV235-557690179-project-member] [instance: 559eee5b-0834-4dcf-a436-5e58644c7a3b] Powering off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 903.221875] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-bfa7a423-2749-48b8-94b3-a61ea18e87ec {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.228601] env[62070]: DEBUG oslo_concurrency.lockutils [None req-95a98976-70b0-4bbd-a6bf-2286c5bf4562 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.444s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 903.228813] env[62070]: INFO nova.compute.manager [None req-95a98976-70b0-4bbd-a6bf-2286c5bf4562 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: fb054a32-c1aa-4884-a087-da5ad34cf3c4] Migrating [ 903.235559] env[62070]: DEBUG oslo_vmware.api [None req-a559e031-119e-4aae-aec9-40a6a2a17343 tempest-ServerRescueTestJSONUnderV235-557690179 tempest-ServerRescueTestJSONUnderV235-557690179-project-member] Waiting for the task: (returnval){ [ 903.235559] env[62070]: value = "task-1122030" [ 903.235559] env[62070]: _type = "Task" [ 903.235559] env[62070]: } to complete. 
{{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 903.240050] env[62070]: DEBUG oslo_concurrency.lockutils [None req-ed858abe-6e77-488f-b72a-5a07c1e7c264 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 29.732s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 903.240412] env[62070]: INFO nova.compute.claims [None req-ed858abe-6e77-488f-b72a-5a07c1e7c264 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: 3d699ce5-4d21-48f3-8f17-0cd49aebf109] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 903.266470] env[62070]: DEBUG oslo_vmware.api [None req-e9c130d3-ff3e-4741-9c51-ba262ee7aeaa tempest-ServerPasswordTestJSON-1580526241 tempest-ServerPasswordTestJSON-1580526241-project-member] Task: {'id': task-1122029, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.475642} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 903.267138] env[62070]: DEBUG oslo_vmware.api [None req-a559e031-119e-4aae-aec9-40a6a2a17343 tempest-ServerRescueTestJSONUnderV235-557690179 tempest-ServerRescueTestJSONUnderV235-557690179-project-member] Task: {'id': task-1122030, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 903.267909] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-e9c130d3-ff3e-4741-9c51-ba262ee7aeaa tempest-ServerPasswordTestJSON-1580526241 tempest-ServerPasswordTestJSON-1580526241-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore2] e51d0146-502a-4ace-856e-b0dbcc11edea/e51d0146-502a-4ace-856e-b0dbcc11edea.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 903.268155] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-e9c130d3-ff3e-4741-9c51-ba262ee7aeaa tempest-ServerPasswordTestJSON-1580526241 tempest-ServerPasswordTestJSON-1580526241-project-member] [instance: e51d0146-502a-4ace-856e-b0dbcc11edea] Extending root virtual disk to 1048576 {{(pid=62070) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 903.268418] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-cafebffa-744b-4e92-9e7b-c740074dfa64 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.277626] env[62070]: DEBUG oslo_vmware.api [None req-e9c130d3-ff3e-4741-9c51-ba262ee7aeaa tempest-ServerPasswordTestJSON-1580526241 tempest-ServerPasswordTestJSON-1580526241-project-member] Waiting for the task: (returnval){ [ 903.277626] env[62070]: value = "task-1122031" [ 903.277626] env[62070]: _type = "Task" [ 903.277626] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 903.288019] env[62070]: DEBUG oslo_vmware.api [None req-e9c130d3-ff3e-4741-9c51-ba262ee7aeaa tempest-ServerPasswordTestJSON-1580526241 tempest-ServerPasswordTestJSON-1580526241-project-member] Task: {'id': task-1122031, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 903.667094] env[62070]: DEBUG nova.network.neutron [req-07e217cd-5158-4b11-a859-cd7af736cc54 req-a9ac2022-9feb-4f45-990a-4b486149db5f service nova] [instance: 559eee5b-0834-4dcf-a436-5e58644c7a3b] Updated VIF entry in instance network info cache for port c24d842f-4fc0-417f-a913-acda1bd7c41b. {{(pid=62070) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 903.668485] env[62070]: DEBUG nova.network.neutron [req-07e217cd-5158-4b11-a859-cd7af736cc54 req-a9ac2022-9feb-4f45-990a-4b486149db5f service nova] [instance: 559eee5b-0834-4dcf-a436-5e58644c7a3b] Updating instance_info_cache with network_info: [{"id": "c24d842f-4fc0-417f-a913-acda1bd7c41b", "address": "fa:16:3e:9d:d6:f1", "network": {"id": "f4af58a1-85b4-4c02-814f-2896adf35801", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-859480877-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "647582b6f4a048aea74c761f4e136a34", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "27138a4c-60c9-45fb-bf37-4c2f765315a3", "external-id": "nsx-vlan-transportzone-736", "segmentation_id": 736, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc24d842f-4f", "ovs_interfaceid": "c24d842f-4fc0-417f-a913-acda1bd7c41b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 903.754512] env[62070]: DEBUG oslo_concurrency.lockutils [None req-95a98976-70b0-4bbd-a6bf-2286c5bf4562 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Acquiring lock "refresh_cache-fb054a32-c1aa-4884-a087-da5ad34cf3c4" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 903.754512] env[62070]: DEBUG oslo_concurrency.lockutils [None req-95a98976-70b0-4bbd-a6bf-2286c5bf4562 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Acquired lock "refresh_cache-fb054a32-c1aa-4884-a087-da5ad34cf3c4" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 903.754512] env[62070]: DEBUG nova.network.neutron [None req-95a98976-70b0-4bbd-a6bf-2286c5bf4562 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: fb054a32-c1aa-4884-a087-da5ad34cf3c4] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 903.755595] env[62070]: DEBUG oslo_vmware.api [None req-a559e031-119e-4aae-aec9-40a6a2a17343 tempest-ServerRescueTestJSONUnderV235-557690179 tempest-ServerRescueTestJSONUnderV235-557690179-project-member] Task: {'id': task-1122030, 'name': PowerOffVM_Task, 'duration_secs': 0.240251} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 903.756269] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-a559e031-119e-4aae-aec9-40a6a2a17343 tempest-ServerRescueTestJSONUnderV235-557690179 tempest-ServerRescueTestJSONUnderV235-557690179-project-member] [instance: 559eee5b-0834-4dcf-a436-5e58644c7a3b] Powered off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 903.756559] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-a559e031-119e-4aae-aec9-40a6a2a17343 tempest-ServerRescueTestJSONUnderV235-557690179 tempest-ServerRescueTestJSONUnderV235-557690179-project-member] [instance: 559eee5b-0834-4dcf-a436-5e58644c7a3b] Unregistering the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 903.757064] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4ff0d90a-968a-4750-9646-6829a016d519 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.787329] env[62070]: DEBUG oslo_vmware.api [None req-e9c130d3-ff3e-4741-9c51-ba262ee7aeaa tempest-ServerPasswordTestJSON-1580526241 tempest-ServerPasswordTestJSON-1580526241-project-member] Task: {'id': task-1122031, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.061261} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 903.787674] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-e9c130d3-ff3e-4741-9c51-ba262ee7aeaa tempest-ServerPasswordTestJSON-1580526241 tempest-ServerPasswordTestJSON-1580526241-project-member] [instance: e51d0146-502a-4ace-856e-b0dbcc11edea] Extended root virtual disk {{(pid=62070) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 903.788868] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ce292f9-c9ad-46df-b554-c791803aae78 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.811961] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-e9c130d3-ff3e-4741-9c51-ba262ee7aeaa tempest-ServerPasswordTestJSON-1580526241 tempest-ServerPasswordTestJSON-1580526241-project-member] [instance: e51d0146-502a-4ace-856e-b0dbcc11edea] Reconfiguring VM instance instance-0000004f to attach disk [datastore2] e51d0146-502a-4ace-856e-b0dbcc11edea/e51d0146-502a-4ace-856e-b0dbcc11edea.vmdk or device None with type sparse {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 903.812537] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b426162e-cd0b-4eef-9f9b-b9f5a065937d {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.829375] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-a559e031-119e-4aae-aec9-40a6a2a17343 tempest-ServerRescueTestJSONUnderV235-557690179 tempest-ServerRescueTestJSONUnderV235-557690179-project-member] [instance: 559eee5b-0834-4dcf-a436-5e58644c7a3b] Unregistered the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 903.829612] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-a559e031-119e-4aae-aec9-40a6a2a17343 tempest-ServerRescueTestJSONUnderV235-557690179 tempest-ServerRescueTestJSONUnderV235-557690179-project-member] [instance: 
559eee5b-0834-4dcf-a436-5e58644c7a3b] Deleting contents of the VM from datastore datastore2 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 903.829798] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-a559e031-119e-4aae-aec9-40a6a2a17343 tempest-ServerRescueTestJSONUnderV235-557690179 tempest-ServerRescueTestJSONUnderV235-557690179-project-member] Deleting the datastore file [datastore2] 559eee5b-0834-4dcf-a436-5e58644c7a3b {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 903.830483] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-43300e60-7d72-4e5a-bfd2-b15d96f02b9c {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.837897] env[62070]: DEBUG oslo_vmware.api [None req-a559e031-119e-4aae-aec9-40a6a2a17343 tempest-ServerRescueTestJSONUnderV235-557690179 tempest-ServerRescueTestJSONUnderV235-557690179-project-member] Waiting for the task: (returnval){ [ 903.837897] env[62070]: value = "task-1122034" [ 903.837897] env[62070]: _type = "Task" [ 903.837897] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 903.838263] env[62070]: DEBUG oslo_vmware.api [None req-e9c130d3-ff3e-4741-9c51-ba262ee7aeaa tempest-ServerPasswordTestJSON-1580526241 tempest-ServerPasswordTestJSON-1580526241-project-member] Waiting for the task: (returnval){ [ 903.838263] env[62070]: value = "task-1122033" [ 903.838263] env[62070]: _type = "Task" [ 903.838263] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 903.850322] env[62070]: DEBUG oslo_vmware.api [None req-a559e031-119e-4aae-aec9-40a6a2a17343 tempest-ServerRescueTestJSONUnderV235-557690179 tempest-ServerRescueTestJSONUnderV235-557690179-project-member] Task: {'id': task-1122034, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 903.853480] env[62070]: DEBUG oslo_vmware.api [None req-e9c130d3-ff3e-4741-9c51-ba262ee7aeaa tempest-ServerPasswordTestJSON-1580526241 tempest-ServerPasswordTestJSON-1580526241-project-member] Task: {'id': task-1122033, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 904.173764] env[62070]: DEBUG oslo_concurrency.lockutils [req-07e217cd-5158-4b11-a859-cd7af736cc54 req-a9ac2022-9feb-4f45-990a-4b486149db5f service nova] Releasing lock "refresh_cache-559eee5b-0834-4dcf-a436-5e58644c7a3b" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 904.361139] env[62070]: DEBUG oslo_vmware.api [None req-e9c130d3-ff3e-4741-9c51-ba262ee7aeaa tempest-ServerPasswordTestJSON-1580526241 tempest-ServerPasswordTestJSON-1580526241-project-member] Task: {'id': task-1122033, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 904.361527] env[62070]: DEBUG oslo_vmware.api [None req-a559e031-119e-4aae-aec9-40a6a2a17343 tempest-ServerRescueTestJSONUnderV235-557690179 tempest-ServerRescueTestJSONUnderV235-557690179-project-member] Task: {'id': task-1122034, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 904.497249] env[62070]: DEBUG nova.network.neutron [None req-95a98976-70b0-4bbd-a6bf-2286c5bf4562 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: fb054a32-c1aa-4884-a087-da5ad34cf3c4] Updating instance_info_cache with network_info: [{"id": "32f47285-7ff0-405e-849d-27e73999e359", "address": "fa:16:3e:dd:6e:ec", "network": {"id": "4888f989-958d-49ff-bf5a-06873e4cc624", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-906255456-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d079c0ef3ed745fcaf69dc728dca4466", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7e0240aa-a694-48fc-a0f9-6f2d3e71aa12", "external-id": "nsx-vlan-transportzone-249", "segmentation_id": 249, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap32f47285-7f", "ovs_interfaceid": "32f47285-7ff0-405e-849d-27e73999e359", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 904.559160] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c5270a5-47f1-40db-adc3-773dfa09a573 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.569349] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43e105cc-85b8-4c3a-b056-38360027050f {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.599928] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0372a201-8efb-44e7-88d0-a172a8108fc9 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.607960] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58ff180f-7271-494f-b6ce-a992b9796f6c {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.621421] env[62070]: DEBUG nova.compute.provider_tree [None req-ed858abe-6e77-488f-b72a-5a07c1e7c264 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Updating inventory in ProviderTree for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 904.744799] env[62070]: DEBUG oslo_concurrency.lockutils 
[None req-11f65eb1-44d2-44c1-8a02-a34481cdec91 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] Acquiring lock "1440361b-d3b4-4c1c-995c-fe7ff99ee297" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 904.744799] env[62070]: DEBUG oslo_concurrency.lockutils [None req-11f65eb1-44d2-44c1-8a02-a34481cdec91 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] Lock "1440361b-d3b4-4c1c-995c-fe7ff99ee297" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 904.853391] env[62070]: DEBUG oslo_vmware.api [None req-e9c130d3-ff3e-4741-9c51-ba262ee7aeaa tempest-ServerPasswordTestJSON-1580526241 tempest-ServerPasswordTestJSON-1580526241-project-member] Task: {'id': task-1122033, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 904.856469] env[62070]: DEBUG oslo_vmware.api [None req-a559e031-119e-4aae-aec9-40a6a2a17343 tempest-ServerRescueTestJSONUnderV235-557690179 tempest-ServerRescueTestJSONUnderV235-557690179-project-member] Task: {'id': task-1122034, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.987646} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 904.856693] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-a559e031-119e-4aae-aec9-40a6a2a17343 tempest-ServerRescueTestJSONUnderV235-557690179 tempest-ServerRescueTestJSONUnderV235-557690179-project-member] Deleted the datastore file {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 904.856883] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-a559e031-119e-4aae-aec9-40a6a2a17343 tempest-ServerRescueTestJSONUnderV235-557690179 tempest-ServerRescueTestJSONUnderV235-557690179-project-member] [instance: 559eee5b-0834-4dcf-a436-5e58644c7a3b] Deleted contents of the VM from datastore datastore2 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 904.857115] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-a559e031-119e-4aae-aec9-40a6a2a17343 tempest-ServerRescueTestJSONUnderV235-557690179 tempest-ServerRescueTestJSONUnderV235-557690179-project-member] [instance: 559eee5b-0834-4dcf-a436-5e58644c7a3b] Instance destroyed {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 904.857303] env[62070]: INFO nova.compute.manager [None req-a559e031-119e-4aae-aec9-40a6a2a17343 tempest-ServerRescueTestJSONUnderV235-557690179 tempest-ServerRescueTestJSONUnderV235-557690179-project-member] [instance: 559eee5b-0834-4dcf-a436-5e58644c7a3b] Took 1.65 seconds to destroy the instance on the hypervisor. [ 904.857548] env[62070]: DEBUG oslo.service.loopingcall [None req-a559e031-119e-4aae-aec9-40a6a2a17343 tempest-ServerRescueTestJSONUnderV235-557690179 tempest-ServerRescueTestJSONUnderV235-557690179-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 904.857760] env[62070]: DEBUG nova.compute.manager [-] [instance: 559eee5b-0834-4dcf-a436-5e58644c7a3b] Deallocating network for instance {{(pid=62070) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 904.857859] env[62070]: DEBUG nova.network.neutron [-] [instance: 559eee5b-0834-4dcf-a436-5e58644c7a3b] deallocate_for_instance() {{(pid=62070) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 904.999801] env[62070]: DEBUG oslo_concurrency.lockutils [None req-95a98976-70b0-4bbd-a6bf-2286c5bf4562 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Releasing lock "refresh_cache-fb054a32-c1aa-4884-a087-da5ad34cf3c4" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 905.140397] env[62070]: DEBUG nova.compute.manager [req-31a7d9bc-aa0c-4e0e-a977-ec6149cb458e req-03acdb91-f47c-4ed9-9d4e-7422e4f24f53 service nova] [instance: 559eee5b-0834-4dcf-a436-5e58644c7a3b] Received event network-vif-deleted-c24d842f-4fc0-417f-a913-acda1bd7c41b {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 905.140619] env[62070]: INFO nova.compute.manager [req-31a7d9bc-aa0c-4e0e-a977-ec6149cb458e req-03acdb91-f47c-4ed9-9d4e-7422e4f24f53 service nova] [instance: 559eee5b-0834-4dcf-a436-5e58644c7a3b] Neutron deleted interface c24d842f-4fc0-417f-a913-acda1bd7c41b; detaching it from the instance and deleting it from the info cache [ 905.140768] env[62070]: DEBUG nova.network.neutron [req-31a7d9bc-aa0c-4e0e-a977-ec6149cb458e req-03acdb91-f47c-4ed9-9d4e-7422e4f24f53 service nova] [instance: 559eee5b-0834-4dcf-a436-5e58644c7a3b] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 905.143172] env[62070]: ERROR nova.scheduler.client.report [None req-ed858abe-6e77-488f-b72a-5a07c1e7c264 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [req-579dea89-2902-4a5a-b39d-4c55116a5e0b] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 21c7c111-1b69-4468-b2c4-5dd96014fbd6. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-579dea89-2902-4a5a-b39d-4c55116a5e0b"}]} [ 905.158882] env[62070]: DEBUG nova.scheduler.client.report [None req-ed858abe-6e77-488f-b72a-5a07c1e7c264 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Refreshing inventories for resource provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 905.171830] env[62070]: DEBUG nova.scheduler.client.report [None req-ed858abe-6e77-488f-b72a-5a07c1e7c264 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Updating ProviderTree inventory for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 905.172068] env[62070]: DEBUG nova.compute.provider_tree [None req-ed858abe-6e77-488f-b72a-5a07c1e7c264 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Updating inventory in ProviderTree for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 905.181856] env[62070]: DEBUG nova.scheduler.client.report [None req-ed858abe-6e77-488f-b72a-5a07c1e7c264 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Refreshing aggregate associations for resource provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6, aggregates: None {{(pid=62070) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 905.200707] env[62070]: DEBUG nova.scheduler.client.report [None req-ed858abe-6e77-488f-b72a-5a07c1e7c264 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Refreshing trait associations for resource provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6, traits: COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO {{(pid=62070) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 905.246615] env[62070]: DEBUG nova.compute.manager [None req-11f65eb1-44d2-44c1-8a02-a34481cdec91 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] [instance: 1440361b-d3b4-4c1c-995c-fe7ff99ee297] Starting instance... 
{{(pid=62070) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 905.349436] env[62070]: DEBUG oslo_vmware.api [None req-e9c130d3-ff3e-4741-9c51-ba262ee7aeaa tempest-ServerPasswordTestJSON-1580526241 tempest-ServerPasswordTestJSON-1580526241-project-member] Task: {'id': task-1122033, 'name': ReconfigVM_Task, 'duration_secs': 1.079153} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 905.351789] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-e9c130d3-ff3e-4741-9c51-ba262ee7aeaa tempest-ServerPasswordTestJSON-1580526241 tempest-ServerPasswordTestJSON-1580526241-project-member] [instance: e51d0146-502a-4ace-856e-b0dbcc11edea] Reconfigured VM instance instance-0000004f to attach disk [datastore2] e51d0146-502a-4ace-856e-b0dbcc11edea/e51d0146-502a-4ace-856e-b0dbcc11edea.vmdk or device None with type sparse {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 905.352840] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-64020eeb-b835-447e-ab59-15b3a109a0ef {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.360561] env[62070]: DEBUG oslo_vmware.api [None req-e9c130d3-ff3e-4741-9c51-ba262ee7aeaa tempest-ServerPasswordTestJSON-1580526241 tempest-ServerPasswordTestJSON-1580526241-project-member] Waiting for the task: (returnval){ [ 905.360561] env[62070]: value = "task-1122035" [ 905.360561] env[62070]: _type = "Task" [ 905.360561] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 905.371481] env[62070]: DEBUG oslo_vmware.api [None req-e9c130d3-ff3e-4741-9c51-ba262ee7aeaa tempest-ServerPasswordTestJSON-1580526241 tempest-ServerPasswordTestJSON-1580526241-project-member] Task: {'id': task-1122035, 'name': Rename_Task} progress is 5%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 905.478958] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1bb7243-604e-4986-8fd1-9003f8baf9e5 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.487800] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbf185b9-1d10-473e-b754-33a467ae2934 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.524043] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-964ae4a2-b58a-43c3-ace2-93165a4cb45a {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.531625] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc353436-ac46-4ca0-a1d4-2f8ce0181362 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.545581] env[62070]: DEBUG nova.compute.provider_tree [None req-ed858abe-6e77-488f-b72a-5a07c1e7c264 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Updating inventory in ProviderTree for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 905.614481] env[62070]: DEBUG nova.network.neutron [-] [instance: 559eee5b-0834-4dcf-a436-5e58644c7a3b] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 905.643612] env[62070]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f885d30a-158b-4484-b89c-b2a8faf93280 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.653955] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97de87a1-fcdd-4f8e-98ff-5466aa292ad6 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.685479] env[62070]: DEBUG nova.compute.manager [req-31a7d9bc-aa0c-4e0e-a977-ec6149cb458e req-03acdb91-f47c-4ed9-9d4e-7422e4f24f53 service nova] [instance: 559eee5b-0834-4dcf-a436-5e58644c7a3b] Detach interface failed, port_id=c24d842f-4fc0-417f-a913-acda1bd7c41b, reason: Instance 559eee5b-0834-4dcf-a436-5e58644c7a3b could not be found. 
{{(pid=62070) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 905.764557] env[62070]: DEBUG oslo_concurrency.lockutils [None req-11f65eb1-44d2-44c1-8a02-a34481cdec91 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 905.872864] env[62070]: DEBUG oslo_vmware.api [None req-e9c130d3-ff3e-4741-9c51-ba262ee7aeaa tempest-ServerPasswordTestJSON-1580526241 tempest-ServerPasswordTestJSON-1580526241-project-member] Task: {'id': task-1122035, 'name': Rename_Task, 'duration_secs': 0.157372} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 905.873179] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-e9c130d3-ff3e-4741-9c51-ba262ee7aeaa tempest-ServerPasswordTestJSON-1580526241 tempest-ServerPasswordTestJSON-1580526241-project-member] [instance: e51d0146-502a-4ace-856e-b0dbcc11edea] Powering on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 905.873541] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-473c6993-fa74-47d7-8b21-16e04641c046 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.881908] env[62070]: DEBUG oslo_vmware.api [None req-e9c130d3-ff3e-4741-9c51-ba262ee7aeaa tempest-ServerPasswordTestJSON-1580526241 tempest-ServerPasswordTestJSON-1580526241-project-member] Waiting for the task: (returnval){ [ 905.881908] env[62070]: value = "task-1122036" [ 905.881908] env[62070]: _type = "Task" [ 905.881908] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 905.889748] env[62070]: DEBUG oslo_vmware.api [None req-e9c130d3-ff3e-4741-9c51-ba262ee7aeaa tempest-ServerPasswordTestJSON-1580526241 tempest-ServerPasswordTestJSON-1580526241-project-member] Task: {'id': task-1122036, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 906.075344] env[62070]: DEBUG nova.scheduler.client.report [None req-ed858abe-6e77-488f-b72a-5a07c1e7c264 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Updated inventory for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 with generation 107 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 906.075660] env[62070]: DEBUG nova.compute.provider_tree [None req-ed858abe-6e77-488f-b72a-5a07c1e7c264 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Updating resource provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 generation from 107 to 108 during operation: update_inventory {{(pid=62070) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 906.075881] env[62070]: DEBUG nova.compute.provider_tree [None req-ed858abe-6e77-488f-b72a-5a07c1e7c264 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Updating inventory in ProviderTree for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 906.117218] env[62070]: INFO nova.compute.manager [-] [instance: 559eee5b-0834-4dcf-a436-5e58644c7a3b] Took 1.26 seconds to deallocate network for instance. [ 906.394164] env[62070]: DEBUG oslo_vmware.api [None req-e9c130d3-ff3e-4741-9c51-ba262ee7aeaa tempest-ServerPasswordTestJSON-1580526241 tempest-ServerPasswordTestJSON-1580526241-project-member] Task: {'id': task-1122036, 'name': PowerOnVM_Task, 'duration_secs': 0.454738} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 906.394340] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-e9c130d3-ff3e-4741-9c51-ba262ee7aeaa tempest-ServerPasswordTestJSON-1580526241 tempest-ServerPasswordTestJSON-1580526241-project-member] [instance: e51d0146-502a-4ace-856e-b0dbcc11edea] Powered on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 906.394537] env[62070]: INFO nova.compute.manager [None req-e9c130d3-ff3e-4741-9c51-ba262ee7aeaa tempest-ServerPasswordTestJSON-1580526241 tempest-ServerPasswordTestJSON-1580526241-project-member] [instance: e51d0146-502a-4ace-856e-b0dbcc11edea] Took 9.59 seconds to spawn the instance on the hypervisor. 
[ 906.394807] env[62070]: DEBUG nova.compute.manager [None req-e9c130d3-ff3e-4741-9c51-ba262ee7aeaa tempest-ServerPasswordTestJSON-1580526241 tempest-ServerPasswordTestJSON-1580526241-project-member] [instance: e51d0146-502a-4ace-856e-b0dbcc11edea] Checking state {{(pid=62070) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 906.395619] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac3750c9-2f82-49c5-b77b-03d01fd42319 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.534808] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0c78ae6-353f-4631-afdc-c2cf69bb4b72 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.556866] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-95a98976-70b0-4bbd-a6bf-2286c5bf4562 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: fb054a32-c1aa-4884-a087-da5ad34cf3c4] Updating instance 'fb054a32-c1aa-4884-a087-da5ad34cf3c4' progress to 0 {{(pid=62070) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 906.583051] env[62070]: DEBUG oslo_concurrency.lockutils [None req-ed858abe-6e77-488f-b72a-5a07c1e7c264 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.343s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 906.583051] env[62070]: DEBUG nova.compute.manager [None req-ed858abe-6e77-488f-b72a-5a07c1e7c264 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: 3d699ce5-4d21-48f3-8f17-0cd49aebf109] Start building networks asynchronously for instance. 
{{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 906.586144] env[62070]: DEBUG oslo_concurrency.lockutils [None req-fb77bedb-24c5-4fa2-9eae-0bbdb5984976 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 29.654s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 906.586144] env[62070]: DEBUG nova.objects.instance [None req-fb77bedb-24c5-4fa2-9eae-0bbdb5984976 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Lazy-loading 'resources' on Instance uuid c16d175c-0b23-4f72-bdb0-844c6f80fd32 {{(pid=62070) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 906.625035] env[62070]: DEBUG oslo_concurrency.lockutils [None req-a559e031-119e-4aae-aec9-40a6a2a17343 tempest-ServerRescueTestJSONUnderV235-557690179 tempest-ServerRescueTestJSONUnderV235-557690179-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 906.912947] env[62070]: INFO nova.compute.manager [None req-e9c130d3-ff3e-4741-9c51-ba262ee7aeaa tempest-ServerPasswordTestJSON-1580526241 tempest-ServerPasswordTestJSON-1580526241-project-member] [instance: e51d0146-502a-4ace-856e-b0dbcc11edea] Took 37.01 seconds to build instance. [ 907.065523] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-95a98976-70b0-4bbd-a6bf-2286c5bf4562 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: fb054a32-c1aa-4884-a087-da5ad34cf3c4] Powering off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 907.065847] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ff7b70fa-f101-4823-bbd2-6f11b3d9b87a {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.074068] env[62070]: DEBUG oslo_vmware.api [None req-95a98976-70b0-4bbd-a6bf-2286c5bf4562 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Waiting for the task: (returnval){ [ 907.074068] env[62070]: value = "task-1122037" [ 907.074068] env[62070]: _type = "Task" [ 907.074068] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 907.083906] env[62070]: DEBUG oslo_vmware.api [None req-95a98976-70b0-4bbd-a6bf-2286c5bf4562 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Task: {'id': task-1122037, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 907.088482] env[62070]: DEBUG nova.compute.utils [None req-ed858abe-6e77-488f-b72a-5a07c1e7c264 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Using /dev/sd instead of None {{(pid=62070) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 907.092721] env[62070]: DEBUG nova.compute.manager [None req-ed858abe-6e77-488f-b72a-5a07c1e7c264 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: 3d699ce5-4d21-48f3-8f17-0cd49aebf109] Allocating IP information in the background. {{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 907.092915] env[62070]: DEBUG nova.network.neutron [None req-ed858abe-6e77-488f-b72a-5a07c1e7c264 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: 3d699ce5-4d21-48f3-8f17-0cd49aebf109] allocate_for_instance() {{(pid=62070) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 907.144500] env[62070]: DEBUG nova.policy [None req-ed858abe-6e77-488f-b72a-5a07c1e7c264 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'db9baf29d0b5489da2657286bfd695c0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '91e246e32f29422e90fae974cfee9d8f', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62070) authorize /opt/stack/nova/nova/policy.py:201}} [ 907.292955] env[62070]: DEBUG oslo_concurrency.lockutils [None req-4e7b3e9b-a4fd-4941-87c2-296d13c2b794 tempest-ServerPasswordTestJSON-1580526241 tempest-ServerPasswordTestJSON-1580526241-project-member] Acquiring lock "e51d0146-502a-4ace-856e-b0dbcc11edea" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 907.411689] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54ab0ac3-cfc2-4470-9549-c8eaa64c00dc {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.419382] env[62070]: DEBUG oslo_concurrency.lockutils [None req-e9c130d3-ff3e-4741-9c51-ba262ee7aeaa tempest-ServerPasswordTestJSON-1580526241 tempest-ServerPasswordTestJSON-1580526241-project-member] Lock "e51d0146-502a-4ace-856e-b0dbcc11edea" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 38.527s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 907.419513] env[62070]: DEBUG oslo_concurrency.lockutils [None req-4e7b3e9b-a4fd-4941-87c2-296d13c2b794 tempest-ServerPasswordTestJSON-1580526241 tempest-ServerPasswordTestJSON-1580526241-project-member] Lock "e51d0146-502a-4ace-856e-b0dbcc11edea" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.127s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 907.419725] env[62070]: DEBUG oslo_concurrency.lockutils 
[None req-4e7b3e9b-a4fd-4941-87c2-296d13c2b794 tempest-ServerPasswordTestJSON-1580526241 tempest-ServerPasswordTestJSON-1580526241-project-member] Acquiring lock "e51d0146-502a-4ace-856e-b0dbcc11edea-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 907.420531] env[62070]: DEBUG oslo_concurrency.lockutils [None req-4e7b3e9b-a4fd-4941-87c2-296d13c2b794 tempest-ServerPasswordTestJSON-1580526241 tempest-ServerPasswordTestJSON-1580526241-project-member] Lock "e51d0146-502a-4ace-856e-b0dbcc11edea-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 907.420531] env[62070]: DEBUG oslo_concurrency.lockutils [None req-4e7b3e9b-a4fd-4941-87c2-296d13c2b794 tempest-ServerPasswordTestJSON-1580526241 tempest-ServerPasswordTestJSON-1580526241-project-member] Lock "e51d0146-502a-4ace-856e-b0dbcc11edea-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 907.422722] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d71415ac-1898-4b2f-a6ba-be1a366c5f47 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.426021] env[62070]: INFO nova.compute.manager [None req-4e7b3e9b-a4fd-4941-87c2-296d13c2b794 tempest-ServerPasswordTestJSON-1580526241 tempest-ServerPasswordTestJSON-1580526241-project-member] [instance: e51d0146-502a-4ace-856e-b0dbcc11edea] Terminating instance [ 907.427951] env[62070]: DEBUG nova.compute.manager [None req-4e7b3e9b-a4fd-4941-87c2-296d13c2b794 tempest-ServerPasswordTestJSON-1580526241 tempest-ServerPasswordTestJSON-1580526241-project-member] [instance: e51d0146-502a-4ace-856e-b0dbcc11edea] Start destroying the instance on the hypervisor. 
{{(pid=62070) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 907.428167] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-4e7b3e9b-a4fd-4941-87c2-296d13c2b794 tempest-ServerPasswordTestJSON-1580526241 tempest-ServerPasswordTestJSON-1580526241-project-member] [instance: e51d0146-502a-4ace-856e-b0dbcc11edea] Destroying instance {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 907.429880] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94322bb9-cf83-40be-b58f-492e76032a21 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.458603] env[62070]: DEBUG nova.network.neutron [None req-ed858abe-6e77-488f-b72a-5a07c1e7c264 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: 3d699ce5-4d21-48f3-8f17-0cd49aebf109] Successfully created port: 4ee007f5-ba54-4f18-b072-e8c7119e6eb2 {{(pid=62070) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 907.461483] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-306c2528-1361-4a07-8dd3-d239c2e103a1 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.466362] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-4e7b3e9b-a4fd-4941-87c2-296d13c2b794 tempest-ServerPasswordTestJSON-1580526241 tempest-ServerPasswordTestJSON-1580526241-project-member] [instance: e51d0146-502a-4ace-856e-b0dbcc11edea] Powering off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 907.466918] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d453d450-2f7e-4867-9ac1-cdd0427d46e6 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.472081] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29800d4b-7a16-4cf7-bc5c-dbb063ff2d7b {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.476646] env[62070]: DEBUG oslo_vmware.api [None req-4e7b3e9b-a4fd-4941-87c2-296d13c2b794 tempest-ServerPasswordTestJSON-1580526241 tempest-ServerPasswordTestJSON-1580526241-project-member] Waiting for the task: (returnval){ [ 907.476646] env[62070]: value = "task-1122038" [ 907.476646] env[62070]: _type = "Task" [ 907.476646] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 907.488334] env[62070]: DEBUG nova.compute.provider_tree [None req-fb77bedb-24c5-4fa2-9eae-0bbdb5984976 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 907.495991] env[62070]: DEBUG oslo_vmware.api [None req-4e7b3e9b-a4fd-4941-87c2-296d13c2b794 tempest-ServerPasswordTestJSON-1580526241 tempest-ServerPasswordTestJSON-1580526241-project-member] Task: {'id': task-1122038, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 907.585431] env[62070]: DEBUG oslo_vmware.api [None req-95a98976-70b0-4bbd-a6bf-2286c5bf4562 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Task: {'id': task-1122037, 'name': PowerOffVM_Task, 'duration_secs': 0.218512} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 907.585700] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-95a98976-70b0-4bbd-a6bf-2286c5bf4562 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: fb054a32-c1aa-4884-a087-da5ad34cf3c4] Powered off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 907.585888] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-95a98976-70b0-4bbd-a6bf-2286c5bf4562 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: fb054a32-c1aa-4884-a087-da5ad34cf3c4] Updating instance 'fb054a32-c1aa-4884-a087-da5ad34cf3c4' progress to 17 {{(pid=62070) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 907.597036] env[62070]: DEBUG nova.compute.manager [None req-ed858abe-6e77-488f-b72a-5a07c1e7c264 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: 3d699ce5-4d21-48f3-8f17-0cd49aebf109] Start building block device mappings for instance. {{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 907.988747] env[62070]: DEBUG oslo_vmware.api [None req-4e7b3e9b-a4fd-4941-87c2-296d13c2b794 tempest-ServerPasswordTestJSON-1580526241 tempest-ServerPasswordTestJSON-1580526241-project-member] Task: {'id': task-1122038, 'name': PowerOffVM_Task, 'duration_secs': 0.176915} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 907.989154] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-4e7b3e9b-a4fd-4941-87c2-296d13c2b794 tempest-ServerPasswordTestJSON-1580526241 tempest-ServerPasswordTestJSON-1580526241-project-member] [instance: e51d0146-502a-4ace-856e-b0dbcc11edea] Powered off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 907.989374] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-4e7b3e9b-a4fd-4941-87c2-296d13c2b794 tempest-ServerPasswordTestJSON-1580526241 tempest-ServerPasswordTestJSON-1580526241-project-member] [instance: e51d0146-502a-4ace-856e-b0dbcc11edea] Unregistering the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 907.989691] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3801be30-e770-413c-bdbb-f5845d2bdd32 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.992570] env[62070]: DEBUG nova.scheduler.client.report [None req-fb77bedb-24c5-4fa2-9eae-0bbdb5984976 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 908.066601] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-4e7b3e9b-a4fd-4941-87c2-296d13c2b794 tempest-ServerPasswordTestJSON-1580526241 tempest-ServerPasswordTestJSON-1580526241-project-member] [instance: e51d0146-502a-4ace-856e-b0dbcc11edea] Unregistered the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 908.066830] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-4e7b3e9b-a4fd-4941-87c2-296d13c2b794 tempest-ServerPasswordTestJSON-1580526241 tempest-ServerPasswordTestJSON-1580526241-project-member] [instance: e51d0146-502a-4ace-856e-b0dbcc11edea] Deleting contents of the VM from datastore datastore2 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 908.067178] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-4e7b3e9b-a4fd-4941-87c2-296d13c2b794 tempest-ServerPasswordTestJSON-1580526241 tempest-ServerPasswordTestJSON-1580526241-project-member] Deleting the datastore file [datastore2] e51d0146-502a-4ace-856e-b0dbcc11edea {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 908.067601] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6b22f1d6-47e7-427a-8d89-e200f7d4166f {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.076907] env[62070]: DEBUG oslo_vmware.api [None req-4e7b3e9b-a4fd-4941-87c2-296d13c2b794 tempest-ServerPasswordTestJSON-1580526241 tempest-ServerPasswordTestJSON-1580526241-project-member] Waiting for the task: (returnval){ [ 908.076907] env[62070]: value = "task-1122040" [ 908.076907] env[62070]: _type = "Task" [ 
908.076907] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 908.085673] env[62070]: DEBUG oslo_vmware.api [None req-4e7b3e9b-a4fd-4941-87c2-296d13c2b794 tempest-ServerPasswordTestJSON-1580526241 tempest-ServerPasswordTestJSON-1580526241-project-member] Task: {'id': task-1122040, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 908.091911] env[62070]: DEBUG nova.virt.hardware [None req-95a98976-70b0-4bbd-a6bf-2286c5bf4562 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T09:21:37Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=43ea607c-7ece-4601-9b11-75c6a16aa7dd,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 908.092099] env[62070]: DEBUG nova.virt.hardware [None req-95a98976-70b0-4bbd-a6bf-2286c5bf4562 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Flavor limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 908.093027] env[62070]: DEBUG nova.virt.hardware [None req-95a98976-70b0-4bbd-a6bf-2286c5bf4562 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Image limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 908.093027] env[62070]: DEBUG nova.virt.hardware [None req-95a98976-70b0-4bbd-a6bf-2286c5bf4562 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Flavor pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 908.093027] env[62070]: DEBUG nova.virt.hardware [None req-95a98976-70b0-4bbd-a6bf-2286c5bf4562 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Image pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 908.093027] env[62070]: DEBUG nova.virt.hardware [None req-95a98976-70b0-4bbd-a6bf-2286c5bf4562 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 908.093027] env[62070]: DEBUG nova.virt.hardware [None req-95a98976-70b0-4bbd-a6bf-2286c5bf4562 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 908.093270] env[62070]: DEBUG nova.virt.hardware [None 
req-95a98976-70b0-4bbd-a6bf-2286c5bf4562 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 908.093456] env[62070]: DEBUG nova.virt.hardware [None req-95a98976-70b0-4bbd-a6bf-2286c5bf4562 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Got 1 possible topologies {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 908.094034] env[62070]: DEBUG nova.virt.hardware [None req-95a98976-70b0-4bbd-a6bf-2286c5bf4562 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 908.094034] env[62070]: DEBUG nova.virt.hardware [None req-95a98976-70b0-4bbd-a6bf-2286c5bf4562 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 908.103439] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cc065755-405a-4303-b377-24b2be950bcb {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.122336] env[62070]: DEBUG oslo_vmware.api [None req-95a98976-70b0-4bbd-a6bf-2286c5bf4562 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Waiting for the task: (returnval){ [ 908.122336] env[62070]: value = "task-1122041" [ 908.122336] env[62070]: _type = "Task" [ 908.122336] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 908.132181] env[62070]: DEBUG oslo_vmware.api [None req-95a98976-70b0-4bbd-a6bf-2286c5bf4562 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Task: {'id': task-1122041, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 908.498441] env[62070]: DEBUG oslo_concurrency.lockutils [None req-fb77bedb-24c5-4fa2-9eae-0bbdb5984976 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.913s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 908.501178] env[62070]: DEBUG oslo_concurrency.lockutils [None req-783a44c6-89e7-446a-8701-ffc7ee9343d8 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 26.734s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 908.501424] env[62070]: DEBUG nova.objects.instance [None req-783a44c6-89e7-446a-8701-ffc7ee9343d8 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Lazy-loading 'resources' on Instance uuid 62758a38-4819-4d5a-97ed-db6c9ceb97bf {{(pid=62070) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 908.527580] env[62070]: INFO nova.scheduler.client.report [None req-fb77bedb-24c5-4fa2-9eae-0bbdb5984976 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Deleted allocations for instance c16d175c-0b23-4f72-bdb0-844c6f80fd32 [ 908.588150] env[62070]: DEBUG oslo_vmware.api [None req-4e7b3e9b-a4fd-4941-87c2-296d13c2b794 tempest-ServerPasswordTestJSON-1580526241 tempest-ServerPasswordTestJSON-1580526241-project-member] Task: {'id': task-1122040, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.140109} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 908.588445] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-4e7b3e9b-a4fd-4941-87c2-296d13c2b794 tempest-ServerPasswordTestJSON-1580526241 tempest-ServerPasswordTestJSON-1580526241-project-member] Deleted the datastore file {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 908.588639] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-4e7b3e9b-a4fd-4941-87c2-296d13c2b794 tempest-ServerPasswordTestJSON-1580526241 tempest-ServerPasswordTestJSON-1580526241-project-member] [instance: e51d0146-502a-4ace-856e-b0dbcc11edea] Deleted contents of the VM from datastore datastore2 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 908.588823] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-4e7b3e9b-a4fd-4941-87c2-296d13c2b794 tempest-ServerPasswordTestJSON-1580526241 tempest-ServerPasswordTestJSON-1580526241-project-member] [instance: e51d0146-502a-4ace-856e-b0dbcc11edea] Instance destroyed {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 908.589012] env[62070]: INFO nova.compute.manager [None req-4e7b3e9b-a4fd-4941-87c2-296d13c2b794 tempest-ServerPasswordTestJSON-1580526241 tempest-ServerPasswordTestJSON-1580526241-project-member] [instance: e51d0146-502a-4ace-856e-b0dbcc11edea] Took 1.16 seconds to destroy the instance on the hypervisor. 
[ 908.589359] env[62070]: DEBUG oslo.service.loopingcall [None req-4e7b3e9b-a4fd-4941-87c2-296d13c2b794 tempest-ServerPasswordTestJSON-1580526241 tempest-ServerPasswordTestJSON-1580526241-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 908.589602] env[62070]: DEBUG nova.compute.manager [-] [instance: e51d0146-502a-4ace-856e-b0dbcc11edea] Deallocating network for instance {{(pid=62070) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 908.589702] env[62070]: DEBUG nova.network.neutron [-] [instance: e51d0146-502a-4ace-856e-b0dbcc11edea] deallocate_for_instance() {{(pid=62070) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 908.616562] env[62070]: DEBUG nova.compute.manager [None req-ed858abe-6e77-488f-b72a-5a07c1e7c264 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: 3d699ce5-4d21-48f3-8f17-0cd49aebf109] Start spawning the instance on the hypervisor. {{(pid=62070) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 908.632872] env[62070]: DEBUG oslo_vmware.api [None req-95a98976-70b0-4bbd-a6bf-2286c5bf4562 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Task: {'id': task-1122041, 'name': ReconfigVM_Task, 'duration_secs': 0.245035} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 908.633251] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-95a98976-70b0-4bbd-a6bf-2286c5bf4562 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: fb054a32-c1aa-4884-a087-da5ad34cf3c4] Updating instance 'fb054a32-c1aa-4884-a087-da5ad34cf3c4' progress to 33 {{(pid=62070) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 908.648800] env[62070]: DEBUG nova.virt.hardware [None req-ed858abe-6e77-488f-b72a-5a07c1e7c264 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T09:21:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T09:21:17Z,direct_url=,disk_format='vmdk',id=43ea607c-7ece-4601-9b11-75c6a16aa7dd,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9d42cb2bbadf40d6b35f237f71234611',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T09:21:18Z,virtual_size=,visibility=), allow threads: False {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 908.649471] env[62070]: DEBUG nova.virt.hardware [None req-ed858abe-6e77-488f-b72a-5a07c1e7c264 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Flavor limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 908.649471] env[62070]: DEBUG nova.virt.hardware [None req-ed858abe-6e77-488f-b72a-5a07c1e7c264 
tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Image limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 908.649471] env[62070]: DEBUG nova.virt.hardware [None req-ed858abe-6e77-488f-b72a-5a07c1e7c264 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Flavor pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 908.649620] env[62070]: DEBUG nova.virt.hardware [None req-ed858abe-6e77-488f-b72a-5a07c1e7c264 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Image pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 908.649763] env[62070]: DEBUG nova.virt.hardware [None req-ed858abe-6e77-488f-b72a-5a07c1e7c264 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 908.649914] env[62070]: DEBUG nova.virt.hardware [None req-ed858abe-6e77-488f-b72a-5a07c1e7c264 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 908.650441] env[62070]: DEBUG nova.virt.hardware [None req-ed858abe-6e77-488f-b72a-5a07c1e7c264 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 908.650441] env[62070]: DEBUG nova.virt.hardware [None req-ed858abe-6e77-488f-b72a-5a07c1e7c264 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Got 1 possible topologies {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 908.650441] env[62070]: DEBUG nova.virt.hardware [None req-ed858abe-6e77-488f-b72a-5a07c1e7c264 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 908.650599] env[62070]: DEBUG nova.virt.hardware [None req-ed858abe-6e77-488f-b72a-5a07c1e7c264 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 908.651910] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49392b91-270a-461d-b918-7ffec1cb5b98 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.662937] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4745eb3-184b-404a-8efd-931aa25b7743 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.004644] env[62070]: DEBUG nova.objects.instance [None req-783a44c6-89e7-446a-8701-ffc7ee9343d8 tempest-DeleteServersTestJSON-2112665073 
tempest-DeleteServersTestJSON-2112665073-project-member] Lazy-loading 'numa_topology' on Instance uuid 62758a38-4819-4d5a-97ed-db6c9ceb97bf {{(pid=62070) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 909.035505] env[62070]: DEBUG oslo_concurrency.lockutils [None req-fb77bedb-24c5-4fa2-9eae-0bbdb5984976 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Lock "c16d175c-0b23-4f72-bdb0-844c6f80fd32" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 36.031s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 909.087614] env[62070]: DEBUG nova.compute.manager [req-295c1831-fd69-45f8-917f-baf6dde0baae req-c46e1bb2-422f-41aa-8320-5310c971a1ef service nova] [instance: e51d0146-502a-4ace-856e-b0dbcc11edea] Received event network-vif-deleted-2aadf794-6fd0-4b81-a924-27e730f9c52c {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 909.087819] env[62070]: INFO nova.compute.manager [req-295c1831-fd69-45f8-917f-baf6dde0baae req-c46e1bb2-422f-41aa-8320-5310c971a1ef service nova] [instance: e51d0146-502a-4ace-856e-b0dbcc11edea] Neutron deleted interface 2aadf794-6fd0-4b81-a924-27e730f9c52c; detaching it from the instance and deleting it from the info cache [ 909.088127] env[62070]: DEBUG nova.network.neutron [req-295c1831-fd69-45f8-917f-baf6dde0baae req-c46e1bb2-422f-41aa-8320-5310c971a1ef service nova] [instance: e51d0146-502a-4ace-856e-b0dbcc11edea] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 909.108532] env[62070]: DEBUG nova.network.neutron [None req-ed858abe-6e77-488f-b72a-5a07c1e7c264 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: 3d699ce5-4d21-48f3-8f17-0cd49aebf109] Successfully updated port: 4ee007f5-ba54-4f18-b072-e8c7119e6eb2 {{(pid=62070) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 909.143805] env[62070]: DEBUG nova.virt.hardware [None req-95a98976-70b0-4bbd-a6bf-2286c5bf4562 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T09:21:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=43ea607c-7ece-4601-9b11-75c6a16aa7dd,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 909.144356] env[62070]: DEBUG nova.virt.hardware [None req-95a98976-70b0-4bbd-a6bf-2286c5bf4562 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Flavor limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 909.144976] env[62070]: DEBUG nova.virt.hardware [None req-95a98976-70b0-4bbd-a6bf-2286c5bf4562 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Image limits 0:0:0 {{(pid=62070) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 909.145435] env[62070]: DEBUG nova.virt.hardware [None req-95a98976-70b0-4bbd-a6bf-2286c5bf4562 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Flavor pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 909.145972] env[62070]: DEBUG nova.virt.hardware [None req-95a98976-70b0-4bbd-a6bf-2286c5bf4562 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Image pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 909.146340] env[62070]: DEBUG nova.virt.hardware [None req-95a98976-70b0-4bbd-a6bf-2286c5bf4562 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 909.146965] env[62070]: DEBUG nova.virt.hardware [None req-95a98976-70b0-4bbd-a6bf-2286c5bf4562 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 909.147374] env[62070]: DEBUG nova.virt.hardware [None req-95a98976-70b0-4bbd-a6bf-2286c5bf4562 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 909.147921] env[62070]: DEBUG nova.virt.hardware [None req-95a98976-70b0-4bbd-a6bf-2286c5bf4562 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Got 1 possible topologies {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 909.148286] env[62070]: DEBUG nova.virt.hardware [None req-95a98976-70b0-4bbd-a6bf-2286c5bf4562 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 909.148702] env[62070]: DEBUG nova.virt.hardware [None req-95a98976-70b0-4bbd-a6bf-2286c5bf4562 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 909.158150] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-95a98976-70b0-4bbd-a6bf-2286c5bf4562 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: fb054a32-c1aa-4884-a087-da5ad34cf3c4] Reconfiguring VM instance instance-00000048 to detach disk 2000 {{(pid=62070) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 909.159294] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d8a5185b-e88d-48cb-a7f7-888f3b17e0f8 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.175314] env[62070]: 
DEBUG nova.compute.manager [req-92fb60fb-1127-4765-8292-5b1dbbde9b7f req-7e4705b8-a062-4952-86fb-fe9f00b22ffb service nova] [instance: 3d699ce5-4d21-48f3-8f17-0cd49aebf109] Received event network-vif-plugged-4ee007f5-ba54-4f18-b072-e8c7119e6eb2 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 909.175540] env[62070]: DEBUG oslo_concurrency.lockutils [req-92fb60fb-1127-4765-8292-5b1dbbde9b7f req-7e4705b8-a062-4952-86fb-fe9f00b22ffb service nova] Acquiring lock "3d699ce5-4d21-48f3-8f17-0cd49aebf109-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 909.175754] env[62070]: DEBUG oslo_concurrency.lockutils [req-92fb60fb-1127-4765-8292-5b1dbbde9b7f req-7e4705b8-a062-4952-86fb-fe9f00b22ffb service nova] Lock "3d699ce5-4d21-48f3-8f17-0cd49aebf109-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 909.175925] env[62070]: DEBUG oslo_concurrency.lockutils [req-92fb60fb-1127-4765-8292-5b1dbbde9b7f req-7e4705b8-a062-4952-86fb-fe9f00b22ffb service nova] Lock "3d699ce5-4d21-48f3-8f17-0cd49aebf109-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 909.176113] env[62070]: DEBUG nova.compute.manager [req-92fb60fb-1127-4765-8292-5b1dbbde9b7f req-7e4705b8-a062-4952-86fb-fe9f00b22ffb service nova] [instance: 3d699ce5-4d21-48f3-8f17-0cd49aebf109] No waiting events found dispatching network-vif-plugged-4ee007f5-ba54-4f18-b072-e8c7119e6eb2 {{(pid=62070) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 909.176854] env[62070]: WARNING nova.compute.manager [req-92fb60fb-1127-4765-8292-5b1dbbde9b7f req-7e4705b8-a062-4952-86fb-fe9f00b22ffb service nova] [instance: 3d699ce5-4d21-48f3-8f17-0cd49aebf109] Received unexpected event network-vif-plugged-4ee007f5-ba54-4f18-b072-e8c7119e6eb2 for instance with vm_state building and task_state spawning. [ 909.184204] env[62070]: DEBUG oslo_vmware.api [None req-95a98976-70b0-4bbd-a6bf-2286c5bf4562 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Waiting for the task: (returnval){ [ 909.184204] env[62070]: value = "task-1122042" [ 909.184204] env[62070]: _type = "Task" [ 909.184204] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 909.194185] env[62070]: DEBUG oslo_vmware.api [None req-95a98976-70b0-4bbd-a6bf-2286c5bf4562 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Task: {'id': task-1122042, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 909.355507] env[62070]: DEBUG nova.network.neutron [-] [instance: e51d0146-502a-4ace-856e-b0dbcc11edea] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 909.508540] env[62070]: DEBUG nova.objects.base [None req-783a44c6-89e7-446a-8701-ffc7ee9343d8 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Object Instance<62758a38-4819-4d5a-97ed-db6c9ceb97bf> lazy-loaded attributes: resources,numa_topology {{(pid=62070) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 909.591920] env[62070]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-19f526ef-7141-467a-b37f-bcd1d41d80a0 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.604713] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b5576a6-adf3-4a82-8c02-9d42e28493c9 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.618620] env[62070]: DEBUG oslo_concurrency.lockutils [None req-ed858abe-6e77-488f-b72a-5a07c1e7c264 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Acquiring lock "refresh_cache-3d699ce5-4d21-48f3-8f17-0cd49aebf109" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 909.618762] env[62070]: DEBUG oslo_concurrency.lockutils [None req-ed858abe-6e77-488f-b72a-5a07c1e7c264 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Acquired lock "refresh_cache-3d699ce5-4d21-48f3-8f17-0cd49aebf109" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 909.619075] env[62070]: DEBUG nova.network.neutron [None req-ed858abe-6e77-488f-b72a-5a07c1e7c264 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: 3d699ce5-4d21-48f3-8f17-0cd49aebf109] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 909.642510] env[62070]: DEBUG nova.compute.manager [req-295c1831-fd69-45f8-917f-baf6dde0baae req-c46e1bb2-422f-41aa-8320-5310c971a1ef service nova] [instance: e51d0146-502a-4ace-856e-b0dbcc11edea] Detach interface failed, port_id=2aadf794-6fd0-4b81-a924-27e730f9c52c, reason: Instance e51d0146-502a-4ace-856e-b0dbcc11edea could not be found. {{(pid=62070) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 909.694650] env[62070]: DEBUG oslo_vmware.api [None req-95a98976-70b0-4bbd-a6bf-2286c5bf4562 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Task: {'id': task-1122042, 'name': ReconfigVM_Task, 'duration_secs': 0.214186} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 909.694938] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-95a98976-70b0-4bbd-a6bf-2286c5bf4562 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: fb054a32-c1aa-4884-a087-da5ad34cf3c4] Reconfigured VM instance instance-00000048 to detach disk 2000 {{(pid=62070) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 909.695937] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-013c6ba2-5e4a-49a7-8393-cc7e6423539b {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.722449] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-95a98976-70b0-4bbd-a6bf-2286c5bf4562 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: fb054a32-c1aa-4884-a087-da5ad34cf3c4] Reconfiguring VM instance instance-00000048 to attach disk [datastore2] fb054a32-c1aa-4884-a087-da5ad34cf3c4/fb054a32-c1aa-4884-a087-da5ad34cf3c4.vmdk or device None with type thin {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 909.722779] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-48fd5c29-54c3-4f8e-a085-74be172cef76 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.744461] env[62070]: DEBUG oslo_vmware.api [None req-95a98976-70b0-4bbd-a6bf-2286c5bf4562 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Waiting for the task: (returnval){ [ 909.744461] env[62070]: value = "task-1122043" [ 909.744461] env[62070]: _type = "Task" [ 909.744461] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 909.755383] env[62070]: DEBUG oslo_vmware.api [None req-95a98976-70b0-4bbd-a6bf-2286c5bf4562 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Task: {'id': task-1122043, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 909.822801] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6416ae76-68d1-4116-93bc-f6daff596cc3 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.830636] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4bd78e4-a0b8-432c-ad5c-a901b9ed5a47 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.861429] env[62070]: INFO nova.compute.manager [-] [instance: e51d0146-502a-4ace-856e-b0dbcc11edea] Took 1.27 seconds to deallocate network for instance. 
[ 909.865897] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec2c5685-00f1-4f91-b5b7-ec6e1384f2b1 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.875430] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-447fe920-4ee8-43d6-b61e-1f2bc7fa3642 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.891794] env[62070]: DEBUG nova.compute.provider_tree [None req-783a44c6-89e7-446a-8701-ffc7ee9343d8 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 910.179679] env[62070]: DEBUG nova.network.neutron [None req-ed858abe-6e77-488f-b72a-5a07c1e7c264 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: 3d699ce5-4d21-48f3-8f17-0cd49aebf109] Instance cache missing network info. {{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 910.258038] env[62070]: DEBUG oslo_vmware.api [None req-95a98976-70b0-4bbd-a6bf-2286c5bf4562 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Task: {'id': task-1122043, 'name': ReconfigVM_Task, 'duration_secs': 0.27133} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 910.258478] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-95a98976-70b0-4bbd-a6bf-2286c5bf4562 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: fb054a32-c1aa-4884-a087-da5ad34cf3c4] Reconfigured VM instance instance-00000048 to attach disk [datastore2] fb054a32-c1aa-4884-a087-da5ad34cf3c4/fb054a32-c1aa-4884-a087-da5ad34cf3c4.vmdk or device None with type thin {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 910.258610] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-95a98976-70b0-4bbd-a6bf-2286c5bf4562 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: fb054a32-c1aa-4884-a087-da5ad34cf3c4] Updating instance 'fb054a32-c1aa-4884-a087-da5ad34cf3c4' progress to 50 {{(pid=62070) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 910.365619] env[62070]: DEBUG nova.network.neutron [None req-ed858abe-6e77-488f-b72a-5a07c1e7c264 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: 3d699ce5-4d21-48f3-8f17-0cd49aebf109] Updating instance_info_cache with network_info: [{"id": "4ee007f5-ba54-4f18-b072-e8c7119e6eb2", "address": "fa:16:3e:10:de:89", "network": {"id": "516790be-56b8-409d-b1c0-a8683a45a9ec", "bridge": "br-int", "label": "tempest-ServersTestJSON-693737631-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "91e246e32f29422e90fae974cfee9d8f", "mtu": 8950, 
"physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "359850cc-b061-4c9c-a61c-eb42e0f7c359", "external-id": "nsx-vlan-transportzone-113", "segmentation_id": 113, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4ee007f5-ba", "ovs_interfaceid": "4ee007f5-ba54-4f18-b072-e8c7119e6eb2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 910.372587] env[62070]: DEBUG oslo_concurrency.lockutils [None req-4e7b3e9b-a4fd-4941-87c2-296d13c2b794 tempest-ServerPasswordTestJSON-1580526241 tempest-ServerPasswordTestJSON-1580526241-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 910.394550] env[62070]: DEBUG nova.scheduler.client.report [None req-783a44c6-89e7-446a-8701-ffc7ee9343d8 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 910.768108] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81cc25ac-a36a-4482-a068-ff426bf354e1 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.787599] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40126e6a-8c51-4d27-a428-491a947e7688 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.805049] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-95a98976-70b0-4bbd-a6bf-2286c5bf4562 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: fb054a32-c1aa-4884-a087-da5ad34cf3c4] Updating instance 'fb054a32-c1aa-4884-a087-da5ad34cf3c4' progress to 67 {{(pid=62070) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 910.870526] env[62070]: DEBUG oslo_concurrency.lockutils [None req-ed858abe-6e77-488f-b72a-5a07c1e7c264 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Releasing lock "refresh_cache-3d699ce5-4d21-48f3-8f17-0cd49aebf109" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 910.870817] env[62070]: DEBUG nova.compute.manager [None req-ed858abe-6e77-488f-b72a-5a07c1e7c264 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: 3d699ce5-4d21-48f3-8f17-0cd49aebf109] Instance network_info: |[{"id": "4ee007f5-ba54-4f18-b072-e8c7119e6eb2", "address": "fa:16:3e:10:de:89", "network": {"id": "516790be-56b8-409d-b1c0-a8683a45a9ec", "bridge": "br-int", "label": 
"tempest-ServersTestJSON-693737631-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "91e246e32f29422e90fae974cfee9d8f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "359850cc-b061-4c9c-a61c-eb42e0f7c359", "external-id": "nsx-vlan-transportzone-113", "segmentation_id": 113, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4ee007f5-ba", "ovs_interfaceid": "4ee007f5-ba54-4f18-b072-e8c7119e6eb2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 910.871280] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-ed858abe-6e77-488f-b72a-5a07c1e7c264 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: 3d699ce5-4d21-48f3-8f17-0cd49aebf109] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:10:de:89', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '359850cc-b061-4c9c-a61c-eb42e0f7c359', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4ee007f5-ba54-4f18-b072-e8c7119e6eb2', 'vif_model': 'vmxnet3'}] {{(pid=62070) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 910.878963] env[62070]: DEBUG oslo.service.loopingcall [None req-ed858abe-6e77-488f-b72a-5a07c1e7c264 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 910.879470] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3d699ce5-4d21-48f3-8f17-0cd49aebf109] Creating VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 910.879711] env[62070]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-51900a94-d124-421c-9d9a-242ebb25a5b6 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.901501] env[62070]: DEBUG oslo_concurrency.lockutils [None req-783a44c6-89e7-446a-8701-ffc7ee9343d8 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.400s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 910.903900] env[62070]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 910.903900] env[62070]: value = "task-1122044" [ 910.903900] env[62070]: _type = "Task" [ 910.903900] env[62070]: } to complete. 
{{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 910.904273] env[62070]: DEBUG oslo_concurrency.lockutils [None req-1e327dae-ecdb-4e8f-89d3-84ae5f947ccb tempest-ServersAdmin275Test-485220002 tempest-ServersAdmin275Test-485220002-project-admin] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 28.061s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 910.904482] env[62070]: DEBUG nova.objects.instance [None req-1e327dae-ecdb-4e8f-89d3-84ae5f947ccb tempest-ServersAdmin275Test-485220002 tempest-ServersAdmin275Test-485220002-project-admin] [instance: 61ab347d-1342-4f59-8955-10d575993b77] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62070) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 910.918849] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1122044, 'name': CreateVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 911.193441] env[62070]: DEBUG nova.compute.manager [req-d2a4e6e3-7bcf-47e4-bc15-b835958f14c0 req-02f90af3-c72a-477e-98b9-497ca28a87a3 service nova] [instance: 3d699ce5-4d21-48f3-8f17-0cd49aebf109] Received event network-changed-4ee007f5-ba54-4f18-b072-e8c7119e6eb2 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 911.193693] env[62070]: DEBUG nova.compute.manager [req-d2a4e6e3-7bcf-47e4-bc15-b835958f14c0 req-02f90af3-c72a-477e-98b9-497ca28a87a3 service nova] [instance: 3d699ce5-4d21-48f3-8f17-0cd49aebf109] Refreshing instance network info cache due to event network-changed-4ee007f5-ba54-4f18-b072-e8c7119e6eb2. 
{{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 911.193934] env[62070]: DEBUG oslo_concurrency.lockutils [req-d2a4e6e3-7bcf-47e4-bc15-b835958f14c0 req-02f90af3-c72a-477e-98b9-497ca28a87a3 service nova] Acquiring lock "refresh_cache-3d699ce5-4d21-48f3-8f17-0cd49aebf109" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 911.194134] env[62070]: DEBUG oslo_concurrency.lockutils [req-d2a4e6e3-7bcf-47e4-bc15-b835958f14c0 req-02f90af3-c72a-477e-98b9-497ca28a87a3 service nova] Acquired lock "refresh_cache-3d699ce5-4d21-48f3-8f17-0cd49aebf109" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 911.194323] env[62070]: DEBUG nova.network.neutron [req-d2a4e6e3-7bcf-47e4-bc15-b835958f14c0 req-02f90af3-c72a-477e-98b9-497ca28a87a3 service nova] [instance: 3d699ce5-4d21-48f3-8f17-0cd49aebf109] Refreshing network info cache for port 4ee007f5-ba54-4f18-b072-e8c7119e6eb2 {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 911.201086] env[62070]: DEBUG oslo_concurrency.lockutils [None req-906804fc-a7e7-4fcf-821d-5e65108abaa1 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Acquiring lock "84c00e4a-20d3-4739-8535-e27076d85a89" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 911.201794] env[62070]: DEBUG oslo_concurrency.lockutils [None req-906804fc-a7e7-4fcf-821d-5e65108abaa1 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Lock "84c00e4a-20d3-4739-8535-e27076d85a89" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 911.357125] env[62070]: DEBUG nova.network.neutron [None req-95a98976-70b0-4bbd-a6bf-2286c5bf4562 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: fb054a32-c1aa-4884-a087-da5ad34cf3c4] Port 32f47285-7ff0-405e-849d-27e73999e359 binding to destination host cpu-1 is already ACTIVE {{(pid=62070) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3171}} [ 911.416429] env[62070]: DEBUG oslo_concurrency.lockutils [None req-783a44c6-89e7-446a-8701-ffc7ee9343d8 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Lock "62758a38-4819-4d5a-97ed-db6c9ceb97bf" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 51.003s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 911.418136] env[62070]: DEBUG oslo_concurrency.lockutils [None req-4ed0d70b-94fb-4c1c-826a-6f09c66aec65 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Lock "62758a38-4819-4d5a-97ed-db6c9ceb97bf" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 28.783s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 911.418136] env[62070]: DEBUG oslo_concurrency.lockutils [None req-4ed0d70b-94fb-4c1c-826a-6f09c66aec65 tempest-DeleteServersTestJSON-2112665073 
tempest-DeleteServersTestJSON-2112665073-project-member] Acquiring lock "62758a38-4819-4d5a-97ed-db6c9ceb97bf-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 911.418136] env[62070]: DEBUG oslo_concurrency.lockutils [None req-4ed0d70b-94fb-4c1c-826a-6f09c66aec65 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Lock "62758a38-4819-4d5a-97ed-db6c9ceb97bf-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 911.418341] env[62070]: DEBUG oslo_concurrency.lockutils [None req-4ed0d70b-94fb-4c1c-826a-6f09c66aec65 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Lock "62758a38-4819-4d5a-97ed-db6c9ceb97bf-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 911.423260] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1122044, 'name': CreateVM_Task, 'duration_secs': 0.347155} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 911.424122] env[62070]: INFO nova.compute.manager [None req-4ed0d70b-94fb-4c1c-826a-6f09c66aec65 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 62758a38-4819-4d5a-97ed-db6c9ceb97bf] Terminating instance [ 911.425396] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3d699ce5-4d21-48f3-8f17-0cd49aebf109] Created VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 911.426174] env[62070]: DEBUG nova.compute.manager [None req-4ed0d70b-94fb-4c1c-826a-6f09c66aec65 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 62758a38-4819-4d5a-97ed-db6c9ceb97bf] Start destroying the instance on the hypervisor. 
{{(pid=62070) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 911.426375] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-4ed0d70b-94fb-4c1c-826a-6f09c66aec65 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 62758a38-4819-4d5a-97ed-db6c9ceb97bf] Destroying instance {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 911.427050] env[62070]: DEBUG oslo_concurrency.lockutils [None req-ed858abe-6e77-488f-b72a-5a07c1e7c264 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 911.427232] env[62070]: DEBUG oslo_concurrency.lockutils [None req-ed858abe-6e77-488f-b72a-5a07c1e7c264 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Acquired lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 911.427599] env[62070]: DEBUG oslo_concurrency.lockutils [None req-ed858abe-6e77-488f-b72a-5a07c1e7c264 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 911.427771] env[62070]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-13ccef5d-f6ec-4e5a-9747-8bb959e2add4 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.429993] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2156b017-d7a3-42d1-889c-2a975f9cbdb0 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.436779] env[62070]: DEBUG oslo_vmware.api [None req-ed858abe-6e77-488f-b72a-5a07c1e7c264 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Waiting for the task: (returnval){ [ 911.436779] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]52f52093-1ac3-e61d-4413-345b76002a73" [ 911.436779] env[62070]: _type = "Task" [ 911.436779] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 911.443092] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75edb860-a028-4405-ad39-fcfe6183156c {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.459448] env[62070]: DEBUG oslo_vmware.api [None req-ed858abe-6e77-488f-b72a-5a07c1e7c264 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52f52093-1ac3-e61d-4413-345b76002a73, 'name': SearchDatastore_Task, 'duration_secs': 0.012486} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 911.459764] env[62070]: DEBUG oslo_concurrency.lockutils [None req-ed858abe-6e77-488f-b72a-5a07c1e7c264 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Releasing lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 911.460012] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-ed858abe-6e77-488f-b72a-5a07c1e7c264 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: 3d699ce5-4d21-48f3-8f17-0cd49aebf109] Processing image 43ea607c-7ece-4601-9b11-75c6a16aa7dd {{(pid=62070) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 911.460259] env[62070]: DEBUG oslo_concurrency.lockutils [None req-ed858abe-6e77-488f-b72a-5a07c1e7c264 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 911.460412] env[62070]: DEBUG oslo_concurrency.lockutils [None req-ed858abe-6e77-488f-b72a-5a07c1e7c264 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Acquired lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 911.460594] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-ed858abe-6e77-488f-b72a-5a07c1e7c264 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 911.460848] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f903afb9-4431-4ff8-9b25-42bed302133a {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.473715] env[62070]: WARNING nova.virt.vmwareapi.vmops [None req-4ed0d70b-94fb-4c1c-826a-6f09c66aec65 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 62758a38-4819-4d5a-97ed-db6c9ceb97bf] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 62758a38-4819-4d5a-97ed-db6c9ceb97bf could not be found. [ 911.473955] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-4ed0d70b-94fb-4c1c-826a-6f09c66aec65 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 62758a38-4819-4d5a-97ed-db6c9ceb97bf] Instance destroyed {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 911.474105] env[62070]: INFO nova.compute.manager [None req-4ed0d70b-94fb-4c1c-826a-6f09c66aec65 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 62758a38-4819-4d5a-97ed-db6c9ceb97bf] Took 0.05 seconds to destroy the instance on the hypervisor. 
[ 911.474415] env[62070]: DEBUG oslo.service.loopingcall [None req-4ed0d70b-94fb-4c1c-826a-6f09c66aec65 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 911.475548] env[62070]: DEBUG nova.compute.manager [-] [instance: 62758a38-4819-4d5a-97ed-db6c9ceb97bf] Deallocating network for instance {{(pid=62070) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 911.475648] env[62070]: DEBUG nova.network.neutron [-] [instance: 62758a38-4819-4d5a-97ed-db6c9ceb97bf] deallocate_for_instance() {{(pid=62070) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 911.477303] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-ed858abe-6e77-488f-b72a-5a07c1e7c264 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 911.477483] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-ed858abe-6e77-488f-b72a-5a07c1e7c264 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62070) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 911.478206] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-315ddbd0-3e7a-42b6-8d4e-923661d81b99 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.484471] env[62070]: DEBUG oslo_vmware.api [None req-ed858abe-6e77-488f-b72a-5a07c1e7c264 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Waiting for the task: (returnval){ [ 911.484471] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]52136e40-38cd-3e62-18d7-740df3f8fe77" [ 911.484471] env[62070]: _type = "Task" [ 911.484471] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 911.492780] env[62070]: DEBUG oslo_vmware.api [None req-ed858abe-6e77-488f-b72a-5a07c1e7c264 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52136e40-38cd-3e62-18d7-740df3f8fe77, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 911.705213] env[62070]: DEBUG nova.compute.manager [None req-906804fc-a7e7-4fcf-821d-5e65108abaa1 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 84c00e4a-20d3-4739-8535-e27076d85a89] Starting instance... 
{{(pid=62070) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 911.921936] env[62070]: DEBUG oslo_concurrency.lockutils [None req-1e327dae-ecdb-4e8f-89d3-84ae5f947ccb tempest-ServersAdmin275Test-485220002 tempest-ServersAdmin275Test-485220002-project-admin] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.018s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 911.923339] env[62070]: DEBUG oslo_concurrency.lockutils [None req-61a3dc63-b62e-4c66-aaff-d9a9f2541f31 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 27.166s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 911.923581] env[62070]: DEBUG nova.objects.instance [None req-61a3dc63-b62e-4c66-aaff-d9a9f2541f31 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Lazy-loading 'resources' on Instance uuid 58146b84-7589-4f21-bdab-605cee535e55 {{(pid=62070) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 911.974068] env[62070]: DEBUG nova.network.neutron [req-d2a4e6e3-7bcf-47e4-bc15-b835958f14c0 req-02f90af3-c72a-477e-98b9-497ca28a87a3 service nova] [instance: 3d699ce5-4d21-48f3-8f17-0cd49aebf109] Updated VIF entry in instance network info cache for port 4ee007f5-ba54-4f18-b072-e8c7119e6eb2. {{(pid=62070) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 911.974566] env[62070]: DEBUG nova.network.neutron [req-d2a4e6e3-7bcf-47e4-bc15-b835958f14c0 req-02f90af3-c72a-477e-98b9-497ca28a87a3 service nova] [instance: 3d699ce5-4d21-48f3-8f17-0cd49aebf109] Updating instance_info_cache with network_info: [{"id": "4ee007f5-ba54-4f18-b072-e8c7119e6eb2", "address": "fa:16:3e:10:de:89", "network": {"id": "516790be-56b8-409d-b1c0-a8683a45a9ec", "bridge": "br-int", "label": "tempest-ServersTestJSON-693737631-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "91e246e32f29422e90fae974cfee9d8f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "359850cc-b061-4c9c-a61c-eb42e0f7c359", "external-id": "nsx-vlan-transportzone-113", "segmentation_id": 113, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4ee007f5-ba", "ovs_interfaceid": "4ee007f5-ba54-4f18-b072-e8c7119e6eb2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 911.995309] env[62070]: DEBUG oslo_vmware.api [None req-ed858abe-6e77-488f-b72a-5a07c1e7c264 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52136e40-38cd-3e62-18d7-740df3f8fe77, 'name': SearchDatastore_Task, 'duration_secs': 0.009464} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 911.996099] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-79b5e57b-5e33-4f8d-b846-85142bbb8855 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.002505] env[62070]: DEBUG oslo_vmware.api [None req-ed858abe-6e77-488f-b72a-5a07c1e7c264 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Waiting for the task: (returnval){ [ 912.002505] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]52aee9ba-4640-83d9-584e-fc182721d56b" [ 912.002505] env[62070]: _type = "Task" [ 912.002505] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 912.010965] env[62070]: DEBUG oslo_vmware.api [None req-ed858abe-6e77-488f-b72a-5a07c1e7c264 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52aee9ba-4640-83d9-584e-fc182721d56b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 912.203009] env[62070]: DEBUG nova.network.neutron [-] [instance: 62758a38-4819-4d5a-97ed-db6c9ceb97bf] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 912.225965] env[62070]: DEBUG oslo_concurrency.lockutils [None req-906804fc-a7e7-4fcf-821d-5e65108abaa1 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 912.382454] env[62070]: DEBUG oslo_concurrency.lockutils [None req-95a98976-70b0-4bbd-a6bf-2286c5bf4562 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Acquiring lock "fb054a32-c1aa-4884-a087-da5ad34cf3c4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 912.382454] env[62070]: DEBUG oslo_concurrency.lockutils [None req-95a98976-70b0-4bbd-a6bf-2286c5bf4562 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Lock "fb054a32-c1aa-4884-a087-da5ad34cf3c4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 912.382454] env[62070]: DEBUG oslo_concurrency.lockutils [None req-95a98976-70b0-4bbd-a6bf-2286c5bf4562 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Lock "fb054a32-c1aa-4884-a087-da5ad34cf3c4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 912.477622] env[62070]: DEBUG oslo_concurrency.lockutils [req-d2a4e6e3-7bcf-47e4-bc15-b835958f14c0 req-02f90af3-c72a-477e-98b9-497ca28a87a3 service nova] 
Releasing lock "refresh_cache-3d699ce5-4d21-48f3-8f17-0cd49aebf109" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 912.517218] env[62070]: DEBUG oslo_vmware.api [None req-ed858abe-6e77-488f-b72a-5a07c1e7c264 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52aee9ba-4640-83d9-584e-fc182721d56b, 'name': SearchDatastore_Task, 'duration_secs': 0.009823} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 912.517475] env[62070]: DEBUG oslo_concurrency.lockutils [None req-ed858abe-6e77-488f-b72a-5a07c1e7c264 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Releasing lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 912.517757] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-ed858abe-6e77-488f-b72a-5a07c1e7c264 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore1] 3d699ce5-4d21-48f3-8f17-0cd49aebf109/3d699ce5-4d21-48f3-8f17-0cd49aebf109.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 912.518050] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6efc3de2-0840-4ac9-8678-a14a148101aa {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.526958] env[62070]: DEBUG oslo_vmware.api [None req-ed858abe-6e77-488f-b72a-5a07c1e7c264 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Waiting for the task: (returnval){ [ 912.526958] env[62070]: value = "task-1122045" [ 912.526958] env[62070]: _type = "Task" [ 912.526958] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 912.538925] env[62070]: DEBUG oslo_vmware.api [None req-ed858abe-6e77-488f-b72a-5a07c1e7c264 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Task: {'id': task-1122045, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 912.706010] env[62070]: INFO nova.compute.manager [-] [instance: 62758a38-4819-4d5a-97ed-db6c9ceb97bf] Took 1.23 seconds to deallocate network for instance. 
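Editor's note on the recurring pattern above and below: the paired "Waiting for the task: (returnval){ ... } to complete." and "Task: {...} progress is N%" lines come from fixed-interval polling of vCenter tasks (SearchDatastore_Task, CopyVirtualDisk_Task, ExtendVirtualDisk_Task, and so on) until they report success or error. The following is a minimal, self-contained sketch of that poll loop, not the oslo.vmware implementation itself; FakeTask, its step list, and the interval/timeout values are invented for illustration only.

import time

class FakeTask:
    # Stand-in for a vCenter task handle; the real driver reads TaskInfo over the SOAP API.
    def __init__(self, steps):
        self._progress = 0
        self._steps = list(steps)  # progress values the fake task will report, in order

    def poll(self):
        # Return (state, progress), advancing one step per call.
        if self._steps:
            self._progress = self._steps.pop(0)
        state = "success" if self._progress >= 100 else "running"
        return state, self._progress

def wait_for_task(task, interval=0.5, timeout=60):
    # Poll a task until it succeeds, fails, or times out, logging one
    # progress line per poll and a completion line with the duration,
    # mirroring the _poll_task / wait_for_task lines in the trace.
    start = time.monotonic()
    while True:
        state, progress = task.poll()
        print(f"Task progress is {progress}%.")
        if state == "success":
            print(f"Task completed successfully in {time.monotonic() - start:.3f}s.")
            return
        if state == "error":
            raise RuntimeError("task failed")
        if time.monotonic() - start > timeout:
            raise TimeoutError("task did not complete in time")
        time.sleep(interval)

if __name__ == "__main__":
    wait_for_task(FakeTask([0, 66, 100]), interval=0.1)

In the real session code this loop is driven by oslo.service's looping-call helpers rather than a bare while loop, which is why oslo.service.loopingcall also appears in the surrounding entries.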
[ 912.724994] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bb1cc24-856c-481b-b8ac-8af189791b33 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.734045] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43ad0071-d52c-4939-ab63-b335ffd7854a {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.769535] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea597449-d425-479c-86b5-f6b6e7453cb2 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.779529] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b8b4324-7849-48fa-b861-ad8813788366 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.795786] env[62070]: DEBUG nova.compute.provider_tree [None req-61a3dc63-b62e-4c66-aaff-d9a9f2541f31 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Updating inventory in ProviderTree for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 913.039741] env[62070]: DEBUG oslo_vmware.api [None req-ed858abe-6e77-488f-b72a-5a07c1e7c264 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Task: {'id': task-1122045, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.493307} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 913.040043] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-ed858abe-6e77-488f-b72a-5a07c1e7c264 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore1] 3d699ce5-4d21-48f3-8f17-0cd49aebf109/3d699ce5-4d21-48f3-8f17-0cd49aebf109.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 913.040343] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-ed858abe-6e77-488f-b72a-5a07c1e7c264 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: 3d699ce5-4d21-48f3-8f17-0cd49aebf109] Extending root virtual disk to 1048576 {{(pid=62070) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 913.040637] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2d8023a1-cb57-4441-b532-fecbd37f7f18 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.047431] env[62070]: DEBUG oslo_vmware.api [None req-ed858abe-6e77-488f-b72a-5a07c1e7c264 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Waiting for the task: (returnval){ [ 913.047431] env[62070]: value = "task-1122046" [ 913.047431] env[62070]: _type = "Task" [ 913.047431] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 913.056577] env[62070]: DEBUG oslo_vmware.api [None req-ed858abe-6e77-488f-b72a-5a07c1e7c264 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Task: {'id': task-1122046, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 913.315457] env[62070]: ERROR nova.scheduler.client.report [None req-61a3dc63-b62e-4c66-aaff-d9a9f2541f31 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [req-84d91f4e-79ba-48d5-b916-4cf551ea2930] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 21c7c111-1b69-4468-b2c4-5dd96014fbd6. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-84d91f4e-79ba-48d5-b916-4cf551ea2930"}]} [ 913.331529] env[62070]: DEBUG nova.scheduler.client.report [None req-61a3dc63-b62e-4c66-aaff-d9a9f2541f31 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Refreshing inventories for resource provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 913.344389] env[62070]: DEBUG nova.scheduler.client.report [None req-61a3dc63-b62e-4c66-aaff-d9a9f2541f31 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Updating ProviderTree inventory for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 913.344605] env[62070]: DEBUG nova.compute.provider_tree [None req-61a3dc63-b62e-4c66-aaff-d9a9f2541f31 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Updating inventory in ProviderTree for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 913.354594] env[62070]: DEBUG nova.scheduler.client.report [None req-61a3dc63-b62e-4c66-aaff-d9a9f2541f31 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Refreshing aggregate associations for resource provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6, aggregates: None {{(pid=62070) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 913.370907] env[62070]: DEBUG nova.scheduler.client.report [None req-61a3dc63-b62e-4c66-aaff-d9a9f2541f31 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Refreshing trait associations for resource provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6, traits: COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO {{(pid=62070) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 913.419225] env[62070]: DEBUG oslo_concurrency.lockutils [None req-95a98976-70b0-4bbd-a6bf-2286c5bf4562 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Acquiring lock "refresh_cache-fb054a32-c1aa-4884-a087-da5ad34cf3c4" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 913.419461] env[62070]: DEBUG oslo_concurrency.lockutils [None req-95a98976-70b0-4bbd-a6bf-2286c5bf4562 tempest-ServerDiskConfigTestJSON-1824612993 
tempest-ServerDiskConfigTestJSON-1824612993-project-member] Acquired lock "refresh_cache-fb054a32-c1aa-4884-a087-da5ad34cf3c4" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 913.419675] env[62070]: DEBUG nova.network.neutron [None req-95a98976-70b0-4bbd-a6bf-2286c5bf4562 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: fb054a32-c1aa-4884-a087-da5ad34cf3c4] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 913.558939] env[62070]: DEBUG oslo_vmware.api [None req-ed858abe-6e77-488f-b72a-5a07c1e7c264 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Task: {'id': task-1122046, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.060275} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 913.561835] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-ed858abe-6e77-488f-b72a-5a07c1e7c264 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: 3d699ce5-4d21-48f3-8f17-0cd49aebf109] Extended root virtual disk {{(pid=62070) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 913.562507] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8319c76a-12c6-478a-87eb-030b8605c1ec {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.587580] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-ed858abe-6e77-488f-b72a-5a07c1e7c264 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: 3d699ce5-4d21-48f3-8f17-0cd49aebf109] Reconfiguring VM instance instance-00000050 to attach disk [datastore1] 3d699ce5-4d21-48f3-8f17-0cd49aebf109/3d699ce5-4d21-48f3-8f17-0cd49aebf109.vmdk or device None with type sparse {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 913.590396] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f3d81685-3f1c-4c4c-8896-8580cc598bad {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.610500] env[62070]: DEBUG oslo_vmware.api [None req-ed858abe-6e77-488f-b72a-5a07c1e7c264 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Waiting for the task: (returnval){ [ 913.610500] env[62070]: value = "task-1122047" [ 913.610500] env[62070]: _type = "Task" [ 913.610500] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 913.621643] env[62070]: DEBUG oslo_vmware.api [None req-ed858abe-6e77-488f-b72a-5a07c1e7c264 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Task: {'id': task-1122047, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 913.672623] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e95c88d-c878-4df0-b78b-aeeddc78e227 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.680684] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fea8174-170c-4c99-bedc-5abaf2a1f7c8 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.715071] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae130482-34d8-4214-b65f-4f5d609c691b {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.723877] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a19ea35-d909-4012-8858-5ffd337ecb61 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.740157] env[62070]: DEBUG nova.compute.provider_tree [None req-61a3dc63-b62e-4c66-aaff-d9a9f2541f31 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Updating inventory in ProviderTree for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 913.741562] env[62070]: DEBUG oslo_concurrency.lockutils [None req-4ed0d70b-94fb-4c1c-826a-6f09c66aec65 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Lock "62758a38-4819-4d5a-97ed-db6c9ceb97bf" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 2.324s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 914.121946] env[62070]: DEBUG oslo_vmware.api [None req-ed858abe-6e77-488f-b72a-5a07c1e7c264 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Task: {'id': task-1122047, 'name': ReconfigVM_Task, 'duration_secs': 0.298581} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 914.122252] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-ed858abe-6e77-488f-b72a-5a07c1e7c264 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: 3d699ce5-4d21-48f3-8f17-0cd49aebf109] Reconfigured VM instance instance-00000050 to attach disk [datastore1] 3d699ce5-4d21-48f3-8f17-0cd49aebf109/3d699ce5-4d21-48f3-8f17-0cd49aebf109.vmdk or device None with type sparse {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 914.123270] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1989cc0a-f1bb-4d91-a2fb-ce212397e024 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.132254] env[62070]: DEBUG oslo_vmware.api [None req-ed858abe-6e77-488f-b72a-5a07c1e7c264 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Waiting for the task: (returnval){ [ 914.132254] env[62070]: value = "task-1122048" [ 914.132254] env[62070]: _type = "Task" [ 914.132254] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 914.145135] env[62070]: DEBUG oslo_vmware.api [None req-ed858abe-6e77-488f-b72a-5a07c1e7c264 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Task: {'id': task-1122048, 'name': Rename_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 914.163577] env[62070]: DEBUG nova.network.neutron [None req-95a98976-70b0-4bbd-a6bf-2286c5bf4562 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: fb054a32-c1aa-4884-a087-da5ad34cf3c4] Updating instance_info_cache with network_info: [{"id": "32f47285-7ff0-405e-849d-27e73999e359", "address": "fa:16:3e:dd:6e:ec", "network": {"id": "4888f989-958d-49ff-bf5a-06873e4cc624", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-906255456-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d079c0ef3ed745fcaf69dc728dca4466", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7e0240aa-a694-48fc-a0f9-6f2d3e71aa12", "external-id": "nsx-vlan-transportzone-249", "segmentation_id": 249, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap32f47285-7f", "ovs_interfaceid": "32f47285-7ff0-405e-849d-27e73999e359", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 914.275503] env[62070]: DEBUG nova.scheduler.client.report [None req-61a3dc63-b62e-4c66-aaff-d9a9f2541f31 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Updated inventory for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 with generation 109 in Placement from 
set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 914.275812] env[62070]: DEBUG nova.compute.provider_tree [None req-61a3dc63-b62e-4c66-aaff-d9a9f2541f31 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Updating resource provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 generation from 109 to 110 during operation: update_inventory {{(pid=62070) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 914.276074] env[62070]: DEBUG nova.compute.provider_tree [None req-61a3dc63-b62e-4c66-aaff-d9a9f2541f31 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Updating inventory in ProviderTree for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 914.642852] env[62070]: DEBUG oslo_vmware.api [None req-ed858abe-6e77-488f-b72a-5a07c1e7c264 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Task: {'id': task-1122048, 'name': Rename_Task, 'duration_secs': 0.151798} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 914.643166] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-ed858abe-6e77-488f-b72a-5a07c1e7c264 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: 3d699ce5-4d21-48f3-8f17-0cd49aebf109] Powering on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 914.643402] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e590dcc6-2e17-40d3-a00b-481689f5c568 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.650802] env[62070]: DEBUG oslo_vmware.api [None req-ed858abe-6e77-488f-b72a-5a07c1e7c264 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Waiting for the task: (returnval){ [ 914.650802] env[62070]: value = "task-1122049" [ 914.650802] env[62070]: _type = "Task" [ 914.650802] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 914.659141] env[62070]: DEBUG oslo_vmware.api [None req-ed858abe-6e77-488f-b72a-5a07c1e7c264 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Task: {'id': task-1122049, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 914.667408] env[62070]: DEBUG oslo_concurrency.lockutils [None req-95a98976-70b0-4bbd-a6bf-2286c5bf4562 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Releasing lock "refresh_cache-fb054a32-c1aa-4884-a087-da5ad34cf3c4" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 914.781713] env[62070]: DEBUG oslo_concurrency.lockutils [None req-61a3dc63-b62e-4c66-aaff-d9a9f2541f31 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.858s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 914.784370] env[62070]: DEBUG oslo_concurrency.lockutils [None req-ec72d549-7bd3-4594-9c1e-1e62a799962e tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 27.588s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 914.785023] env[62070]: DEBUG nova.objects.instance [None req-ec72d549-7bd3-4594-9c1e-1e62a799962e tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] Lazy-loading 'resources' on Instance uuid 61ab347d-1342-4f59-8955-10d575993b77 {{(pid=62070) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 914.799803] env[62070]: INFO nova.scheduler.client.report [None req-61a3dc63-b62e-4c66-aaff-d9a9f2541f31 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Deleted allocations for instance 58146b84-7589-4f21-bdab-605cee535e55 [ 915.014859] env[62070]: DEBUG oslo_concurrency.lockutils [None req-40f8be5d-d37a-44a5-98e4-ab20fc83851e tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Acquiring lock "e850734f-c49c-46d7-87ab-b0d6bed89d9b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 915.015888] env[62070]: DEBUG oslo_concurrency.lockutils [None req-40f8be5d-d37a-44a5-98e4-ab20fc83851e tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Lock "e850734f-c49c-46d7-87ab-b0d6bed89d9b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 915.113583] env[62070]: DEBUG oslo_concurrency.lockutils [None req-87b139b2-90a5-4b70-b835-8cb543d252a2 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Acquiring lock "2c58db1d-405f-4489-85db-c74723be4a8d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 915.113741] env[62070]: DEBUG oslo_concurrency.lockutils [None req-87b139b2-90a5-4b70-b835-8cb543d252a2 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Lock 
"2c58db1d-405f-4489-85db-c74723be4a8d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 915.161229] env[62070]: DEBUG oslo_vmware.api [None req-ed858abe-6e77-488f-b72a-5a07c1e7c264 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Task: {'id': task-1122049, 'name': PowerOnVM_Task, 'duration_secs': 0.454612} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 915.161507] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-ed858abe-6e77-488f-b72a-5a07c1e7c264 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: 3d699ce5-4d21-48f3-8f17-0cd49aebf109] Powered on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 915.161823] env[62070]: INFO nova.compute.manager [None req-ed858abe-6e77-488f-b72a-5a07c1e7c264 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: 3d699ce5-4d21-48f3-8f17-0cd49aebf109] Took 6.54 seconds to spawn the instance on the hypervisor. [ 915.162677] env[62070]: DEBUG nova.compute.manager [None req-ed858abe-6e77-488f-b72a-5a07c1e7c264 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: 3d699ce5-4d21-48f3-8f17-0cd49aebf109] Checking state {{(pid=62070) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 915.162982] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb29ae67-49cb-4876-ba6e-b09e50d686f2 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.195133] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d2b7d5c-d84d-42c6-8213-0563ce4be1fc {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.214985] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c72a0e66-05dd-4c39-b648-251890c42868 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.225361] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-95a98976-70b0-4bbd-a6bf-2286c5bf4562 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: fb054a32-c1aa-4884-a087-da5ad34cf3c4] Updating instance 'fb054a32-c1aa-4884-a087-da5ad34cf3c4' progress to 83 {{(pid=62070) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 915.310747] env[62070]: DEBUG oslo_concurrency.lockutils [None req-61a3dc63-b62e-4c66-aaff-d9a9f2541f31 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Lock "58146b84-7589-4f21-bdab-605cee535e55" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 33.588s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 915.518646] env[62070]: DEBUG nova.compute.manager [None req-40f8be5d-d37a-44a5-98e4-ab20fc83851e tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: e850734f-c49c-46d7-87ab-b0d6bed89d9b] 
Starting instance... {{(pid=62070) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 915.558827] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61e2682f-90b7-4a83-beaa-4973d683e350 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.567264] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f0d3524-84ae-4029-bedb-f018bce26b12 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.599036] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14ee38bf-e246-40c6-ba69-e14d42341aa9 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.606900] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-064c4aa4-1cf9-49bb-976f-4d17dc107b36 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.621112] env[62070]: DEBUG nova.compute.manager [None req-87b139b2-90a5-4b70-b835-8cb543d252a2 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 2c58db1d-405f-4489-85db-c74723be4a8d] Starting instance... {{(pid=62070) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 915.623803] env[62070]: DEBUG nova.compute.provider_tree [None req-ec72d549-7bd3-4594-9c1e-1e62a799962e tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 915.684421] env[62070]: INFO nova.compute.manager [None req-ed858abe-6e77-488f-b72a-5a07c1e7c264 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: 3d699ce5-4d21-48f3-8f17-0cd49aebf109] Took 42.20 seconds to build instance. [ 915.731236] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-95a98976-70b0-4bbd-a6bf-2286c5bf4562 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: fb054a32-c1aa-4884-a087-da5ad34cf3c4] Powering on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 915.731846] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a5ed62a6-c023-4786-a76f-4aa43fe531ec {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.740203] env[62070]: DEBUG oslo_vmware.api [None req-95a98976-70b0-4bbd-a6bf-2286c5bf4562 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Waiting for the task: (returnval){ [ 915.740203] env[62070]: value = "task-1122050" [ 915.740203] env[62070]: _type = "Task" [ 915.740203] env[62070]: } to complete. 
{{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 915.748498] env[62070]: DEBUG oslo_vmware.api [None req-95a98976-70b0-4bbd-a6bf-2286c5bf4562 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Task: {'id': task-1122050, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 915.833549] env[62070]: DEBUG oslo_concurrency.lockutils [None req-c00d74f8-0b67-48be-9e7d-16251e5d55eb tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Acquiring lock "328fbc92-8162-4e12-a02d-6e9cafe0c365" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 915.834073] env[62070]: DEBUG oslo_concurrency.lockutils [None req-c00d74f8-0b67-48be-9e7d-16251e5d55eb tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Lock "328fbc92-8162-4e12-a02d-6e9cafe0c365" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 915.834426] env[62070]: DEBUG oslo_concurrency.lockutils [None req-c00d74f8-0b67-48be-9e7d-16251e5d55eb tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Acquiring lock "328fbc92-8162-4e12-a02d-6e9cafe0c365-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 915.834767] env[62070]: DEBUG oslo_concurrency.lockutils [None req-c00d74f8-0b67-48be-9e7d-16251e5d55eb tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Lock "328fbc92-8162-4e12-a02d-6e9cafe0c365-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 915.835384] env[62070]: DEBUG oslo_concurrency.lockutils [None req-c00d74f8-0b67-48be-9e7d-16251e5d55eb tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Lock "328fbc92-8162-4e12-a02d-6e9cafe0c365-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 915.838897] env[62070]: INFO nova.compute.manager [None req-c00d74f8-0b67-48be-9e7d-16251e5d55eb tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 328fbc92-8162-4e12-a02d-6e9cafe0c365] Terminating instance [ 915.841358] env[62070]: DEBUG nova.compute.manager [None req-c00d74f8-0b67-48be-9e7d-16251e5d55eb tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 328fbc92-8162-4e12-a02d-6e9cafe0c365] Start destroying the instance on the hypervisor. 
{{(pid=62070) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 915.841650] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-c00d74f8-0b67-48be-9e7d-16251e5d55eb tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 328fbc92-8162-4e12-a02d-6e9cafe0c365] Destroying instance {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 915.842833] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0a9df62-2b46-487f-8b63-6a9d71b37db6 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.853123] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-c00d74f8-0b67-48be-9e7d-16251e5d55eb tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 328fbc92-8162-4e12-a02d-6e9cafe0c365] Powering off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 915.853814] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0df382d9-f809-480b-83b5-6cfc372d3653 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.862111] env[62070]: DEBUG oslo_vmware.api [None req-c00d74f8-0b67-48be-9e7d-16251e5d55eb tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Waiting for the task: (returnval){ [ 915.862111] env[62070]: value = "task-1122051" [ 915.862111] env[62070]: _type = "Task" [ 915.862111] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 915.873444] env[62070]: DEBUG oslo_vmware.api [None req-c00d74f8-0b67-48be-9e7d-16251e5d55eb tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Task: {'id': task-1122051, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 916.044783] env[62070]: DEBUG oslo_concurrency.lockutils [None req-40f8be5d-d37a-44a5-98e4-ab20fc83851e tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 916.127996] env[62070]: DEBUG nova.scheduler.client.report [None req-ec72d549-7bd3-4594-9c1e-1e62a799962e tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 916.146798] env[62070]: DEBUG oslo_concurrency.lockutils [None req-87b139b2-90a5-4b70-b835-8cb543d252a2 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 916.185325] env[62070]: DEBUG oslo_concurrency.lockutils [None req-ed858abe-6e77-488f-b72a-5a07c1e7c264 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Lock "3d699ce5-4d21-48f3-8f17-0cd49aebf109" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 43.715s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 916.253080] env[62070]: DEBUG oslo_vmware.api [None req-95a98976-70b0-4bbd-a6bf-2286c5bf4562 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Task: {'id': task-1122050, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 916.288339] env[62070]: DEBUG oslo_concurrency.lockutils [None req-1c8069d9-861b-46c0-a241-ffab59e77207 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Acquiring lock "519cad6a-ebe0-42db-a19e-27249b83436e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 916.288656] env[62070]: DEBUG oslo_concurrency.lockutils [None req-1c8069d9-861b-46c0-a241-ffab59e77207 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Lock "519cad6a-ebe0-42db-a19e-27249b83436e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 916.372117] env[62070]: DEBUG oslo_vmware.api [None req-c00d74f8-0b67-48be-9e7d-16251e5d55eb tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Task: {'id': task-1122051, 'name': PowerOffVM_Task, 'duration_secs': 0.186097} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 916.372385] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-c00d74f8-0b67-48be-9e7d-16251e5d55eb tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 328fbc92-8162-4e12-a02d-6e9cafe0c365] Powered off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 916.372560] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-c00d74f8-0b67-48be-9e7d-16251e5d55eb tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 328fbc92-8162-4e12-a02d-6e9cafe0c365] Unregistering the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 916.372806] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2800bdad-2a23-4e71-9f6f-850ebe2d940b {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.455935] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-c00d74f8-0b67-48be-9e7d-16251e5d55eb tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 328fbc92-8162-4e12-a02d-6e9cafe0c365] Unregistered the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 916.456158] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-c00d74f8-0b67-48be-9e7d-16251e5d55eb tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 328fbc92-8162-4e12-a02d-6e9cafe0c365] Deleting contents of the VM from datastore datastore1 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 916.456357] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-c00d74f8-0b67-48be-9e7d-16251e5d55eb tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Deleting the datastore file [datastore1] 328fbc92-8162-4e12-a02d-6e9cafe0c365 {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 916.456648] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with 
opID=oslo.vmware-135e7541-cffe-4aca-8c86-e245317a0aa6 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.463578] env[62070]: DEBUG oslo_vmware.api [None req-c00d74f8-0b67-48be-9e7d-16251e5d55eb tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Waiting for the task: (returnval){ [ 916.463578] env[62070]: value = "task-1122053" [ 916.463578] env[62070]: _type = "Task" [ 916.463578] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 916.472662] env[62070]: DEBUG oslo_vmware.api [None req-c00d74f8-0b67-48be-9e7d-16251e5d55eb tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Task: {'id': task-1122053, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 916.622866] env[62070]: DEBUG oslo_concurrency.lockutils [None req-4f5edc69-29d0-4c47-a9c1-354e8eddef6f tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Acquiring lock "3d699ce5-4d21-48f3-8f17-0cd49aebf109" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 916.623198] env[62070]: DEBUG oslo_concurrency.lockutils [None req-4f5edc69-29d0-4c47-a9c1-354e8eddef6f tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Lock "3d699ce5-4d21-48f3-8f17-0cd49aebf109" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 916.623463] env[62070]: DEBUG oslo_concurrency.lockutils [None req-4f5edc69-29d0-4c47-a9c1-354e8eddef6f tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Acquiring lock "3d699ce5-4d21-48f3-8f17-0cd49aebf109-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 916.623667] env[62070]: DEBUG oslo_concurrency.lockutils [None req-4f5edc69-29d0-4c47-a9c1-354e8eddef6f tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Lock "3d699ce5-4d21-48f3-8f17-0cd49aebf109-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 916.623849] env[62070]: DEBUG oslo_concurrency.lockutils [None req-4f5edc69-29d0-4c47-a9c1-354e8eddef6f tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Lock "3d699ce5-4d21-48f3-8f17-0cd49aebf109-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 916.626084] env[62070]: INFO nova.compute.manager [None req-4f5edc69-29d0-4c47-a9c1-354e8eddef6f tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: 3d699ce5-4d21-48f3-8f17-0cd49aebf109] Terminating instance [ 916.627985] env[62070]: DEBUG nova.compute.manager [None 
req-4f5edc69-29d0-4c47-a9c1-354e8eddef6f tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: 3d699ce5-4d21-48f3-8f17-0cd49aebf109] Start destroying the instance on the hypervisor. {{(pid=62070) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 916.628235] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-4f5edc69-29d0-4c47-a9c1-354e8eddef6f tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: 3d699ce5-4d21-48f3-8f17-0cd49aebf109] Destroying instance {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 916.629078] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c684769b-665e-45e4-903e-57909e07bf4b {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.637286] env[62070]: DEBUG oslo_concurrency.lockutils [None req-ec72d549-7bd3-4594-9c1e-1e62a799962e tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.853s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 916.639216] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-4f5edc69-29d0-4c47-a9c1-354e8eddef6f tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: 3d699ce5-4d21-48f3-8f17-0cd49aebf109] Powering off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 916.639721] env[62070]: DEBUG oslo_concurrency.lockutils [None req-39228318-05e4-467b-92ec-aaea15cc9613 tempest-ServerGroupTestJSON-951679134 tempest-ServerGroupTestJSON-951679134-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 26.555s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 916.640551] env[62070]: DEBUG nova.objects.instance [None req-39228318-05e4-467b-92ec-aaea15cc9613 tempest-ServerGroupTestJSON-951679134 tempest-ServerGroupTestJSON-951679134-project-member] Lazy-loading 'resources' on Instance uuid 71c98ac8-4149-448b-bf0c-3bfdcc8f50ef {{(pid=62070) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 916.641029] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1449ec21-64d0-45ea-9719-f81c3f4b36d3 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.648558] env[62070]: DEBUG oslo_vmware.api [None req-4f5edc69-29d0-4c47-a9c1-354e8eddef6f tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Waiting for the task: (returnval){ [ 916.648558] env[62070]: value = "task-1122054" [ 916.648558] env[62070]: _type = "Task" [ 916.648558] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 916.658715] env[62070]: DEBUG oslo_vmware.api [None req-4f5edc69-29d0-4c47-a9c1-354e8eddef6f tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Task: {'id': task-1122054, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 916.659846] env[62070]: INFO nova.scheduler.client.report [None req-ec72d549-7bd3-4594-9c1e-1e62a799962e tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] Deleted allocations for instance 61ab347d-1342-4f59-8955-10d575993b77 [ 916.751912] env[62070]: DEBUG oslo_vmware.api [None req-95a98976-70b0-4bbd-a6bf-2286c5bf4562 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Task: {'id': task-1122050, 'name': PowerOnVM_Task, 'duration_secs': 0.595627} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 916.753027] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-95a98976-70b0-4bbd-a6bf-2286c5bf4562 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: fb054a32-c1aa-4884-a087-da5ad34cf3c4] Powered on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 916.753027] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-95a98976-70b0-4bbd-a6bf-2286c5bf4562 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: fb054a32-c1aa-4884-a087-da5ad34cf3c4] Updating instance 'fb054a32-c1aa-4884-a087-da5ad34cf3c4' progress to 100 {{(pid=62070) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 916.791826] env[62070]: DEBUG nova.compute.manager [None req-1c8069d9-861b-46c0-a241-ffab59e77207 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 519cad6a-ebe0-42db-a19e-27249b83436e] Starting instance... {{(pid=62070) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 916.974179] env[62070]: DEBUG oslo_vmware.api [None req-c00d74f8-0b67-48be-9e7d-16251e5d55eb tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Task: {'id': task-1122053, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.143154} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 916.974491] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-c00d74f8-0b67-48be-9e7d-16251e5d55eb tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Deleted the datastore file {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 916.974710] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-c00d74f8-0b67-48be-9e7d-16251e5d55eb tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 328fbc92-8162-4e12-a02d-6e9cafe0c365] Deleted contents of the VM from datastore datastore1 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 916.974920] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-c00d74f8-0b67-48be-9e7d-16251e5d55eb tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 328fbc92-8162-4e12-a02d-6e9cafe0c365] Instance destroyed {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 916.975236] env[62070]: INFO nova.compute.manager [None req-c00d74f8-0b67-48be-9e7d-16251e5d55eb tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 328fbc92-8162-4e12-a02d-6e9cafe0c365] Took 1.13 seconds to destroy the instance on the hypervisor. [ 916.975524] env[62070]: DEBUG oslo.service.loopingcall [None req-c00d74f8-0b67-48be-9e7d-16251e5d55eb tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 916.975779] env[62070]: DEBUG nova.compute.manager [-] [instance: 328fbc92-8162-4e12-a02d-6e9cafe0c365] Deallocating network for instance {{(pid=62070) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 916.975888] env[62070]: DEBUG nova.network.neutron [-] [instance: 328fbc92-8162-4e12-a02d-6e9cafe0c365] deallocate_for_instance() {{(pid=62070) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 917.168938] env[62070]: DEBUG oslo_vmware.api [None req-4f5edc69-29d0-4c47-a9c1-354e8eddef6f tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Task: {'id': task-1122054, 'name': PowerOffVM_Task, 'duration_secs': 0.181951} completed successfully. 
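The "Waiting for function ... _deallocate_network_with_retries to return" entry comes from oslo.service's looping-call machinery: Nova wraps the Neutron teardown in a retry so transient failures do not leak ports. One way to get that behaviour with the public RetryDecorator; the retry counts and the wrapped body here are illustrative assumptions:

    from oslo_service import loopingcall

    @loopingcall.RetryDecorator(max_retry_count=3, inc_sleep_time=2,
                                max_sleep_time=30, exceptions=(Exception,))
    def deallocate_network_with_retries(network_api, context, instance):
        # Each failed attempt sleeps a little longer (capped at 30s here)
        # before retrying; while the call is in flight the looping-call code
        # emits the "Waiting for function ... to return" debug line.
        network_api.deallocate_for_instance(context, instance)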
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 917.172400] env[62070]: DEBUG oslo_concurrency.lockutils [None req-ec72d549-7bd3-4594-9c1e-1e62a799962e tempest-ServersAdmin275Test-1225965599 tempest-ServersAdmin275Test-1225965599-project-member] Lock "61ab347d-1342-4f59-8955-10d575993b77" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 33.744s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 917.176426] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-4f5edc69-29d0-4c47-a9c1-354e8eddef6f tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: 3d699ce5-4d21-48f3-8f17-0cd49aebf109] Powered off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 917.176426] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-4f5edc69-29d0-4c47-a9c1-354e8eddef6f tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: 3d699ce5-4d21-48f3-8f17-0cd49aebf109] Unregistering the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 917.179163] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-693c4707-a755-4236-ac19-fe887fecfc25 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.253787] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-4f5edc69-29d0-4c47-a9c1-354e8eddef6f tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: 3d699ce5-4d21-48f3-8f17-0cd49aebf109] Unregistered the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 917.254234] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-4f5edc69-29d0-4c47-a9c1-354e8eddef6f tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: 3d699ce5-4d21-48f3-8f17-0cd49aebf109] Deleting contents of the VM from datastore datastore1 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 917.254481] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-4f5edc69-29d0-4c47-a9c1-354e8eddef6f tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Deleting the datastore file [datastore1] 3d699ce5-4d21-48f3-8f17-0cd49aebf109 {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 917.257361] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c163f7d6-c578-4087-8b85-8ca1432bb2fc {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.271825] env[62070]: DEBUG oslo_vmware.api [None req-4f5edc69-29d0-4c47-a9c1-354e8eddef6f tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Waiting for the task: (returnval){ [ 917.271825] env[62070]: value = "task-1122056" [ 917.271825] env[62070]: _type = "Task" [ 917.271825] env[62070]: } to complete. 
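Taken together, the entries above trace the vmwareapi destroy path for instance 3d699ce5-...: power the VM off, unregister it from vCenter, then delete its directory from the datastore. A compressed, hypothetical sketch of that sequence on top of oslo.vmware primitives (the helper name and the way the file manager and datacenter references are obtained are assumptions):

    def destroy_on_hypervisor(session, vm_ref, ds_path, dc_ref):
        # 1. Power off the VM (asynchronous task; see the earlier sketch).
        session.wait_for_task(
            session.invoke_api(session.vim, "PowerOffVM_Task", vm_ref))
        # 2. UnregisterVM removes the VM from vCenter's inventory but leaves
        #    its files on the datastore; the call itself is synchronous.
        session.invoke_api(session.vim, "UnregisterVM", vm_ref)
        # 3. Delete the VM's files (the "[datastore1] <instance uuid>" path in
        #    the log) through the FileManager; this is a task again.
        file_manager = session.vim.service_content.fileManager
        delete_task = session.invoke_api(
            session.vim, "DeleteDatastoreFile_Task", file_manager,
            name=str(ds_path), datacenter=dc_ref)
        session.wait_for_task(delete_task)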
{{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 917.282972] env[62070]: DEBUG oslo_vmware.api [None req-4f5edc69-29d0-4c47-a9c1-354e8eddef6f tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Task: {'id': task-1122056, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 917.318702] env[62070]: DEBUG oslo_concurrency.lockutils [None req-1c8069d9-861b-46c0-a241-ffab59e77207 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 917.433571] env[62070]: DEBUG nova.compute.manager [req-e13f537f-b1e0-49c7-b9a6-583c58682618 req-3c456f1d-f9ac-43f4-b0d6-97f09a403ef1 service nova] [instance: 328fbc92-8162-4e12-a02d-6e9cafe0c365] Received event network-vif-deleted-11a1a24a-57ad-4bbf-9f1d-58391009ddde {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 917.434016] env[62070]: INFO nova.compute.manager [req-e13f537f-b1e0-49c7-b9a6-583c58682618 req-3c456f1d-f9ac-43f4-b0d6-97f09a403ef1 service nova] [instance: 328fbc92-8162-4e12-a02d-6e9cafe0c365] Neutron deleted interface 11a1a24a-57ad-4bbf-9f1d-58391009ddde; detaching it from the instance and deleting it from the info cache [ 917.434016] env[62070]: DEBUG nova.network.neutron [req-e13f537f-b1e0-49c7-b9a6-583c58682618 req-3c456f1d-f9ac-43f4-b0d6-97f09a403ef1 service nova] [instance: 328fbc92-8162-4e12-a02d-6e9cafe0c365] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 917.517099] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d41aee6-5070-41a0-8e3b-09d939d07b1b {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.525269] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f33d1e0-3b5f-47a3-9fae-eaaa0fb8c78f {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.556894] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-039cb883-d6b7-4329-b804-9fc533b7535d {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.564737] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b68aebc1-593f-4e1a-a845-4e02fd81d351 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.578097] env[62070]: DEBUG nova.compute.provider_tree [None req-39228318-05e4-467b-92ec-aaea15cc9613 tempest-ServerGroupTestJSON-951679134 tempest-ServerGroupTestJSON-951679134-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 917.782922] env[62070]: DEBUG oslo_vmware.api [None req-4f5edc69-29d0-4c47-a9c1-354e8eddef6f tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Task: {'id': 
task-1122056, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.192605} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 917.782922] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-4f5edc69-29d0-4c47-a9c1-354e8eddef6f tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Deleted the datastore file {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 917.782922] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-4f5edc69-29d0-4c47-a9c1-354e8eddef6f tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: 3d699ce5-4d21-48f3-8f17-0cd49aebf109] Deleted contents of the VM from datastore datastore1 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 917.782922] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-4f5edc69-29d0-4c47-a9c1-354e8eddef6f tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: 3d699ce5-4d21-48f3-8f17-0cd49aebf109] Instance destroyed {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 917.782922] env[62070]: INFO nova.compute.manager [None req-4f5edc69-29d0-4c47-a9c1-354e8eddef6f tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: 3d699ce5-4d21-48f3-8f17-0cd49aebf109] Took 1.15 seconds to destroy the instance on the hypervisor. [ 917.782922] env[62070]: DEBUG oslo.service.loopingcall [None req-4f5edc69-29d0-4c47-a9c1-354e8eddef6f tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 917.782922] env[62070]: DEBUG nova.compute.manager [-] [instance: 3d699ce5-4d21-48f3-8f17-0cd49aebf109] Deallocating network for instance {{(pid=62070) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 917.783749] env[62070]: DEBUG nova.network.neutron [-] [instance: 3d699ce5-4d21-48f3-8f17-0cd49aebf109] deallocate_for_instance() {{(pid=62070) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 917.881549] env[62070]: DEBUG nova.network.neutron [-] [instance: 328fbc92-8162-4e12-a02d-6e9cafe0c365] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 917.939453] env[62070]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-19d66b06-f367-4028-8855-4fb2c3f75025 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.949845] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3c5aa82-9948-4b7c-b79b-96c436087930 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.984159] env[62070]: DEBUG nova.compute.manager [req-e13f537f-b1e0-49c7-b9a6-583c58682618 req-3c456f1d-f9ac-43f4-b0d6-97f09a403ef1 service nova] [instance: 328fbc92-8162-4e12-a02d-6e9cafe0c365] Detach interface failed, port_id=11a1a24a-57ad-4bbf-9f1d-58391009ddde, reason: Instance 328fbc92-8162-4e12-a02d-6e9cafe0c365 could not be found. 
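The network-vif-deleted handling above follows a simple shape: Neutron deletes the port, Nova prunes it from the instance's cached network info (leaving "network_info: []"), and the interface detach becomes a no-op once the backing VM is already gone, hence the "Detach interface failed ... could not be found" message. A purely hypothetical helper that mirrors that behaviour; none of these names are Nova's real API:

    import logging

    LOG = logging.getLogger(__name__)

    def on_network_vif_deleted(instance_uuid, port_id, info_cache, hypervisor_vms):
        # Drop the deleted port from the cached network info.
        cached = info_cache.get(instance_uuid, [])
        info_cache[instance_uuid] = [v for v in cached if v["id"] != port_id]
        # Only try to detach from the backing VM if it still exists.
        vm = hypervisor_vms.get(instance_uuid)
        if vm is None:
            LOG.debug("Detach interface failed, port_id=%s: instance %s "
                      "could not be found.", port_id, instance_uuid)
            return
        vm.detach_interface(port_id)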
{{(pid=62070) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 918.080901] env[62070]: DEBUG nova.scheduler.client.report [None req-39228318-05e4-467b-92ec-aaea15cc9613 tempest-ServerGroupTestJSON-951679134 tempest-ServerGroupTestJSON-951679134-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 918.384642] env[62070]: INFO nova.compute.manager [-] [instance: 328fbc92-8162-4e12-a02d-6e9cafe0c365] Took 1.41 seconds to deallocate network for instance. [ 918.571541] env[62070]: DEBUG nova.network.neutron [-] [instance: 3d699ce5-4d21-48f3-8f17-0cd49aebf109] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 918.586514] env[62070]: DEBUG oslo_concurrency.lockutils [None req-39228318-05e4-467b-92ec-aaea15cc9613 tempest-ServerGroupTestJSON-951679134 tempest-ServerGroupTestJSON-951679134-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.947s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 918.588969] env[62070]: DEBUG oslo_concurrency.lockutils [None req-8a0f2bb4-1ee9-407f-978d-94801eeae730 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 28.055s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 918.590572] env[62070]: INFO nova.compute.claims [None req-8a0f2bb4-1ee9-407f-978d-94801eeae730 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] [instance: cf52cee8-874e-44e8-a36e-49ac20f3e312] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 918.610572] env[62070]: INFO nova.scheduler.client.report [None req-39228318-05e4-467b-92ec-aaea15cc9613 tempest-ServerGroupTestJSON-951679134 tempest-ServerGroupTestJSON-951679134-project-member] Deleted allocations for instance 71c98ac8-4149-448b-bf0c-3bfdcc8f50ef [ 918.801364] env[62070]: DEBUG oslo_concurrency.lockutils [None req-89816dd1-86bd-4fe3-a58a-8a014d471f3f tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Acquiring lock "fb054a32-c1aa-4884-a087-da5ad34cf3c4" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 918.801659] env[62070]: DEBUG oslo_concurrency.lockutils [None req-89816dd1-86bd-4fe3-a58a-8a014d471f3f tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Lock "fb054a32-c1aa-4884-a087-da5ad34cf3c4" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=62070) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 918.801939] env[62070]: DEBUG nova.compute.manager [None req-89816dd1-86bd-4fe3-a58a-8a014d471f3f tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: fb054a32-c1aa-4884-a087-da5ad34cf3c4] Going to confirm migration 2 {{(pid=62070) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:4783}} [ 918.893140] env[62070]: DEBUG oslo_concurrency.lockutils [None req-c00d74f8-0b67-48be-9e7d-16251e5d55eb tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 919.075177] env[62070]: INFO nova.compute.manager [-] [instance: 3d699ce5-4d21-48f3-8f17-0cd49aebf109] Took 1.29 seconds to deallocate network for instance. [ 919.118503] env[62070]: DEBUG oslo_concurrency.lockutils [None req-39228318-05e4-467b-92ec-aaea15cc9613 tempest-ServerGroupTestJSON-951679134 tempest-ServerGroupTestJSON-951679134-project-member] Lock "71c98ac8-4149-448b-bf0c-3bfdcc8f50ef" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 32.143s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 919.412277] env[62070]: DEBUG oslo_concurrency.lockutils [None req-89816dd1-86bd-4fe3-a58a-8a014d471f3f tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Acquiring lock "refresh_cache-fb054a32-c1aa-4884-a087-da5ad34cf3c4" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 919.412487] env[62070]: DEBUG oslo_concurrency.lockutils [None req-89816dd1-86bd-4fe3-a58a-8a014d471f3f tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Acquired lock "refresh_cache-fb054a32-c1aa-4884-a087-da5ad34cf3c4" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 919.412677] env[62070]: DEBUG nova.network.neutron [None req-89816dd1-86bd-4fe3-a58a-8a014d471f3f tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: fb054a32-c1aa-4884-a087-da5ad34cf3c4] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 919.412871] env[62070]: DEBUG nova.objects.instance [None req-89816dd1-86bd-4fe3-a58a-8a014d471f3f tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Lazy-loading 'info_cache' on Instance uuid fb054a32-c1aa-4884-a087-da5ad34cf3c4 {{(pid=62070) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 919.490796] env[62070]: DEBUG nova.compute.manager [req-802a82c8-e0ea-4906-af6f-f3534aaa0a7e req-90398eeb-5e0d-4663-964d-d470fdc12076 service nova] [instance: 3d699ce5-4d21-48f3-8f17-0cd49aebf109] Received event network-vif-deleted-4ee007f5-ba54-4f18-b072-e8c7119e6eb2 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 919.582031] env[62070]: DEBUG oslo_concurrency.lockutils [None req-4f5edc69-29d0-4c47-a9c1-354e8eddef6f tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Acquiring lock "compute_resources" by 
"nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 919.976401] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-989413fc-6460-4cc6-a74b-f163aea53d40 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.986905] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4035db02-bf21-44b7-858c-ea1096b77545 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.038293] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ab76d3a-bbfa-4420-b3f3-c94bffee6485 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.045849] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-416df2c6-a7b9-40ff-b722-2de625bfb255 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.060125] env[62070]: DEBUG nova.compute.provider_tree [None req-8a0f2bb4-1ee9-407f-978d-94801eeae730 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 920.562737] env[62070]: DEBUG nova.scheduler.client.report [None req-8a0f2bb4-1ee9-407f-978d-94801eeae730 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 920.788891] env[62070]: DEBUG nova.network.neutron [None req-89816dd1-86bd-4fe3-a58a-8a014d471f3f tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: fb054a32-c1aa-4884-a087-da5ad34cf3c4] Updating instance_info_cache with network_info: [{"id": "32f47285-7ff0-405e-849d-27e73999e359", "address": "fa:16:3e:dd:6e:ec", "network": {"id": "4888f989-958d-49ff-bf5a-06873e4cc624", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-906255456-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d079c0ef3ed745fcaf69dc728dca4466", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7e0240aa-a694-48fc-a0f9-6f2d3e71aa12", "external-id": 
"nsx-vlan-transportzone-249", "segmentation_id": 249, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap32f47285-7f", "ovs_interfaceid": "32f47285-7ff0-405e-849d-27e73999e359", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 921.076594] env[62070]: DEBUG oslo_concurrency.lockutils [None req-8a0f2bb4-1ee9-407f-978d-94801eeae730 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.486s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 921.076594] env[62070]: DEBUG nova.compute.manager [None req-8a0f2bb4-1ee9-407f-978d-94801eeae730 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] [instance: cf52cee8-874e-44e8-a36e-49ac20f3e312] Start building networks asynchronously for instance. {{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 921.080427] env[62070]: DEBUG oslo_concurrency.lockutils [None req-80bd56b8-16e3-42ee-acd0-234370ddd048 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 26.068s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 921.080427] env[62070]: DEBUG nova.objects.instance [None req-80bd56b8-16e3-42ee-acd0-234370ddd048 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Lazy-loading 'resources' on Instance uuid 963feecc-ff58-4cbb-8d6f-3f9035337087 {{(pid=62070) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 921.293522] env[62070]: DEBUG oslo_concurrency.lockutils [None req-89816dd1-86bd-4fe3-a58a-8a014d471f3f tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Releasing lock "refresh_cache-fb054a32-c1aa-4884-a087-da5ad34cf3c4" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 921.293522] env[62070]: DEBUG nova.objects.instance [None req-89816dd1-86bd-4fe3-a58a-8a014d471f3f tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Lazy-loading 'migration_context' on Instance uuid fb054a32-c1aa-4884-a087-da5ad34cf3c4 {{(pid=62070) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 921.584942] env[62070]: DEBUG nova.compute.utils [None req-8a0f2bb4-1ee9-407f-978d-94801eeae730 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Using /dev/sd instead of None {{(pid=62070) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 921.590213] env[62070]: DEBUG nova.compute.manager [None req-8a0f2bb4-1ee9-407f-978d-94801eeae730 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] [instance: cf52cee8-874e-44e8-a36e-49ac20f3e312] Allocating IP information in the background. 
{{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 921.590213] env[62070]: DEBUG nova.network.neutron [None req-8a0f2bb4-1ee9-407f-978d-94801eeae730 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] [instance: cf52cee8-874e-44e8-a36e-49ac20f3e312] allocate_for_instance() {{(pid=62070) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 921.645288] env[62070]: DEBUG nova.policy [None req-8a0f2bb4-1ee9-407f-978d-94801eeae730 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7534320dee8f486e90f5174aa94d00bd', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '925dff51764c4b56ae7ea05fbde2ecdd', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62070) authorize /opt/stack/nova/nova/policy.py:201}} [ 921.799170] env[62070]: DEBUG nova.objects.base [None req-89816dd1-86bd-4fe3-a58a-8a014d471f3f tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Object Instance lazy-loaded attributes: info_cache,migration_context {{(pid=62070) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 921.799170] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2d6ea5c-008c-4abb-b105-8f41c72c9675 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.827049] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-28443f19-2ea2-4c40-8c7c-e49adedf4d3a {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.832756] env[62070]: DEBUG oslo_vmware.api [None req-89816dd1-86bd-4fe3-a58a-8a014d471f3f tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Waiting for the task: (returnval){ [ 921.832756] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]5235a3a6-f09b-407c-5ca4-87a13821ac7a" [ 921.832756] env[62070]: _type = "Task" [ 921.832756] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 921.840678] env[62070]: DEBUG oslo_vmware.api [None req-89816dd1-86bd-4fe3-a58a-8a014d471f3f tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]5235a3a6-f09b-407c-5ca4-87a13821ac7a, 'name': SearchDatastore_Task} progress is 0%. 
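The "Policy check for network:attach_external_network failed" entry is an oslo.policy decision: the tempest credentials only carry the reader and member roles, so the check fails quietly and the port is created on an ordinary tenant network instead. A hedged, stand-alone sketch of that kind of check; the rule string registered below is an assumption, not Nova's shipped default:

    from oslo_config import cfg
    from oslo_policy import policy

    enforcer = policy.Enforcer(cfg.CONF)
    enforcer.register_default(
        policy.RuleDefault("network:attach_external_network", "role:admin"))

    def can_attach_external_network(credentials: dict) -> bool:
        # do_raise=False turns the check into a boolean instead of raising
        # PolicyNotAuthorized; a member-only token like the one logged above
        # comes back False.
        return enforcer.enforce("network:attach_external_network",
                                {}, credentials, do_raise=False)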
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 921.967855] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-408c1626-e5dc-4883-8c35-3215fd65b3e1 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.979442] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4afbb2b-3ae7-479d-9698-16183878a596 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.014635] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b7c7deb-cb1a-4a75-8ed7-0a2cbd2e2bd6 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.022381] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38767921-4e7d-4005-ac86-1de516680bc1 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.035774] env[62070]: DEBUG nova.compute.provider_tree [None req-80bd56b8-16e3-42ee-acd0-234370ddd048 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 922.091116] env[62070]: DEBUG nova.network.neutron [None req-8a0f2bb4-1ee9-407f-978d-94801eeae730 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] [instance: cf52cee8-874e-44e8-a36e-49ac20f3e312] Successfully created port: c06feb60-bfb1-47ea-8764-52391d9b0b78 {{(pid=62070) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 922.093519] env[62070]: DEBUG nova.compute.manager [None req-8a0f2bb4-1ee9-407f-978d-94801eeae730 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] [instance: cf52cee8-874e-44e8-a36e-49ac20f3e312] Start building block device mappings for instance. {{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 922.345026] env[62070]: DEBUG oslo_vmware.api [None req-89816dd1-86bd-4fe3-a58a-8a014d471f3f tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]5235a3a6-f09b-407c-5ca4-87a13821ac7a, 'name': SearchDatastore_Task, 'duration_secs': 0.008733} completed successfully. 
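The repeated "Inventory has not changed for provider 21c7c111-... based on inventory data" entries mean the resource tracker rebuilt the provider's inventory and found it identical to what Placement already stores, so no update call is needed. The payload is the plain nested dict shown in the log; a minimal sketch of that comparison over the same shape:

    current = {
        "VCPU": {"total": 48, "reserved": 0, "min_unit": 1, "max_unit": 16,
                 "step_size": 1, "allocation_ratio": 4.0},
        "MEMORY_MB": {"total": 196590, "reserved": 512, "min_unit": 1,
                      "max_unit": 65530, "step_size": 1, "allocation_ratio": 1.0},
        "DISK_GB": {"total": 400, "reserved": 0, "min_unit": 1, "max_unit": 169,
                    "step_size": 1, "allocation_ratio": 1.0},
    }

    def inventory_changed(reported: dict, desired: dict) -> bool:
        # Placement inventories are plain nested dicts, so structural equality
        # is enough; "has not changed" in the log corresponds to False here.
        return reported != desired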
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 922.345402] env[62070]: DEBUG oslo_concurrency.lockutils [None req-89816dd1-86bd-4fe3-a58a-8a014d471f3f tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 922.539840] env[62070]: DEBUG nova.scheduler.client.report [None req-80bd56b8-16e3-42ee-acd0-234370ddd048 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 922.690747] env[62070]: DEBUG oslo_concurrency.lockutils [None req-9ee219f9-d47e-4d63-bcb6-65b63094a397 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Acquiring lock "4a5f644a-1670-4c6b-a762-f87f1ee4cce5" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 922.691033] env[62070]: DEBUG oslo_concurrency.lockutils [None req-9ee219f9-d47e-4d63-bcb6-65b63094a397 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Lock "4a5f644a-1670-4c6b-a762-f87f1ee4cce5" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 923.046803] env[62070]: DEBUG oslo_concurrency.lockutils [None req-80bd56b8-16e3-42ee-acd0-234370ddd048 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.967s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 923.049352] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6c7a189b-a3d7-40da-a908-7213e68deb41 tempest-ServerAddressesNegativeTestJSON-1593054557 tempest-ServerAddressesNegativeTestJSON-1593054557-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 27.509s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 923.049940] env[62070]: DEBUG nova.objects.instance [None req-6c7a189b-a3d7-40da-a908-7213e68deb41 tempest-ServerAddressesNegativeTestJSON-1593054557 tempest-ServerAddressesNegativeTestJSON-1593054557-project-member] Lazy-loading 'resources' on Instance uuid d2cfcfac-4f15-4b16-9046-76722ee2e39b {{(pid=62070) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 923.072706] env[62070]: INFO nova.scheduler.client.report [None req-80bd56b8-16e3-42ee-acd0-234370ddd048 tempest-ServerRescueTestJSON-626358069 
tempest-ServerRescueTestJSON-626358069-project-member] Deleted allocations for instance 963feecc-ff58-4cbb-8d6f-3f9035337087 [ 923.104166] env[62070]: DEBUG nova.compute.manager [None req-8a0f2bb4-1ee9-407f-978d-94801eeae730 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] [instance: cf52cee8-874e-44e8-a36e-49ac20f3e312] Start spawning the instance on the hypervisor. {{(pid=62070) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 923.134028] env[62070]: DEBUG nova.virt.hardware [None req-8a0f2bb4-1ee9-407f-978d-94801eeae730 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T09:21:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T09:21:17Z,direct_url=,disk_format='vmdk',id=43ea607c-7ece-4601-9b11-75c6a16aa7dd,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9d42cb2bbadf40d6b35f237f71234611',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T09:21:18Z,virtual_size=,visibility=), allow threads: False {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 923.134028] env[62070]: DEBUG nova.virt.hardware [None req-8a0f2bb4-1ee9-407f-978d-94801eeae730 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Flavor limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 923.134028] env[62070]: DEBUG nova.virt.hardware [None req-8a0f2bb4-1ee9-407f-978d-94801eeae730 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Image limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 923.134028] env[62070]: DEBUG nova.virt.hardware [None req-8a0f2bb4-1ee9-407f-978d-94801eeae730 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Flavor pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 923.134028] env[62070]: DEBUG nova.virt.hardware [None req-8a0f2bb4-1ee9-407f-978d-94801eeae730 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Image pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 923.134028] env[62070]: DEBUG nova.virt.hardware [None req-8a0f2bb4-1ee9-407f-978d-94801eeae730 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 923.134028] env[62070]: DEBUG nova.virt.hardware [None req-8a0f2bb4-1ee9-407f-978d-94801eeae730 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62070) 
_get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 923.138172] env[62070]: DEBUG nova.virt.hardware [None req-8a0f2bb4-1ee9-407f-978d-94801eeae730 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 923.138172] env[62070]: DEBUG nova.virt.hardware [None req-8a0f2bb4-1ee9-407f-978d-94801eeae730 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Got 1 possible topologies {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 923.138323] env[62070]: DEBUG nova.virt.hardware [None req-8a0f2bb4-1ee9-407f-978d-94801eeae730 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 923.140013] env[62070]: DEBUG nova.virt.hardware [None req-8a0f2bb4-1ee9-407f-978d-94801eeae730 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 923.140013] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a153158-f9bd-4467-98a0-ca7c479bc346 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.148187] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-941403c0-3332-4817-a484-57c65cb0e749 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.193607] env[62070]: INFO nova.compute.manager [None req-9ee219f9-d47e-4d63-bcb6-65b63094a397 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] [instance: 4a5f644a-1670-4c6b-a762-f87f1ee4cce5] Detaching volume 0a76875f-e286-469c-8be6-887850e6ce36 [ 923.230860] env[62070]: INFO nova.virt.block_device [None req-9ee219f9-d47e-4d63-bcb6-65b63094a397 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] [instance: 4a5f644a-1670-4c6b-a762-f87f1ee4cce5] Attempting to driver detach volume 0a76875f-e286-469c-8be6-887850e6ce36 from mountpoint /dev/sdb [ 923.231260] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-9ee219f9-d47e-4d63-bcb6-65b63094a397 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] [instance: 4a5f644a-1670-4c6b-a762-f87f1ee4cce5] Volume detach. 
Driver type: vmdk {{(pid=62070) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 923.231515] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-9ee219f9-d47e-4d63-bcb6-65b63094a397 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] [instance: 4a5f644a-1670-4c6b-a762-f87f1ee4cce5] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-245439', 'volume_id': '0a76875f-e286-469c-8be6-887850e6ce36', 'name': 'volume-0a76875f-e286-469c-8be6-887850e6ce36', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '4a5f644a-1670-4c6b-a762-f87f1ee4cce5', 'attached_at': '', 'detached_at': '', 'volume_id': '0a76875f-e286-469c-8be6-887850e6ce36', 'serial': '0a76875f-e286-469c-8be6-887850e6ce36'} {{(pid=62070) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 923.232432] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1defeb1-457f-4b84-8e6b-64306a9f8391 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.258365] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa9086fe-a5b5-4812-8b0f-271af5ffabf4 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.265701] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b68b6f6-96c0-48c1-98e4-259787a34163 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.287853] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c06a8a9a-7dd4-4802-85e1-1e8d868b5bd7 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.304906] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-9ee219f9-d47e-4d63-bcb6-65b63094a397 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] The volume has not been displaced from its original location: [datastore2] volume-0a76875f-e286-469c-8be6-887850e6ce36/volume-0a76875f-e286-469c-8be6-887850e6ce36.vmdk. No consolidation needed. 
{{(pid=62070) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 923.310563] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-9ee219f9-d47e-4d63-bcb6-65b63094a397 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] [instance: 4a5f644a-1670-4c6b-a762-f87f1ee4cce5] Reconfiguring VM instance instance-0000003f to detach disk 2001 {{(pid=62070) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 923.310757] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-450d64b8-e15e-4b14-b060-6ab8c9f49e65 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.330769] env[62070]: DEBUG oslo_vmware.api [None req-9ee219f9-d47e-4d63-bcb6-65b63094a397 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Waiting for the task: (returnval){ [ 923.330769] env[62070]: value = "task-1122057" [ 923.330769] env[62070]: _type = "Task" [ 923.330769] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 923.339060] env[62070]: DEBUG oslo_vmware.api [None req-9ee219f9-d47e-4d63-bcb6-65b63094a397 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Task: {'id': task-1122057, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 923.581220] env[62070]: DEBUG oslo_concurrency.lockutils [None req-80bd56b8-16e3-42ee-acd0-234370ddd048 tempest-ServerRescueTestJSON-626358069 tempest-ServerRescueTestJSON-626358069-project-member] Lock "963feecc-ff58-4cbb-8d6f-3f9035337087" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 33.580s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 923.842211] env[62070]: DEBUG oslo_vmware.api [None req-9ee219f9-d47e-4d63-bcb6-65b63094a397 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Task: {'id': task-1122057, 'name': ReconfigVM_Task, 'duration_secs': 0.36315} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 923.844166] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-9ee219f9-d47e-4d63-bcb6-65b63094a397 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] [instance: 4a5f644a-1670-4c6b-a762-f87f1ee4cce5] Reconfigured VM instance instance-0000003f to detach disk 2001 {{(pid=62070) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 923.848979] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-76b586f3-c40b-4b44-ac99-53d1a319ce03 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.866262] env[62070]: DEBUG oslo_vmware.api [None req-9ee219f9-d47e-4d63-bcb6-65b63094a397 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Waiting for the task: (returnval){ [ 923.866262] env[62070]: value = "task-1122058" [ 923.866262] env[62070]: _type = "Task" [ 923.866262] env[62070]: } to complete. 
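Detaching the volume above is done by reconfiguring the VM: build a config spec whose deviceChange removes the virtual disk (device key 2001 in the log) without touching its backing VMDK, then run ReconfigVM_Task and wait for it. A hedged sketch of that spec-building in the usual suds/oslo.vmware idiom; attribute spellings follow the vSphere SDK and are not copied from Nova:

    def detach_disk(session, vm_ref, disk_device):
        factory = session.vim.client.factory
        spec = factory.create("ns0:VirtualMachineConfigSpec")
        change = factory.create("ns0:VirtualDeviceConfigSpec")
        change.operation = "remove"    # unplug the device from the VM...
        change.device = disk_device    # ...but leave the vmdk on the datastore
        spec.deviceChange = [change]   # (no fileOperation means no file delete)
        task = session.invoke_api(session.vim, "ReconfigVM_Task",
                                  vm_ref, spec=spec)
        session.wait_for_task(task)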
{{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 923.876421] env[62070]: DEBUG oslo_vmware.api [None req-9ee219f9-d47e-4d63-bcb6-65b63094a397 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Task: {'id': task-1122058, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 923.887311] env[62070]: DEBUG nova.compute.manager [req-561c7c86-4813-4248-9043-1cb872576927 req-d7d0901c-9dcd-415c-956f-6c4c885fa4a7 service nova] [instance: cf52cee8-874e-44e8-a36e-49ac20f3e312] Received event network-vif-plugged-c06feb60-bfb1-47ea-8764-52391d9b0b78 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 923.887674] env[62070]: DEBUG oslo_concurrency.lockutils [req-561c7c86-4813-4248-9043-1cb872576927 req-d7d0901c-9dcd-415c-956f-6c4c885fa4a7 service nova] Acquiring lock "cf52cee8-874e-44e8-a36e-49ac20f3e312-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 923.887964] env[62070]: DEBUG oslo_concurrency.lockutils [req-561c7c86-4813-4248-9043-1cb872576927 req-d7d0901c-9dcd-415c-956f-6c4c885fa4a7 service nova] Lock "cf52cee8-874e-44e8-a36e-49ac20f3e312-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 923.888249] env[62070]: DEBUG oslo_concurrency.lockutils [req-561c7c86-4813-4248-9043-1cb872576927 req-d7d0901c-9dcd-415c-956f-6c4c885fa4a7 service nova] Lock "cf52cee8-874e-44e8-a36e-49ac20f3e312-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 923.888480] env[62070]: DEBUG nova.compute.manager [req-561c7c86-4813-4248-9043-1cb872576927 req-d7d0901c-9dcd-415c-956f-6c4c885fa4a7 service nova] [instance: cf52cee8-874e-44e8-a36e-49ac20f3e312] No waiting events found dispatching network-vif-plugged-c06feb60-bfb1-47ea-8764-52391d9b0b78 {{(pid=62070) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 923.888797] env[62070]: WARNING nova.compute.manager [req-561c7c86-4813-4248-9043-1cb872576927 req-d7d0901c-9dcd-415c-956f-6c4c885fa4a7 service nova] [instance: cf52cee8-874e-44e8-a36e-49ac20f3e312] Received unexpected event network-vif-plugged-c06feb60-bfb1-47ea-8764-52391d9b0b78 for instance with vm_state building and task_state spawning. 
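The warning above is benign: the port for cf52cee8-... became active before the driver registered a waiter for it, so there was no event to pop when network-vif-plugged arrived. The underlying pattern is an event registry keyed by instance and event name; the sketch below is purely illustrative, the class and method names are invented and only the behaviour matches the log:

    import threading
    from collections import defaultdict

    class InstanceEvents:
        def __init__(self):
            self._waiters = defaultdict(dict)  # instance uuid -> {event: Event}

        def prepare(self, instance_uuid, event_name):
            # Called by the driver before it starts waiting for the event.
            ev = threading.Event()
            self._waiters[instance_uuid][event_name] = ev
            return ev

        def pop(self, instance_uuid, event_name):
            # Called when Neutron reports the event. Returns None if nobody
            # registered a waiter yet, which is when the manager logs
            # "No waiting events found" and the "unexpected event" warning.
            ev = self._waiters[instance_uuid].pop(event_name, None)
            if ev is not None:
                ev.set()
            return ev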
[ 923.957478] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28fcc830-5dc4-476e-9ab7-2710780097dc {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.965628] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33402eed-e82f-4e67-ba74-208e4362806e {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.001012] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4909a2f-f3c2-40f5-97c3-34b118b08dc5 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.010415] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-340a2e43-76a6-442f-9b38-e3aa6b620847 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.029429] env[62070]: DEBUG nova.compute.provider_tree [None req-6c7a189b-a3d7-40da-a908-7213e68deb41 tempest-ServerAddressesNegativeTestJSON-1593054557 tempest-ServerAddressesNegativeTestJSON-1593054557-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 924.089387] env[62070]: DEBUG nova.network.neutron [None req-8a0f2bb4-1ee9-407f-978d-94801eeae730 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] [instance: cf52cee8-874e-44e8-a36e-49ac20f3e312] Successfully updated port: c06feb60-bfb1-47ea-8764-52391d9b0b78 {{(pid=62070) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 924.376445] env[62070]: DEBUG oslo_vmware.api [None req-9ee219f9-d47e-4d63-bcb6-65b63094a397 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Task: {'id': task-1122058, 'name': ReconfigVM_Task, 'duration_secs': 0.137342} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 924.376829] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-9ee219f9-d47e-4d63-bcb6-65b63094a397 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] [instance: 4a5f644a-1670-4c6b-a762-f87f1ee4cce5] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-245439', 'volume_id': '0a76875f-e286-469c-8be6-887850e6ce36', 'name': 'volume-0a76875f-e286-469c-8be6-887850e6ce36', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '4a5f644a-1670-4c6b-a762-f87f1ee4cce5', 'attached_at': '', 'detached_at': '', 'volume_id': '0a76875f-e286-469c-8be6-887850e6ce36', 'serial': '0a76875f-e286-469c-8be6-887850e6ce36'} {{(pid=62070) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 924.533219] env[62070]: DEBUG nova.scheduler.client.report [None req-6c7a189b-a3d7-40da-a908-7213e68deb41 tempest-ServerAddressesNegativeTestJSON-1593054557 tempest-ServerAddressesNegativeTestJSON-1593054557-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 924.594031] env[62070]: DEBUG oslo_concurrency.lockutils [None req-8a0f2bb4-1ee9-407f-978d-94801eeae730 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Acquiring lock "refresh_cache-cf52cee8-874e-44e8-a36e-49ac20f3e312" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 924.594031] env[62070]: DEBUG oslo_concurrency.lockutils [None req-8a0f2bb4-1ee9-407f-978d-94801eeae730 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Acquired lock "refresh_cache-cf52cee8-874e-44e8-a36e-49ac20f3e312" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 924.594031] env[62070]: DEBUG nova.network.neutron [None req-8a0f2bb4-1ee9-407f-978d-94801eeae730 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] [instance: cf52cee8-874e-44e8-a36e-49ac20f3e312] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 924.950868] env[62070]: DEBUG nova.objects.instance [None req-9ee219f9-d47e-4d63-bcb6-65b63094a397 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Lazy-loading 'flavor' on Instance uuid 4a5f644a-1670-4c6b-a762-f87f1ee4cce5 {{(pid=62070) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 925.038766] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6c7a189b-a3d7-40da-a908-7213e68deb41 tempest-ServerAddressesNegativeTestJSON-1593054557 tempest-ServerAddressesNegativeTestJSON-1593054557-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.989s {{(pid=62070) 
inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 925.043689] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 28.492s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 925.047274] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.002s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 925.047274] env[62070]: DEBUG nova.compute.resource_tracker [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62070) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 925.047274] env[62070]: DEBUG oslo_concurrency.lockutils [None req-bcbfa2d3-8518-4c21-a4ab-0c0fd684eb49 tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 24.459s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 925.047274] env[62070]: DEBUG nova.objects.instance [None req-bcbfa2d3-8518-4c21-a4ab-0c0fd684eb49 tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Lazy-loading 'resources' on Instance uuid 10672096-00ba-4481-8ab3-085a185076db {{(pid=62070) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 925.051720] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cca1eebe-2c8d-4102-865b-db58013bf281 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.063372] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56111c8c-34f3-4e8d-957b-e6478fad3f76 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.082670] env[62070]: INFO nova.scheduler.client.report [None req-6c7a189b-a3d7-40da-a908-7213e68deb41 tempest-ServerAddressesNegativeTestJSON-1593054557 tempest-ServerAddressesNegativeTestJSON-1593054557-project-member] Deleted allocations for instance d2cfcfac-4f15-4b16-9046-76722ee2e39b [ 925.084286] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbf1f3c4-909c-47c5-a055-2ae1882bef5f {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.096198] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d01b236d-1810-4439-839d-dda3997338e3 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.136302] env[62070]: DEBUG nova.compute.resource_tracker [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179382MB free_disk=169GB free_vcpus=48 
pci_devices=None {{(pid=62070) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 925.136449] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 925.161935] env[62070]: DEBUG nova.network.neutron [None req-8a0f2bb4-1ee9-407f-978d-94801eeae730 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] [instance: cf52cee8-874e-44e8-a36e-49ac20f3e312] Instance cache missing network info. {{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 925.382738] env[62070]: DEBUG nova.network.neutron [None req-8a0f2bb4-1ee9-407f-978d-94801eeae730 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] [instance: cf52cee8-874e-44e8-a36e-49ac20f3e312] Updating instance_info_cache with network_info: [{"id": "c06feb60-bfb1-47ea-8764-52391d9b0b78", "address": "fa:16:3e:66:3c:b1", "network": {"id": "48dc51c7-cfa4-452e-9d72-2968d9a40dfa", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-274800531-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "925dff51764c4b56ae7ea05fbde2ecdd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2c7c1b46-cb81-45da-b5aa-7905d4da5854", "external-id": "nsx-vlan-transportzone-15", "segmentation_id": 15, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc06feb60-bf", "ovs_interfaceid": "c06feb60-bfb1-47ea-8764-52391d9b0b78", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 925.599461] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6c7a189b-a3d7-40da-a908-7213e68deb41 tempest-ServerAddressesNegativeTestJSON-1593054557 tempest-ServerAddressesNegativeTestJSON-1593054557-project-member] Lock "d2cfcfac-4f15-4b16-9046-76722ee2e39b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 33.066s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 925.886826] env[62070]: DEBUG oslo_concurrency.lockutils [None req-8a0f2bb4-1ee9-407f-978d-94801eeae730 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Releasing lock "refresh_cache-cf52cee8-874e-44e8-a36e-49ac20f3e312" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 925.887238] env[62070]: DEBUG nova.compute.manager [None req-8a0f2bb4-1ee9-407f-978d-94801eeae730 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] [instance: cf52cee8-874e-44e8-a36e-49ac20f3e312] Instance network_info: |[{"id": 
"c06feb60-bfb1-47ea-8764-52391d9b0b78", "address": "fa:16:3e:66:3c:b1", "network": {"id": "48dc51c7-cfa4-452e-9d72-2968d9a40dfa", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-274800531-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "925dff51764c4b56ae7ea05fbde2ecdd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2c7c1b46-cb81-45da-b5aa-7905d4da5854", "external-id": "nsx-vlan-transportzone-15", "segmentation_id": 15, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc06feb60-bf", "ovs_interfaceid": "c06feb60-bfb1-47ea-8764-52391d9b0b78", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 925.887764] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-8a0f2bb4-1ee9-407f-978d-94801eeae730 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] [instance: cf52cee8-874e-44e8-a36e-49ac20f3e312] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:66:3c:b1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2c7c1b46-cb81-45da-b5aa-7905d4da5854', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c06feb60-bfb1-47ea-8764-52391d9b0b78', 'vif_model': 'vmxnet3'}] {{(pid=62070) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 925.898149] env[62070]: DEBUG oslo.service.loopingcall [None req-8a0f2bb4-1ee9-407f-978d-94801eeae730 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 925.901471] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cf52cee8-874e-44e8-a36e-49ac20f3e312] Creating VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 925.901471] env[62070]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b68cf1d7-1f56-43fd-971a-1962497d1e0d {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.933738] env[62070]: DEBUG oslo_concurrency.lockutils [None req-f1ea80b0-4069-428b-a258-f9c101cf77f5 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Acquiring lock "4a5f644a-1670-4c6b-a762-f87f1ee4cce5" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 925.935342] env[62070]: DEBUG nova.compute.manager [req-bf0bd51e-968c-42ff-82d4-a5cd2dcc72c8 req-23b4d2f2-f1ec-464b-8da8-eeac7354c568 service nova] [instance: cf52cee8-874e-44e8-a36e-49ac20f3e312] Received event network-changed-c06feb60-bfb1-47ea-8764-52391d9b0b78 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 925.935598] env[62070]: DEBUG nova.compute.manager [req-bf0bd51e-968c-42ff-82d4-a5cd2dcc72c8 req-23b4d2f2-f1ec-464b-8da8-eeac7354c568 service nova] [instance: cf52cee8-874e-44e8-a36e-49ac20f3e312] Refreshing instance network info cache due to event network-changed-c06feb60-bfb1-47ea-8764-52391d9b0b78. {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 925.935878] env[62070]: DEBUG oslo_concurrency.lockutils [req-bf0bd51e-968c-42ff-82d4-a5cd2dcc72c8 req-23b4d2f2-f1ec-464b-8da8-eeac7354c568 service nova] Acquiring lock "refresh_cache-cf52cee8-874e-44e8-a36e-49ac20f3e312" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 925.936105] env[62070]: DEBUG oslo_concurrency.lockutils [req-bf0bd51e-968c-42ff-82d4-a5cd2dcc72c8 req-23b4d2f2-f1ec-464b-8da8-eeac7354c568 service nova] Acquired lock "refresh_cache-cf52cee8-874e-44e8-a36e-49ac20f3e312" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 925.936345] env[62070]: DEBUG nova.network.neutron [req-bf0bd51e-968c-42ff-82d4-a5cd2dcc72c8 req-23b4d2f2-f1ec-464b-8da8-eeac7354c568 service nova] [instance: cf52cee8-874e-44e8-a36e-49ac20f3e312] Refreshing network info cache for port c06feb60-bfb1-47ea-8764-52391d9b0b78 {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 925.945684] env[62070]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 925.945684] env[62070]: value = "task-1122059" [ 925.945684] env[62070]: _type = "Task" [ 925.945684] env[62070]: } to complete. 
{{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 925.952873] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7eee3bf-1daf-4e10-8d5d-e223ab5a80c3 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.964829] env[62070]: DEBUG oslo_concurrency.lockutils [None req-9ee219f9-d47e-4d63-bcb6-65b63094a397 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Lock "4a5f644a-1670-4c6b-a762-f87f1ee4cce5" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.274s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 925.966050] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1122059, 'name': CreateVM_Task} progress is 6%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 925.970041] env[62070]: DEBUG oslo_concurrency.lockutils [None req-f1ea80b0-4069-428b-a258-f9c101cf77f5 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Lock "4a5f644a-1670-4c6b-a762-f87f1ee4cce5" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.036s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 925.970627] env[62070]: DEBUG nova.compute.manager [None req-f1ea80b0-4069-428b-a258-f9c101cf77f5 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] [instance: 4a5f644a-1670-4c6b-a762-f87f1ee4cce5] Checking state {{(pid=62070) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 925.971384] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6afb9ac4-7533-43a1-bf71-5f696849591f {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.975835] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae25a411-9320-4c36-b465-d0bcc86708b9 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.984323] env[62070]: DEBUG nova.compute.manager [None req-f1ea80b0-4069-428b-a258-f9c101cf77f5 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] [instance: 4a5f644a-1670-4c6b-a762-f87f1ee4cce5] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62070) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3368}} [ 925.984945] env[62070]: DEBUG nova.objects.instance [None req-f1ea80b0-4069-428b-a258-f9c101cf77f5 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Lazy-loading 'flavor' on Instance uuid 4a5f644a-1670-4c6b-a762-f87f1ee4cce5 {{(pid=62070) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 926.020347] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-162ad8ae-d096-4e3a-b9fd-9df51897188e {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.025382] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None 
req-f1ea80b0-4069-428b-a258-f9c101cf77f5 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] [instance: 4a5f644a-1670-4c6b-a762-f87f1ee4cce5] Powering off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 926.025382] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8d3d2b4a-aa21-4b97-afbc-297d89307660 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.033828] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-647346d0-0431-46f4-b722-c6bdac3f21a6 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.037961] env[62070]: DEBUG oslo_vmware.api [None req-f1ea80b0-4069-428b-a258-f9c101cf77f5 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Waiting for the task: (returnval){ [ 926.037961] env[62070]: value = "task-1122060" [ 926.037961] env[62070]: _type = "Task" [ 926.037961] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 926.050748] env[62070]: DEBUG nova.compute.provider_tree [None req-bcbfa2d3-8518-4c21-a4ab-0c0fd684eb49 tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 926.057962] env[62070]: DEBUG oslo_vmware.api [None req-f1ea80b0-4069-428b-a258-f9c101cf77f5 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Task: {'id': task-1122060, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 926.456652] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1122059, 'name': CreateVM_Task, 'duration_secs': 0.406387} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 926.457016] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cf52cee8-874e-44e8-a36e-49ac20f3e312] Created VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 926.457615] env[62070]: DEBUG oslo_concurrency.lockutils [None req-8a0f2bb4-1ee9-407f-978d-94801eeae730 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 926.457789] env[62070]: DEBUG oslo_concurrency.lockutils [None req-8a0f2bb4-1ee9-407f-978d-94801eeae730 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Acquired lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 926.458136] env[62070]: DEBUG oslo_concurrency.lockutils [None req-8a0f2bb4-1ee9-407f-978d-94801eeae730 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 926.458833] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4669794b-6e38-436b-a311-8fc741a1b721 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.464025] env[62070]: DEBUG oslo_vmware.api [None req-8a0f2bb4-1ee9-407f-978d-94801eeae730 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Waiting for the task: (returnval){ [ 926.464025] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]5278d076-1029-9c1a-c988-c7b7111d76b9" [ 926.464025] env[62070]: _type = "Task" [ 926.464025] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 926.473434] env[62070]: DEBUG oslo_vmware.api [None req-8a0f2bb4-1ee9-407f-978d-94801eeae730 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]5278d076-1029-9c1a-c988-c7b7111d76b9, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 926.554912] env[62070]: DEBUG nova.scheduler.client.report [None req-bcbfa2d3-8518-4c21-a4ab-0c0fd684eb49 tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 926.559526] env[62070]: DEBUG oslo_vmware.api [None req-f1ea80b0-4069-428b-a258-f9c101cf77f5 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Task: {'id': task-1122060, 'name': PowerOffVM_Task, 'duration_secs': 0.225262} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 926.560052] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-f1ea80b0-4069-428b-a258-f9c101cf77f5 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] [instance: 4a5f644a-1670-4c6b-a762-f87f1ee4cce5] Powered off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 926.560249] env[62070]: DEBUG nova.compute.manager [None req-f1ea80b0-4069-428b-a258-f9c101cf77f5 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] [instance: 4a5f644a-1670-4c6b-a762-f87f1ee4cce5] Checking state {{(pid=62070) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 926.561056] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1b9170b-28e4-40f2-805d-c26abd78d487 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.718268] env[62070]: DEBUG nova.network.neutron [req-bf0bd51e-968c-42ff-82d4-a5cd2dcc72c8 req-23b4d2f2-f1ec-464b-8da8-eeac7354c568 service nova] [instance: cf52cee8-874e-44e8-a36e-49ac20f3e312] Updated VIF entry in instance network info cache for port c06feb60-bfb1-47ea-8764-52391d9b0b78. 
{{(pid=62070) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 926.718268] env[62070]: DEBUG nova.network.neutron [req-bf0bd51e-968c-42ff-82d4-a5cd2dcc72c8 req-23b4d2f2-f1ec-464b-8da8-eeac7354c568 service nova] [instance: cf52cee8-874e-44e8-a36e-49ac20f3e312] Updating instance_info_cache with network_info: [{"id": "c06feb60-bfb1-47ea-8764-52391d9b0b78", "address": "fa:16:3e:66:3c:b1", "network": {"id": "48dc51c7-cfa4-452e-9d72-2968d9a40dfa", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-274800531-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "925dff51764c4b56ae7ea05fbde2ecdd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2c7c1b46-cb81-45da-b5aa-7905d4da5854", "external-id": "nsx-vlan-transportzone-15", "segmentation_id": 15, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc06feb60-bf", "ovs_interfaceid": "c06feb60-bfb1-47ea-8764-52391d9b0b78", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 926.976652] env[62070]: DEBUG oslo_vmware.api [None req-8a0f2bb4-1ee9-407f-978d-94801eeae730 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]5278d076-1029-9c1a-c988-c7b7111d76b9, 'name': SearchDatastore_Task, 'duration_secs': 0.049373} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 926.977089] env[62070]: DEBUG oslo_concurrency.lockutils [None req-8a0f2bb4-1ee9-407f-978d-94801eeae730 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Releasing lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 926.977678] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-8a0f2bb4-1ee9-407f-978d-94801eeae730 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] [instance: cf52cee8-874e-44e8-a36e-49ac20f3e312] Processing image 43ea607c-7ece-4601-9b11-75c6a16aa7dd {{(pid=62070) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 926.977984] env[62070]: DEBUG oslo_concurrency.lockutils [None req-8a0f2bb4-1ee9-407f-978d-94801eeae730 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 926.978250] env[62070]: DEBUG oslo_concurrency.lockutils [None req-8a0f2bb4-1ee9-407f-978d-94801eeae730 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Acquired lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 926.978379] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-8a0f2bb4-1ee9-407f-978d-94801eeae730 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 926.978773] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f6249527-b766-43b9-afbd-3b5d672a73c4 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.987780] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-8a0f2bb4-1ee9-407f-978d-94801eeae730 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 926.987878] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-8a0f2bb4-1ee9-407f-978d-94801eeae730 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62070) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 926.988776] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6ab2937e-3197-45e4-83a9-0f0e4608d76e {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.998167] env[62070]: DEBUG oslo_vmware.api [None req-8a0f2bb4-1ee9-407f-978d-94801eeae730 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Waiting for the task: (returnval){ [ 926.998167] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]52ce3f35-1b0d-43c3-7cda-9b68e2c52e02" [ 926.998167] env[62070]: _type = "Task" [ 926.998167] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 927.007468] env[62070]: DEBUG oslo_vmware.api [None req-8a0f2bb4-1ee9-407f-978d-94801eeae730 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52ce3f35-1b0d-43c3-7cda-9b68e2c52e02, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 927.060969] env[62070]: DEBUG oslo_concurrency.lockutils [None req-bcbfa2d3-8518-4c21-a4ab-0c0fd684eb49 tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.015s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 927.064858] env[62070]: DEBUG oslo_concurrency.lockutils [None req-11f65eb1-44d2-44c1-8a02-a34481cdec91 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.300s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 927.066424] env[62070]: INFO nova.compute.claims [None req-11f65eb1-44d2-44c1-8a02-a34481cdec91 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] [instance: 1440361b-d3b4-4c1c-995c-fe7ff99ee297] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 927.076396] env[62070]: DEBUG oslo_concurrency.lockutils [None req-f1ea80b0-4069-428b-a258-f9c101cf77f5 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Lock "4a5f644a-1670-4c6b-a762-f87f1ee4cce5" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.107s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 927.104308] env[62070]: INFO nova.scheduler.client.report [None req-bcbfa2d3-8518-4c21-a4ab-0c0fd684eb49 tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Deleted allocations for instance 10672096-00ba-4481-8ab3-085a185076db [ 927.221423] env[62070]: DEBUG oslo_concurrency.lockutils [req-bf0bd51e-968c-42ff-82d4-a5cd2dcc72c8 req-23b4d2f2-f1ec-464b-8da8-eeac7354c568 service nova] Releasing lock "refresh_cache-cf52cee8-874e-44e8-a36e-49ac20f3e312" {{(pid=62070) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 927.305891] env[62070]: DEBUG nova.objects.instance [None req-7c46d9b1-06d6-4069-bf6e-f1939b3ac6ae tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Lazy-loading 'flavor' on Instance uuid 4a5f644a-1670-4c6b-a762-f87f1ee4cce5 {{(pid=62070) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 927.513345] env[62070]: DEBUG oslo_vmware.api [None req-8a0f2bb4-1ee9-407f-978d-94801eeae730 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52ce3f35-1b0d-43c3-7cda-9b68e2c52e02, 'name': SearchDatastore_Task, 'duration_secs': 0.012656} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 927.514039] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-89242357-c1f2-48b7-beaa-d5ba35b53609 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.520213] env[62070]: DEBUG oslo_vmware.api [None req-8a0f2bb4-1ee9-407f-978d-94801eeae730 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Waiting for the task: (returnval){ [ 927.520213] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]52f5bcf3-76ac-1e90-f96a-c71146bf18cf" [ 927.520213] env[62070]: _type = "Task" [ 927.520213] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 927.528955] env[62070]: DEBUG oslo_vmware.api [None req-8a0f2bb4-1ee9-407f-978d-94801eeae730 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52f5bcf3-76ac-1e90-f96a-c71146bf18cf, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 927.612854] env[62070]: DEBUG oslo_concurrency.lockutils [None req-bcbfa2d3-8518-4c21-a4ab-0c0fd684eb49 tempest-ListServerFiltersTestJSON-1772926812 tempest-ListServerFiltersTestJSON-1772926812-project-member] Lock "10672096-00ba-4481-8ab3-085a185076db" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 30.065s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 927.814828] env[62070]: DEBUG oslo_concurrency.lockutils [None req-7c46d9b1-06d6-4069-bf6e-f1939b3ac6ae tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Acquiring lock "refresh_cache-4a5f644a-1670-4c6b-a762-f87f1ee4cce5" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 927.815054] env[62070]: DEBUG oslo_concurrency.lockutils [None req-7c46d9b1-06d6-4069-bf6e-f1939b3ac6ae tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Acquired lock "refresh_cache-4a5f644a-1670-4c6b-a762-f87f1ee4cce5" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 927.815230] env[62070]: DEBUG nova.network.neutron [None req-7c46d9b1-06d6-4069-bf6e-f1939b3ac6ae tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] [instance: 4a5f644a-1670-4c6b-a762-f87f1ee4cce5] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 927.815552] env[62070]: DEBUG nova.objects.instance [None req-7c46d9b1-06d6-4069-bf6e-f1939b3ac6ae tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Lazy-loading 'info_cache' on Instance uuid 4a5f644a-1670-4c6b-a762-f87f1ee4cce5 {{(pid=62070) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 928.036123] env[62070]: DEBUG oslo_vmware.api [None req-8a0f2bb4-1ee9-407f-978d-94801eeae730 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52f5bcf3-76ac-1e90-f96a-c71146bf18cf, 'name': SearchDatastore_Task, 'duration_secs': 0.008983} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 928.037203] env[62070]: DEBUG oslo_concurrency.lockutils [None req-8a0f2bb4-1ee9-407f-978d-94801eeae730 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Releasing lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 928.037621] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-8a0f2bb4-1ee9-407f-978d-94801eeae730 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore2] cf52cee8-874e-44e8-a36e-49ac20f3e312/cf52cee8-874e-44e8-a36e-49ac20f3e312.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 928.039379] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5bb0657b-d324-4e34-af79-eddf9e1c4f3e {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.044859] env[62070]: DEBUG oslo_vmware.api [None req-8a0f2bb4-1ee9-407f-978d-94801eeae730 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Waiting for the task: (returnval){ [ 928.044859] env[62070]: value = "task-1122061" [ 928.044859] env[62070]: _type = "Task" [ 928.044859] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 928.060143] env[62070]: DEBUG oslo_vmware.api [None req-8a0f2bb4-1ee9-407f-978d-94801eeae730 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Task: {'id': task-1122061, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 928.181504] env[62070]: DEBUG oslo_concurrency.lockutils [None req-242f2c71-01fd-4276-a650-74f4ae5a169d tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Acquiring lock "53a1791d-38fd-4721-b82c-2f0922348300" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 928.181504] env[62070]: DEBUG oslo_concurrency.lockutils [None req-242f2c71-01fd-4276-a650-74f4ae5a169d tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Lock "53a1791d-38fd-4721-b82c-2f0922348300" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 928.319509] env[62070]: DEBUG nova.objects.base [None req-7c46d9b1-06d6-4069-bf6e-f1939b3ac6ae tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Object Instance<4a5f644a-1670-4c6b-a762-f87f1ee4cce5> lazy-loaded attributes: flavor,info_cache {{(pid=62070) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 928.432405] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3b0854c-f0dc-48fa-9de8-7b7ab5c70519 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.441333] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf4c6589-785d-4973-9a33-a8add99a3604 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.480423] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57a20d2d-e24d-4dac-9830-00ad548a32d9 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.488841] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27d62823-e7e1-4d8c-86c5-bd84e5ba986f {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.502566] env[62070]: DEBUG nova.compute.provider_tree [None req-11f65eb1-44d2-44c1-8a02-a34481cdec91 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 928.555810] env[62070]: DEBUG oslo_vmware.api [None req-8a0f2bb4-1ee9-407f-978d-94801eeae730 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Task: {'id': task-1122061, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.4591} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 928.555810] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-8a0f2bb4-1ee9-407f-978d-94801eeae730 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore2] cf52cee8-874e-44e8-a36e-49ac20f3e312/cf52cee8-874e-44e8-a36e-49ac20f3e312.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 928.556174] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-8a0f2bb4-1ee9-407f-978d-94801eeae730 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] [instance: cf52cee8-874e-44e8-a36e-49ac20f3e312] Extending root virtual disk to 1048576 {{(pid=62070) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 928.557058] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d10d688e-8a2e-41b6-9175-681f132c9ff6 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.563441] env[62070]: DEBUG oslo_vmware.api [None req-8a0f2bb4-1ee9-407f-978d-94801eeae730 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Waiting for the task: (returnval){ [ 928.563441] env[62070]: value = "task-1122062" [ 928.563441] env[62070]: _type = "Task" [ 928.563441] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 928.572185] env[62070]: DEBUG oslo_vmware.api [None req-8a0f2bb4-1ee9-407f-978d-94801eeae730 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Task: {'id': task-1122062, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 928.686486] env[62070]: DEBUG nova.compute.manager [None req-242f2c71-01fd-4276-a650-74f4ae5a169d tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] [instance: 53a1791d-38fd-4721-b82c-2f0922348300] Starting instance... 
{{(pid=62070) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 929.008770] env[62070]: DEBUG nova.scheduler.client.report [None req-11f65eb1-44d2-44c1-8a02-a34481cdec91 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 929.032745] env[62070]: DEBUG nova.network.neutron [None req-7c46d9b1-06d6-4069-bf6e-f1939b3ac6ae tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] [instance: 4a5f644a-1670-4c6b-a762-f87f1ee4cce5] Updating instance_info_cache with network_info: [{"id": "3a8213ef-a979-487a-8756-7bfecdf4ba10", "address": "fa:16:3e:25:2e:9f", "network": {"id": "443d2d62-bcef-44b2-814a-3e5dc50abc04", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-772061432-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.183", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e85c0cc8e0f544bfbb76970d3123fbb7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5e1049e8-c06b-4c93-a9e1-2cbb530f3f95", "external-id": "nsx-vlan-transportzone-966", "segmentation_id": 966, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3a8213ef-a9", "ovs_interfaceid": "3a8213ef-a979-487a-8756-7bfecdf4ba10", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 929.074043] env[62070]: DEBUG oslo_vmware.api [None req-8a0f2bb4-1ee9-407f-978d-94801eeae730 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Task: {'id': task-1122062, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.06259} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 929.074043] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-8a0f2bb4-1ee9-407f-978d-94801eeae730 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] [instance: cf52cee8-874e-44e8-a36e-49ac20f3e312] Extended root virtual disk {{(pid=62070) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 929.074785] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e057b6d9-ca95-4cd8-a93c-8f682a93886a {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.098353] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-8a0f2bb4-1ee9-407f-978d-94801eeae730 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] [instance: cf52cee8-874e-44e8-a36e-49ac20f3e312] Reconfiguring VM instance instance-00000051 to attach disk [datastore2] cf52cee8-874e-44e8-a36e-49ac20f3e312/cf52cee8-874e-44e8-a36e-49ac20f3e312.vmdk or device None with type sparse {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 929.098692] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-462cd8a6-b8e9-4427-8feb-4aa292bcf7b7 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.119756] env[62070]: DEBUG oslo_vmware.api [None req-8a0f2bb4-1ee9-407f-978d-94801eeae730 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Waiting for the task: (returnval){ [ 929.119756] env[62070]: value = "task-1122063" [ 929.119756] env[62070]: _type = "Task" [ 929.119756] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 929.127846] env[62070]: DEBUG oslo_vmware.api [None req-8a0f2bb4-1ee9-407f-978d-94801eeae730 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Task: {'id': task-1122063, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 929.215666] env[62070]: DEBUG oslo_concurrency.lockutils [None req-242f2c71-01fd-4276-a650-74f4ae5a169d tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 929.514797] env[62070]: DEBUG oslo_concurrency.lockutils [None req-11f65eb1-44d2-44c1-8a02-a34481cdec91 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.450s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 929.515413] env[62070]: DEBUG nova.compute.manager [None req-11f65eb1-44d2-44c1-8a02-a34481cdec91 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] [instance: 1440361b-d3b4-4c1c-995c-fe7ff99ee297] Start building networks asynchronously for instance. 
{{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 929.518121] env[62070]: DEBUG oslo_concurrency.lockutils [None req-a559e031-119e-4aae-aec9-40a6a2a17343 tempest-ServerRescueTestJSONUnderV235-557690179 tempest-ServerRescueTestJSONUnderV235-557690179-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 22.893s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 929.518388] env[62070]: DEBUG nova.objects.instance [None req-a559e031-119e-4aae-aec9-40a6a2a17343 tempest-ServerRescueTestJSONUnderV235-557690179 tempest-ServerRescueTestJSONUnderV235-557690179-project-member] Lazy-loading 'resources' on Instance uuid 559eee5b-0834-4dcf-a436-5e58644c7a3b {{(pid=62070) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 929.535281] env[62070]: DEBUG oslo_concurrency.lockutils [None req-7c46d9b1-06d6-4069-bf6e-f1939b3ac6ae tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Releasing lock "refresh_cache-4a5f644a-1670-4c6b-a762-f87f1ee4cce5" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 929.629669] env[62070]: DEBUG oslo_vmware.api [None req-8a0f2bb4-1ee9-407f-978d-94801eeae730 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Task: {'id': task-1122063, 'name': ReconfigVM_Task, 'duration_secs': 0.302001} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 929.630015] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-8a0f2bb4-1ee9-407f-978d-94801eeae730 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] [instance: cf52cee8-874e-44e8-a36e-49ac20f3e312] Reconfigured VM instance instance-00000051 to attach disk [datastore2] cf52cee8-874e-44e8-a36e-49ac20f3e312/cf52cee8-874e-44e8-a36e-49ac20f3e312.vmdk or device None with type sparse {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 929.630570] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e08623f9-26db-455c-8df7-0cbaf15ea005 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.636423] env[62070]: DEBUG oslo_vmware.api [None req-8a0f2bb4-1ee9-407f-978d-94801eeae730 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Waiting for the task: (returnval){ [ 929.636423] env[62070]: value = "task-1122064" [ 929.636423] env[62070]: _type = "Task" [ 929.636423] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 929.643880] env[62070]: DEBUG oslo_vmware.api [None req-8a0f2bb4-1ee9-407f-978d-94801eeae730 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Task: {'id': task-1122064, 'name': Rename_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 930.024813] env[62070]: DEBUG nova.compute.utils [None req-11f65eb1-44d2-44c1-8a02-a34481cdec91 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] Using /dev/sd instead of None {{(pid=62070) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 930.027064] env[62070]: DEBUG nova.compute.manager [None req-11f65eb1-44d2-44c1-8a02-a34481cdec91 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] [instance: 1440361b-d3b4-4c1c-995c-fe7ff99ee297] Not allocating networking since 'none' was specified. {{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1977}} [ 930.039257] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-7c46d9b1-06d6-4069-bf6e-f1939b3ac6ae tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] [instance: 4a5f644a-1670-4c6b-a762-f87f1ee4cce5] Powering on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 930.039785] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3bb3a1f5-893d-4e6f-bbf6-c565c4671a6e {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.046825] env[62070]: DEBUG oslo_vmware.api [None req-7c46d9b1-06d6-4069-bf6e-f1939b3ac6ae tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Waiting for the task: (returnval){ [ 930.046825] env[62070]: value = "task-1122065" [ 930.046825] env[62070]: _type = "Task" [ 930.046825] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 930.058285] env[62070]: DEBUG oslo_vmware.api [None req-7c46d9b1-06d6-4069-bf6e-f1939b3ac6ae tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Task: {'id': task-1122065, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 930.147342] env[62070]: DEBUG oslo_vmware.api [None req-8a0f2bb4-1ee9-407f-978d-94801eeae730 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Task: {'id': task-1122064, 'name': Rename_Task, 'duration_secs': 0.136767} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 930.147651] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-8a0f2bb4-1ee9-407f-978d-94801eeae730 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] [instance: cf52cee8-874e-44e8-a36e-49ac20f3e312] Powering on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 930.147905] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3b762946-db68-412c-8020-211e113e454d {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.154159] env[62070]: DEBUG oslo_vmware.api [None req-8a0f2bb4-1ee9-407f-978d-94801eeae730 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Waiting for the task: (returnval){ [ 930.154159] env[62070]: value = "task-1122066" [ 930.154159] env[62070]: _type = "Task" [ 930.154159] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 930.162919] env[62070]: DEBUG oslo_vmware.api [None req-8a0f2bb4-1ee9-407f-978d-94801eeae730 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Task: {'id': task-1122066, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 930.340039] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b586c64a-dad2-4e83-aa81-940c60b2c7c5 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.349153] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7608157-7b37-4f68-90c2-161dd8234105 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.382128] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a01e0105-c5f0-49c2-abc3-66022684fffd {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.390882] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02268434-2f9f-4d77-ac36-a98cebb74752 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.404437] env[62070]: DEBUG nova.compute.provider_tree [None req-a559e031-119e-4aae-aec9-40a6a2a17343 tempest-ServerRescueTestJSONUnderV235-557690179 tempest-ServerRescueTestJSONUnderV235-557690179-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 930.528587] env[62070]: DEBUG nova.compute.manager [None req-11f65eb1-44d2-44c1-8a02-a34481cdec91 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] [instance: 1440361b-d3b4-4c1c-995c-fe7ff99ee297] Start building block device mappings for instance. 
{{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 930.558067] env[62070]: DEBUG oslo_vmware.api [None req-7c46d9b1-06d6-4069-bf6e-f1939b3ac6ae tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Task: {'id': task-1122065, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 930.665575] env[62070]: DEBUG oslo_vmware.api [None req-8a0f2bb4-1ee9-407f-978d-94801eeae730 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Task: {'id': task-1122066, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 930.908073] env[62070]: DEBUG nova.scheduler.client.report [None req-a559e031-119e-4aae-aec9-40a6a2a17343 tempest-ServerRescueTestJSONUnderV235-557690179 tempest-ServerRescueTestJSONUnderV235-557690179-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 931.061026] env[62070]: DEBUG oslo_vmware.api [None req-7c46d9b1-06d6-4069-bf6e-f1939b3ac6ae tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Task: {'id': task-1122065, 'name': PowerOnVM_Task, 'duration_secs': 0.561872} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 931.061026] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-7c46d9b1-06d6-4069-bf6e-f1939b3ac6ae tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] [instance: 4a5f644a-1670-4c6b-a762-f87f1ee4cce5] Powered on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 931.061026] env[62070]: DEBUG nova.compute.manager [None req-7c46d9b1-06d6-4069-bf6e-f1939b3ac6ae tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] [instance: 4a5f644a-1670-4c6b-a762-f87f1ee4cce5] Checking state {{(pid=62070) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 931.061026] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-076016f7-9234-476d-88b1-720b04396b0f {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.164858] env[62070]: DEBUG oslo_vmware.api [None req-8a0f2bb4-1ee9-407f-978d-94801eeae730 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Task: {'id': task-1122066, 'name': PowerOnVM_Task, 'duration_secs': 0.594257} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 931.165322] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-8a0f2bb4-1ee9-407f-978d-94801eeae730 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] [instance: cf52cee8-874e-44e8-a36e-49ac20f3e312] Powered on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 931.165648] env[62070]: INFO nova.compute.manager [None req-8a0f2bb4-1ee9-407f-978d-94801eeae730 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] [instance: cf52cee8-874e-44e8-a36e-49ac20f3e312] Took 8.06 seconds to spawn the instance on the hypervisor. [ 931.165958] env[62070]: DEBUG nova.compute.manager [None req-8a0f2bb4-1ee9-407f-978d-94801eeae730 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] [instance: cf52cee8-874e-44e8-a36e-49ac20f3e312] Checking state {{(pid=62070) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 931.166804] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b5860cb-bc26-416e-858d-b5b7d9f8e44c {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.414480] env[62070]: DEBUG oslo_concurrency.lockutils [None req-a559e031-119e-4aae-aec9-40a6a2a17343 tempest-ServerRescueTestJSONUnderV235-557690179 tempest-ServerRescueTestJSONUnderV235-557690179-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.896s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 931.417374] env[62070]: DEBUG oslo_concurrency.lockutils [None req-4e7b3e9b-a4fd-4941-87c2-296d13c2b794 tempest-ServerPasswordTestJSON-1580526241 tempest-ServerPasswordTestJSON-1580526241-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 21.045s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 931.417863] env[62070]: DEBUG nova.objects.instance [None req-4e7b3e9b-a4fd-4941-87c2-296d13c2b794 tempest-ServerPasswordTestJSON-1580526241 tempest-ServerPasswordTestJSON-1580526241-project-member] Lazy-loading 'resources' on Instance uuid e51d0146-502a-4ace-856e-b0dbcc11edea {{(pid=62070) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 931.440404] env[62070]: INFO nova.scheduler.client.report [None req-a559e031-119e-4aae-aec9-40a6a2a17343 tempest-ServerRescueTestJSONUnderV235-557690179 tempest-ServerRescueTestJSONUnderV235-557690179-project-member] Deleted allocations for instance 559eee5b-0834-4dcf-a436-5e58644c7a3b [ 931.540154] env[62070]: DEBUG nova.compute.manager [None req-11f65eb1-44d2-44c1-8a02-a34481cdec91 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] [instance: 1440361b-d3b4-4c1c-995c-fe7ff99ee297] Start spawning the instance on the hypervisor. 
{{(pid=62070) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 931.569896] env[62070]: DEBUG nova.virt.hardware [None req-11f65eb1-44d2-44c1-8a02-a34481cdec91 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T09:21:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T09:21:17Z,direct_url=,disk_format='vmdk',id=43ea607c-7ece-4601-9b11-75c6a16aa7dd,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9d42cb2bbadf40d6b35f237f71234611',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T09:21:18Z,virtual_size=,visibility=), allow threads: False {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 931.570175] env[62070]: DEBUG nova.virt.hardware [None req-11f65eb1-44d2-44c1-8a02-a34481cdec91 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] Flavor limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 931.570342] env[62070]: DEBUG nova.virt.hardware [None req-11f65eb1-44d2-44c1-8a02-a34481cdec91 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] Image limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 931.570528] env[62070]: DEBUG nova.virt.hardware [None req-11f65eb1-44d2-44c1-8a02-a34481cdec91 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] Flavor pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 931.570708] env[62070]: DEBUG nova.virt.hardware [None req-11f65eb1-44d2-44c1-8a02-a34481cdec91 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] Image pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 931.570917] env[62070]: DEBUG nova.virt.hardware [None req-11f65eb1-44d2-44c1-8a02-a34481cdec91 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 931.571161] env[62070]: DEBUG nova.virt.hardware [None req-11f65eb1-44d2-44c1-8a02-a34481cdec91 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 931.571333] env[62070]: DEBUG nova.virt.hardware [None req-11f65eb1-44d2-44c1-8a02-a34481cdec91 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 931.571573] env[62070]: DEBUG nova.virt.hardware [None req-11f65eb1-44d2-44c1-8a02-a34481cdec91 
tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] Got 1 possible topologies {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 931.571769] env[62070]: DEBUG nova.virt.hardware [None req-11f65eb1-44d2-44c1-8a02-a34481cdec91 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 931.572096] env[62070]: DEBUG nova.virt.hardware [None req-11f65eb1-44d2-44c1-8a02-a34481cdec91 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 931.576124] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cf7d194-5154-413c-a730-df05313fc281 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.586096] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7fa42f4-edf4-468f-aa96-f88b87a2f4a6 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.600768] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-11f65eb1-44d2-44c1-8a02-a34481cdec91 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] [instance: 1440361b-d3b4-4c1c-995c-fe7ff99ee297] Instance VIF info [] {{(pid=62070) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 931.606845] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-11f65eb1-44d2-44c1-8a02-a34481cdec91 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] Creating folder: Project (5e099111b3e24f7c8b0ea57c17a8735b). Parent ref: group-v245319. {{(pid=62070) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 931.607874] env[62070]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-23645f52-a969-4f3f-84af-a8e4af3ee03a {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.618849] env[62070]: INFO nova.virt.vmwareapi.vm_util [None req-11f65eb1-44d2-44c1-8a02-a34481cdec91 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] Created folder: Project (5e099111b3e24f7c8b0ea57c17a8735b) in parent group-v245319. [ 931.619079] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-11f65eb1-44d2-44c1-8a02-a34481cdec91 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] Creating folder: Instances. Parent ref: group-v245457. {{(pid=62070) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 931.619331] env[62070]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9042f72a-5e30-4a06-afec-2b3eb9912ac5 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.630136] env[62070]: INFO nova.virt.vmwareapi.vm_util [None req-11f65eb1-44d2-44c1-8a02-a34481cdec91 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] Created folder: Instances in parent group-v245457. 
[ 931.630434] env[62070]: DEBUG oslo.service.loopingcall [None req-11f65eb1-44d2-44c1-8a02-a34481cdec91 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 931.630765] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1440361b-d3b4-4c1c-995c-fe7ff99ee297] Creating VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 931.631238] env[62070]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ef99dd94-abff-4463-8679-fa8920a07b16 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.654025] env[62070]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 931.654025] env[62070]: value = "task-1122069" [ 931.654025] env[62070]: _type = "Task" [ 931.654025] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 931.659983] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1122069, 'name': CreateVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 931.688390] env[62070]: INFO nova.compute.manager [None req-8a0f2bb4-1ee9-407f-978d-94801eeae730 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] [instance: cf52cee8-874e-44e8-a36e-49ac20f3e312] Took 41.17 seconds to build instance. [ 931.949248] env[62070]: DEBUG oslo_concurrency.lockutils [None req-a559e031-119e-4aae-aec9-40a6a2a17343 tempest-ServerRescueTestJSONUnderV235-557690179 tempest-ServerRescueTestJSONUnderV235-557690179-project-member] Lock "559eee5b-0834-4dcf-a436-5e58644c7a3b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 28.744s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 932.166557] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1122069, 'name': CreateVM_Task, 'duration_secs': 0.321855} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 932.167776] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1440361b-d3b4-4c1c-995c-fe7ff99ee297] Created VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 932.167776] env[62070]: DEBUG oslo_concurrency.lockutils [None req-11f65eb1-44d2-44c1-8a02-a34481cdec91 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 932.168023] env[62070]: DEBUG oslo_concurrency.lockutils [None req-11f65eb1-44d2-44c1-8a02-a34481cdec91 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] Acquired lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 932.168556] env[62070]: DEBUG oslo_concurrency.lockutils [None req-11f65eb1-44d2-44c1-8a02-a34481cdec91 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 932.171394] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dff5c9c8-a26c-47cc-8dd5-83102f680d50 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.177546] env[62070]: DEBUG oslo_vmware.api [None req-11f65eb1-44d2-44c1-8a02-a34481cdec91 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] Waiting for the task: (returnval){ [ 932.177546] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]523002ed-62a2-8269-1de6-dec2ebe2eb83" [ 932.177546] env[62070]: _type = "Task" [ 932.177546] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 932.185950] env[62070]: DEBUG oslo_vmware.api [None req-11f65eb1-44d2-44c1-8a02-a34481cdec91 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]523002ed-62a2-8269-1de6-dec2ebe2eb83, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 932.190753] env[62070]: DEBUG oslo_concurrency.lockutils [None req-8a0f2bb4-1ee9-407f-978d-94801eeae730 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Lock "cf52cee8-874e-44e8-a36e-49ac20f3e312" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 42.682s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 932.293818] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0aecfce-b78e-48a2-ac76-79d7e9882106 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.301474] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d50471ba-9cc3-4af0-96a0-02186a5f1511 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.335704] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ac9a4c1-251f-4913-893b-f6ee025353fd {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.344558] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-952db44b-c1d7-4735-be3c-32db04de7a73 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.360381] env[62070]: DEBUG nova.compute.provider_tree [None req-4e7b3e9b-a4fd-4941-87c2-296d13c2b794 tempest-ServerPasswordTestJSON-1580526241 tempest-ServerPasswordTestJSON-1580526241-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 932.687986] env[62070]: DEBUG oslo_vmware.api [None req-11f65eb1-44d2-44c1-8a02-a34481cdec91 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]523002ed-62a2-8269-1de6-dec2ebe2eb83, 'name': SearchDatastore_Task, 'duration_secs': 0.00909} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 932.688373] env[62070]: DEBUG oslo_concurrency.lockutils [None req-11f65eb1-44d2-44c1-8a02-a34481cdec91 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] Releasing lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 932.688616] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-11f65eb1-44d2-44c1-8a02-a34481cdec91 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] [instance: 1440361b-d3b4-4c1c-995c-fe7ff99ee297] Processing image 43ea607c-7ece-4601-9b11-75c6a16aa7dd {{(pid=62070) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 932.688917] env[62070]: DEBUG oslo_concurrency.lockutils [None req-11f65eb1-44d2-44c1-8a02-a34481cdec91 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 932.689077] env[62070]: DEBUG oslo_concurrency.lockutils [None req-11f65eb1-44d2-44c1-8a02-a34481cdec91 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] Acquired lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 932.689344] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-11f65eb1-44d2-44c1-8a02-a34481cdec91 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 932.689474] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-45edbdd2-3938-4c60-8fba-9a86011a3638 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.698270] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-11f65eb1-44d2-44c1-8a02-a34481cdec91 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 932.700884] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-11f65eb1-44d2-44c1-8a02-a34481cdec91 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62070) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 932.700884] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d9600f0a-07a6-4ff6-a895-c9916f806beb {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.706405] env[62070]: DEBUG oslo_vmware.api [None req-11f65eb1-44d2-44c1-8a02-a34481cdec91 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] Waiting for the task: (returnval){ [ 932.706405] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]52861b20-1d6e-9ffd-c559-9a93c35e7d58" [ 932.706405] env[62070]: _type = "Task" [ 932.706405] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 932.713358] env[62070]: DEBUG oslo_vmware.api [None req-11f65eb1-44d2-44c1-8a02-a34481cdec91 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52861b20-1d6e-9ffd-c559-9a93c35e7d58, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 932.863966] env[62070]: DEBUG nova.scheduler.client.report [None req-4e7b3e9b-a4fd-4941-87c2-296d13c2b794 tempest-ServerPasswordTestJSON-1580526241 tempest-ServerPasswordTestJSON-1580526241-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 933.000735] env[62070]: DEBUG nova.compute.manager [req-cbf92d3f-d57b-44f1-9403-b30e03f2517e req-8eb48c40-2383-4456-9145-fbb898542ec0 service nova] [instance: 21bcb1a6-833b-48f3-8ee2-0e49c64a104f] Received event network-changed-45420f68-e309-4569-8dac-28e16d9417d7 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 933.000992] env[62070]: DEBUG nova.compute.manager [req-cbf92d3f-d57b-44f1-9403-b30e03f2517e req-8eb48c40-2383-4456-9145-fbb898542ec0 service nova] [instance: 21bcb1a6-833b-48f3-8ee2-0e49c64a104f] Refreshing instance network info cache due to event network-changed-45420f68-e309-4569-8dac-28e16d9417d7. 
{{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 933.001290] env[62070]: DEBUG oslo_concurrency.lockutils [req-cbf92d3f-d57b-44f1-9403-b30e03f2517e req-8eb48c40-2383-4456-9145-fbb898542ec0 service nova] Acquiring lock "refresh_cache-21bcb1a6-833b-48f3-8ee2-0e49c64a104f" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 933.001543] env[62070]: DEBUG oslo_concurrency.lockutils [req-cbf92d3f-d57b-44f1-9403-b30e03f2517e req-8eb48c40-2383-4456-9145-fbb898542ec0 service nova] Acquired lock "refresh_cache-21bcb1a6-833b-48f3-8ee2-0e49c64a104f" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 933.001719] env[62070]: DEBUG nova.network.neutron [req-cbf92d3f-d57b-44f1-9403-b30e03f2517e req-8eb48c40-2383-4456-9145-fbb898542ec0 service nova] [instance: 21bcb1a6-833b-48f3-8ee2-0e49c64a104f] Refreshing network info cache for port 45420f68-e309-4569-8dac-28e16d9417d7 {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 933.217030] env[62070]: DEBUG oslo_vmware.api [None req-11f65eb1-44d2-44c1-8a02-a34481cdec91 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52861b20-1d6e-9ffd-c559-9a93c35e7d58, 'name': SearchDatastore_Task, 'duration_secs': 0.008605} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 933.217817] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-50d79e09-83f0-4871-8510-2984014b8995 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.223026] env[62070]: DEBUG oslo_vmware.api [None req-11f65eb1-44d2-44c1-8a02-a34481cdec91 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] Waiting for the task: (returnval){ [ 933.223026] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]52e3fa39-7740-0491-8e8a-b3d01f7c9abb" [ 933.223026] env[62070]: _type = "Task" [ 933.223026] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 933.230842] env[62070]: DEBUG oslo_vmware.api [None req-11f65eb1-44d2-44c1-8a02-a34481cdec91 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52e3fa39-7740-0491-8e8a-b3d01f7c9abb, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 933.373235] env[62070]: DEBUG oslo_concurrency.lockutils [None req-4e7b3e9b-a4fd-4941-87c2-296d13c2b794 tempest-ServerPasswordTestJSON-1580526241 tempest-ServerPasswordTestJSON-1580526241-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.956s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 933.376444] env[62070]: DEBUG oslo_concurrency.lockutils [None req-906804fc-a7e7-4fcf-821d-5e65108abaa1 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.150s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 933.378268] env[62070]: INFO nova.compute.claims [None req-906804fc-a7e7-4fcf-821d-5e65108abaa1 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 84c00e4a-20d3-4739-8535-e27076d85a89] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 933.406910] env[62070]: INFO nova.scheduler.client.report [None req-4e7b3e9b-a4fd-4941-87c2-296d13c2b794 tempest-ServerPasswordTestJSON-1580526241 tempest-ServerPasswordTestJSON-1580526241-project-member] Deleted allocations for instance e51d0146-502a-4ace-856e-b0dbcc11edea [ 933.638275] env[62070]: DEBUG oslo_concurrency.lockutils [None req-26f7fbc8-03f4-42ad-82c9-20e385f600f8 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Acquiring lock "a5cba512-9b50-4ca3-93eb-345be12dc588" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 933.638275] env[62070]: DEBUG oslo_concurrency.lockutils [None req-26f7fbc8-03f4-42ad-82c9-20e385f600f8 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Lock "a5cba512-9b50-4ca3-93eb-345be12dc588" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 933.722900] env[62070]: DEBUG nova.network.neutron [req-cbf92d3f-d57b-44f1-9403-b30e03f2517e req-8eb48c40-2383-4456-9145-fbb898542ec0 service nova] [instance: 21bcb1a6-833b-48f3-8ee2-0e49c64a104f] Updated VIF entry in instance network info cache for port 45420f68-e309-4569-8dac-28e16d9417d7. 
{{(pid=62070) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 933.722900] env[62070]: DEBUG nova.network.neutron [req-cbf92d3f-d57b-44f1-9403-b30e03f2517e req-8eb48c40-2383-4456-9145-fbb898542ec0 service nova] [instance: 21bcb1a6-833b-48f3-8ee2-0e49c64a104f] Updating instance_info_cache with network_info: [{"id": "45420f68-e309-4569-8dac-28e16d9417d7", "address": "fa:16:3e:08:c2:70", "network": {"id": "48dc51c7-cfa4-452e-9d72-2968d9a40dfa", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-274800531-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "925dff51764c4b56ae7ea05fbde2ecdd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2c7c1b46-cb81-45da-b5aa-7905d4da5854", "external-id": "nsx-vlan-transportzone-15", "segmentation_id": 15, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap45420f68-e3", "ovs_interfaceid": "45420f68-e309-4569-8dac-28e16d9417d7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 933.739024] env[62070]: DEBUG oslo_vmware.api [None req-11f65eb1-44d2-44c1-8a02-a34481cdec91 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52e3fa39-7740-0491-8e8a-b3d01f7c9abb, 'name': SearchDatastore_Task, 'duration_secs': 0.009011} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 933.739024] env[62070]: DEBUG oslo_concurrency.lockutils [None req-11f65eb1-44d2-44c1-8a02-a34481cdec91 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] Releasing lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 933.739024] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-11f65eb1-44d2-44c1-8a02-a34481cdec91 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore2] 1440361b-d3b4-4c1c-995c-fe7ff99ee297/1440361b-d3b4-4c1c-995c-fe7ff99ee297.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 933.739024] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-312d4cee-bbe6-40ea-bcf0-edec06a0a7b0 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.748022] env[62070]: DEBUG oslo_vmware.api [None req-11f65eb1-44d2-44c1-8a02-a34481cdec91 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] Waiting for the task: (returnval){ [ 933.748022] env[62070]: value = "task-1122070" [ 933.748022] env[62070]: _type = "Task" [ 933.748022] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 933.755355] env[62070]: DEBUG oslo_vmware.api [None req-11f65eb1-44d2-44c1-8a02-a34481cdec91 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] Task: {'id': task-1122070, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 933.916590] env[62070]: DEBUG oslo_concurrency.lockutils [None req-4e7b3e9b-a4fd-4941-87c2-296d13c2b794 tempest-ServerPasswordTestJSON-1580526241 tempest-ServerPasswordTestJSON-1580526241-project-member] Lock "e51d0146-502a-4ace-856e-b0dbcc11edea" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 26.497s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 934.144419] env[62070]: DEBUG nova.compute.manager [None req-26f7fbc8-03f4-42ad-82c9-20e385f600f8 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] [instance: a5cba512-9b50-4ca3-93eb-345be12dc588] Starting instance... 
{{(pid=62070) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 934.223439] env[62070]: DEBUG oslo_concurrency.lockutils [req-cbf92d3f-d57b-44f1-9403-b30e03f2517e req-8eb48c40-2383-4456-9145-fbb898542ec0 service nova] Releasing lock "refresh_cache-21bcb1a6-833b-48f3-8ee2-0e49c64a104f" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 934.262138] env[62070]: DEBUG oslo_vmware.api [None req-11f65eb1-44d2-44c1-8a02-a34481cdec91 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] Task: {'id': task-1122070, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 934.653870] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0497a04-6b8f-4e62-8673-aa85ad5704b9 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.662076] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2849764c-de73-4d5f-80dc-93da0205ca30 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.667172] env[62070]: DEBUG oslo_concurrency.lockutils [None req-26f7fbc8-03f4-42ad-82c9-20e385f600f8 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 934.695969] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f41cd124-0b65-4753-a929-2693321d83b2 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.703202] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1eb7ee06-c0fb-4229-97c3-1c40ff1ac85c {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.720645] env[62070]: DEBUG nova.compute.provider_tree [None req-906804fc-a7e7-4fcf-821d-5e65108abaa1 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 934.759829] env[62070]: DEBUG oslo_vmware.api [None req-11f65eb1-44d2-44c1-8a02-a34481cdec91 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] Task: {'id': task-1122070, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.591195} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 934.760157] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-11f65eb1-44d2-44c1-8a02-a34481cdec91 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore2] 1440361b-d3b4-4c1c-995c-fe7ff99ee297/1440361b-d3b4-4c1c-995c-fe7ff99ee297.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 934.760412] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-11f65eb1-44d2-44c1-8a02-a34481cdec91 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] [instance: 1440361b-d3b4-4c1c-995c-fe7ff99ee297] Extending root virtual disk to 1048576 {{(pid=62070) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 934.760603] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8490d841-cb9e-4789-b4fc-534a4234bc4b {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.767530] env[62070]: DEBUG oslo_vmware.api [None req-11f65eb1-44d2-44c1-8a02-a34481cdec91 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] Waiting for the task: (returnval){ [ 934.767530] env[62070]: value = "task-1122071" [ 934.767530] env[62070]: _type = "Task" [ 934.767530] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 934.777020] env[62070]: DEBUG oslo_vmware.api [None req-11f65eb1-44d2-44c1-8a02-a34481cdec91 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] Task: {'id': task-1122071, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 935.044163] env[62070]: DEBUG nova.compute.manager [req-69671eeb-1670-476f-96a9-bcaa016d6f5d req-28d82c48-e84b-41a1-8ddb-bca2df819fe3 service nova] [instance: cf52cee8-874e-44e8-a36e-49ac20f3e312] Received event network-changed-c06feb60-bfb1-47ea-8764-52391d9b0b78 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 935.044872] env[62070]: DEBUG nova.compute.manager [req-69671eeb-1670-476f-96a9-bcaa016d6f5d req-28d82c48-e84b-41a1-8ddb-bca2df819fe3 service nova] [instance: cf52cee8-874e-44e8-a36e-49ac20f3e312] Refreshing instance network info cache due to event network-changed-c06feb60-bfb1-47ea-8764-52391d9b0b78. 
{{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 935.045673] env[62070]: DEBUG oslo_concurrency.lockutils [req-69671eeb-1670-476f-96a9-bcaa016d6f5d req-28d82c48-e84b-41a1-8ddb-bca2df819fe3 service nova] Acquiring lock "refresh_cache-cf52cee8-874e-44e8-a36e-49ac20f3e312" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 935.045673] env[62070]: DEBUG oslo_concurrency.lockutils [req-69671eeb-1670-476f-96a9-bcaa016d6f5d req-28d82c48-e84b-41a1-8ddb-bca2df819fe3 service nova] Acquired lock "refresh_cache-cf52cee8-874e-44e8-a36e-49ac20f3e312" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 935.045673] env[62070]: DEBUG nova.network.neutron [req-69671eeb-1670-476f-96a9-bcaa016d6f5d req-28d82c48-e84b-41a1-8ddb-bca2df819fe3 service nova] [instance: cf52cee8-874e-44e8-a36e-49ac20f3e312] Refreshing network info cache for port c06feb60-bfb1-47ea-8764-52391d9b0b78 {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 935.224120] env[62070]: DEBUG nova.scheduler.client.report [None req-906804fc-a7e7-4fcf-821d-5e65108abaa1 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 935.278336] env[62070]: DEBUG oslo_vmware.api [None req-11f65eb1-44d2-44c1-8a02-a34481cdec91 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] Task: {'id': task-1122071, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066346} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 935.278794] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-11f65eb1-44d2-44c1-8a02-a34481cdec91 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] [instance: 1440361b-d3b4-4c1c-995c-fe7ff99ee297] Extended root virtual disk {{(pid=62070) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 935.279788] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3dd535f9-b370-4325-8bfd-648f9a9020d7 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.303752] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-11f65eb1-44d2-44c1-8a02-a34481cdec91 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] [instance: 1440361b-d3b4-4c1c-995c-fe7ff99ee297] Reconfiguring VM instance instance-00000052 to attach disk [datastore2] 1440361b-d3b4-4c1c-995c-fe7ff99ee297/1440361b-d3b4-4c1c-995c-fe7ff99ee297.vmdk or device None with type sparse {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 935.305093] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-18d715a5-54b0-4e8e-bac9-6766e5c58b1b {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.326942] env[62070]: DEBUG oslo_vmware.api [None req-11f65eb1-44d2-44c1-8a02-a34481cdec91 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] Waiting for the task: (returnval){ [ 935.326942] env[62070]: value = "task-1122072" [ 935.326942] env[62070]: _type = "Task" [ 935.326942] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 935.335568] env[62070]: DEBUG oslo_vmware.api [None req-11f65eb1-44d2-44c1-8a02-a34481cdec91 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] Task: {'id': task-1122072, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 935.732477] env[62070]: DEBUG oslo_concurrency.lockutils [None req-906804fc-a7e7-4fcf-821d-5e65108abaa1 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.356s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 935.733140] env[62070]: DEBUG nova.compute.manager [None req-906804fc-a7e7-4fcf-821d-5e65108abaa1 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 84c00e4a-20d3-4739-8535-e27076d85a89] Start building networks asynchronously for instance. 
{{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 935.737088] env[62070]: DEBUG oslo_concurrency.lockutils [None req-40f8be5d-d37a-44a5-98e4-ab20fc83851e tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 19.692s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 935.738897] env[62070]: INFO nova.compute.claims [None req-40f8be5d-d37a-44a5-98e4-ab20fc83851e tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: e850734f-c49c-46d7-87ab-b0d6bed89d9b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 935.763222] env[62070]: DEBUG nova.network.neutron [req-69671eeb-1670-476f-96a9-bcaa016d6f5d req-28d82c48-e84b-41a1-8ddb-bca2df819fe3 service nova] [instance: cf52cee8-874e-44e8-a36e-49ac20f3e312] Updated VIF entry in instance network info cache for port c06feb60-bfb1-47ea-8764-52391d9b0b78. {{(pid=62070) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 935.763607] env[62070]: DEBUG nova.network.neutron [req-69671eeb-1670-476f-96a9-bcaa016d6f5d req-28d82c48-e84b-41a1-8ddb-bca2df819fe3 service nova] [instance: cf52cee8-874e-44e8-a36e-49ac20f3e312] Updating instance_info_cache with network_info: [{"id": "c06feb60-bfb1-47ea-8764-52391d9b0b78", "address": "fa:16:3e:66:3c:b1", "network": {"id": "48dc51c7-cfa4-452e-9d72-2968d9a40dfa", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-274800531-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "925dff51764c4b56ae7ea05fbde2ecdd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2c7c1b46-cb81-45da-b5aa-7905d4da5854", "external-id": "nsx-vlan-transportzone-15", "segmentation_id": 15, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc06feb60-bf", "ovs_interfaceid": "c06feb60-bfb1-47ea-8764-52391d9b0b78", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 935.838382] env[62070]: DEBUG oslo_vmware.api [None req-11f65eb1-44d2-44c1-8a02-a34481cdec91 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] Task: {'id': task-1122072, 'name': ReconfigVM_Task, 'duration_secs': 0.295242} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 935.838382] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-11f65eb1-44d2-44c1-8a02-a34481cdec91 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] [instance: 1440361b-d3b4-4c1c-995c-fe7ff99ee297] Reconfigured VM instance instance-00000052 to attach disk [datastore2] 1440361b-d3b4-4c1c-995c-fe7ff99ee297/1440361b-d3b4-4c1c-995c-fe7ff99ee297.vmdk or device None with type sparse {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 935.838382] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2f5de6ae-c08e-4bcf-a5fc-1b083c88d086 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.844547] env[62070]: DEBUG oslo_vmware.api [None req-11f65eb1-44d2-44c1-8a02-a34481cdec91 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] Waiting for the task: (returnval){ [ 935.844547] env[62070]: value = "task-1122073" [ 935.844547] env[62070]: _type = "Task" [ 935.844547] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 935.852116] env[62070]: DEBUG oslo_vmware.api [None req-11f65eb1-44d2-44c1-8a02-a34481cdec91 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] Task: {'id': task-1122073, 'name': Rename_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 936.243546] env[62070]: DEBUG nova.compute.utils [None req-906804fc-a7e7-4fcf-821d-5e65108abaa1 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Using /dev/sd instead of None {{(pid=62070) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 936.246876] env[62070]: DEBUG nova.compute.manager [None req-906804fc-a7e7-4fcf-821d-5e65108abaa1 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 84c00e4a-20d3-4739-8535-e27076d85a89] Allocating IP information in the background. 
{{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 936.247086] env[62070]: DEBUG nova.network.neutron [None req-906804fc-a7e7-4fcf-821d-5e65108abaa1 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 84c00e4a-20d3-4739-8535-e27076d85a89] allocate_for_instance() {{(pid=62070) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 936.268389] env[62070]: DEBUG oslo_concurrency.lockutils [req-69671eeb-1670-476f-96a9-bcaa016d6f5d req-28d82c48-e84b-41a1-8ddb-bca2df819fe3 service nova] Releasing lock "refresh_cache-cf52cee8-874e-44e8-a36e-49ac20f3e312" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 936.268689] env[62070]: DEBUG nova.compute.manager [req-69671eeb-1670-476f-96a9-bcaa016d6f5d req-28d82c48-e84b-41a1-8ddb-bca2df819fe3 service nova] [instance: cf52cee8-874e-44e8-a36e-49ac20f3e312] Received event network-changed-c06feb60-bfb1-47ea-8764-52391d9b0b78 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 936.268870] env[62070]: DEBUG nova.compute.manager [req-69671eeb-1670-476f-96a9-bcaa016d6f5d req-28d82c48-e84b-41a1-8ddb-bca2df819fe3 service nova] [instance: cf52cee8-874e-44e8-a36e-49ac20f3e312] Refreshing instance network info cache due to event network-changed-c06feb60-bfb1-47ea-8764-52391d9b0b78. {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 936.269144] env[62070]: DEBUG oslo_concurrency.lockutils [req-69671eeb-1670-476f-96a9-bcaa016d6f5d req-28d82c48-e84b-41a1-8ddb-bca2df819fe3 service nova] Acquiring lock "refresh_cache-cf52cee8-874e-44e8-a36e-49ac20f3e312" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 936.269301] env[62070]: DEBUG oslo_concurrency.lockutils [req-69671eeb-1670-476f-96a9-bcaa016d6f5d req-28d82c48-e84b-41a1-8ddb-bca2df819fe3 service nova] Acquired lock "refresh_cache-cf52cee8-874e-44e8-a36e-49ac20f3e312" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 936.269479] env[62070]: DEBUG nova.network.neutron [req-69671eeb-1670-476f-96a9-bcaa016d6f5d req-28d82c48-e84b-41a1-8ddb-bca2df819fe3 service nova] [instance: cf52cee8-874e-44e8-a36e-49ac20f3e312] Refreshing network info cache for port c06feb60-bfb1-47ea-8764-52391d9b0b78 {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 936.294590] env[62070]: DEBUG nova.policy [None req-906804fc-a7e7-4fcf-821d-5e65108abaa1 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0aa820b3e16d4d6fbc6bda0b232025fc', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c91e5eeeeb1742f499b2edaf76a93a3b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62070) authorize /opt/stack/nova/nova/policy.py:201}} [ 936.355087] env[62070]: DEBUG oslo_vmware.api [None req-11f65eb1-44d2-44c1-8a02-a34481cdec91 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] Task: {'id': task-1122073, 'name': Rename_Task, 'duration_secs': 0.118333} completed 
successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 936.355424] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-11f65eb1-44d2-44c1-8a02-a34481cdec91 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] [instance: 1440361b-d3b4-4c1c-995c-fe7ff99ee297] Powering on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 936.355736] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-089f8319-e91e-4672-b846-fc0546aecc13 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.362707] env[62070]: DEBUG oslo_vmware.api [None req-11f65eb1-44d2-44c1-8a02-a34481cdec91 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] Waiting for the task: (returnval){ [ 936.362707] env[62070]: value = "task-1122074" [ 936.362707] env[62070]: _type = "Task" [ 936.362707] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 936.372715] env[62070]: DEBUG oslo_vmware.api [None req-11f65eb1-44d2-44c1-8a02-a34481cdec91 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] Task: {'id': task-1122074, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 936.586169] env[62070]: DEBUG nova.network.neutron [None req-906804fc-a7e7-4fcf-821d-5e65108abaa1 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 84c00e4a-20d3-4739-8535-e27076d85a89] Successfully created port: 0e90b544-5a90-4009-8f52-635e393cf106 {{(pid=62070) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 936.748232] env[62070]: DEBUG nova.compute.manager [None req-906804fc-a7e7-4fcf-821d-5e65108abaa1 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 84c00e4a-20d3-4739-8535-e27076d85a89] Start building block device mappings for instance. {{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 936.825593] env[62070]: DEBUG oslo_concurrency.lockutils [None req-068a16d2-1f3a-48e7-9288-f43eac4f4395 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Acquiring lock "7dc27fe6-495f-498d-88fe-a99ddc19a21c" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 936.825878] env[62070]: DEBUG oslo_concurrency.lockutils [None req-068a16d2-1f3a-48e7-9288-f43eac4f4395 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Lock "7dc27fe6-495f-498d-88fe-a99ddc19a21c" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 936.877316] env[62070]: DEBUG oslo_vmware.api [None req-11f65eb1-44d2-44c1-8a02-a34481cdec91 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] Task: {'id': task-1122074, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 936.991165] env[62070]: DEBUG nova.network.neutron [req-69671eeb-1670-476f-96a9-bcaa016d6f5d req-28d82c48-e84b-41a1-8ddb-bca2df819fe3 service nova] [instance: cf52cee8-874e-44e8-a36e-49ac20f3e312] Updated VIF entry in instance network info cache for port c06feb60-bfb1-47ea-8764-52391d9b0b78. {{(pid=62070) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 936.991588] env[62070]: DEBUG nova.network.neutron [req-69671eeb-1670-476f-96a9-bcaa016d6f5d req-28d82c48-e84b-41a1-8ddb-bca2df819fe3 service nova] [instance: cf52cee8-874e-44e8-a36e-49ac20f3e312] Updating instance_info_cache with network_info: [{"id": "c06feb60-bfb1-47ea-8764-52391d9b0b78", "address": "fa:16:3e:66:3c:b1", "network": {"id": "48dc51c7-cfa4-452e-9d72-2968d9a40dfa", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-274800531-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "925dff51764c4b56ae7ea05fbde2ecdd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2c7c1b46-cb81-45da-b5aa-7905d4da5854", "external-id": "nsx-vlan-transportzone-15", "segmentation_id": 15, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc06feb60-bf", "ovs_interfaceid": "c06feb60-bfb1-47ea-8764-52391d9b0b78", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 937.044405] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0d38cc1-3087-46bc-a1ee-145670216306 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.054913] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-870d22d0-517a-47b9-88b3-f766e38a23f7 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.086255] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22a1f851-44aa-492b-bbd7-d59895407d7b {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.094553] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-beab2344-9740-4faa-92c8-7ba9c3bc8183 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.110179] env[62070]: DEBUG nova.compute.provider_tree [None req-40f8be5d-d37a-44a5-98e4-ab20fc83851e tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 937.330033] env[62070]: DEBUG nova.compute.utils [None 
req-068a16d2-1f3a-48e7-9288-f43eac4f4395 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Using /dev/sd instead of None {{(pid=62070) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 937.376312] env[62070]: DEBUG oslo_vmware.api [None req-11f65eb1-44d2-44c1-8a02-a34481cdec91 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] Task: {'id': task-1122074, 'name': PowerOnVM_Task, 'duration_secs': 0.976717} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 937.376632] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-11f65eb1-44d2-44c1-8a02-a34481cdec91 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] [instance: 1440361b-d3b4-4c1c-995c-fe7ff99ee297] Powered on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 937.376847] env[62070]: INFO nova.compute.manager [None req-11f65eb1-44d2-44c1-8a02-a34481cdec91 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] [instance: 1440361b-d3b4-4c1c-995c-fe7ff99ee297] Took 5.84 seconds to spawn the instance on the hypervisor. [ 937.377045] env[62070]: DEBUG nova.compute.manager [None req-11f65eb1-44d2-44c1-8a02-a34481cdec91 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] [instance: 1440361b-d3b4-4c1c-995c-fe7ff99ee297] Checking state {{(pid=62070) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 937.377912] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a973a4f-5d91-4aec-93e5-a30c7e00e890 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.494173] env[62070]: DEBUG oslo_concurrency.lockutils [req-69671eeb-1670-476f-96a9-bcaa016d6f5d req-28d82c48-e84b-41a1-8ddb-bca2df819fe3 service nova] Releasing lock "refresh_cache-cf52cee8-874e-44e8-a36e-49ac20f3e312" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 937.494517] env[62070]: DEBUG nova.compute.manager [req-69671eeb-1670-476f-96a9-bcaa016d6f5d req-28d82c48-e84b-41a1-8ddb-bca2df819fe3 service nova] [instance: 21bcb1a6-833b-48f3-8ee2-0e49c64a104f] Received event network-changed-45420f68-e309-4569-8dac-28e16d9417d7 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 937.494851] env[62070]: DEBUG nova.compute.manager [req-69671eeb-1670-476f-96a9-bcaa016d6f5d req-28d82c48-e84b-41a1-8ddb-bca2df819fe3 service nova] [instance: 21bcb1a6-833b-48f3-8ee2-0e49c64a104f] Refreshing instance network info cache due to event network-changed-45420f68-e309-4569-8dac-28e16d9417d7. 
{{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 937.494919] env[62070]: DEBUG oslo_concurrency.lockutils [req-69671eeb-1670-476f-96a9-bcaa016d6f5d req-28d82c48-e84b-41a1-8ddb-bca2df819fe3 service nova] Acquiring lock "refresh_cache-21bcb1a6-833b-48f3-8ee2-0e49c64a104f" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 937.495065] env[62070]: DEBUG oslo_concurrency.lockutils [req-69671eeb-1670-476f-96a9-bcaa016d6f5d req-28d82c48-e84b-41a1-8ddb-bca2df819fe3 service nova] Acquired lock "refresh_cache-21bcb1a6-833b-48f3-8ee2-0e49c64a104f" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 937.495241] env[62070]: DEBUG nova.network.neutron [req-69671eeb-1670-476f-96a9-bcaa016d6f5d req-28d82c48-e84b-41a1-8ddb-bca2df819fe3 service nova] [instance: 21bcb1a6-833b-48f3-8ee2-0e49c64a104f] Refreshing network info cache for port 45420f68-e309-4569-8dac-28e16d9417d7 {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 937.612594] env[62070]: DEBUG nova.scheduler.client.report [None req-40f8be5d-d37a-44a5-98e4-ab20fc83851e tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 937.761426] env[62070]: DEBUG nova.compute.manager [None req-906804fc-a7e7-4fcf-821d-5e65108abaa1 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 84c00e4a-20d3-4739-8535-e27076d85a89] Start spawning the instance on the hypervisor. 
{{(pid=62070) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 937.787679] env[62070]: DEBUG nova.virt.hardware [None req-906804fc-a7e7-4fcf-821d-5e65108abaa1 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T09:21:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T09:21:17Z,direct_url=,disk_format='vmdk',id=43ea607c-7ece-4601-9b11-75c6a16aa7dd,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9d42cb2bbadf40d6b35f237f71234611',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T09:21:18Z,virtual_size=,visibility=), allow threads: False {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 937.787948] env[62070]: DEBUG nova.virt.hardware [None req-906804fc-a7e7-4fcf-821d-5e65108abaa1 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Flavor limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 937.788154] env[62070]: DEBUG nova.virt.hardware [None req-906804fc-a7e7-4fcf-821d-5e65108abaa1 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Image limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 937.788520] env[62070]: DEBUG nova.virt.hardware [None req-906804fc-a7e7-4fcf-821d-5e65108abaa1 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Flavor pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 937.788520] env[62070]: DEBUG nova.virt.hardware [None req-906804fc-a7e7-4fcf-821d-5e65108abaa1 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Image pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 937.788651] env[62070]: DEBUG nova.virt.hardware [None req-906804fc-a7e7-4fcf-821d-5e65108abaa1 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 937.788890] env[62070]: DEBUG nova.virt.hardware [None req-906804fc-a7e7-4fcf-821d-5e65108abaa1 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 937.789076] env[62070]: DEBUG nova.virt.hardware [None req-906804fc-a7e7-4fcf-821d-5e65108abaa1 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 937.789255] env[62070]: DEBUG 
nova.virt.hardware [None req-906804fc-a7e7-4fcf-821d-5e65108abaa1 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Got 1 possible topologies {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 937.789445] env[62070]: DEBUG nova.virt.hardware [None req-906804fc-a7e7-4fcf-821d-5e65108abaa1 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 937.789666] env[62070]: DEBUG nova.virt.hardware [None req-906804fc-a7e7-4fcf-821d-5e65108abaa1 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 937.790596] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-700a75a3-4156-4a09-afb5-750158f4cbdd {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.799362] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3939b4d-104a-4b1c-bd86-a42be3a4269c {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.833920] env[62070]: DEBUG oslo_concurrency.lockutils [None req-068a16d2-1f3a-48e7-9288-f43eac4f4395 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Lock "7dc27fe6-495f-498d-88fe-a99ddc19a21c" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.008s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 937.898725] env[62070]: INFO nova.compute.manager [None req-11f65eb1-44d2-44c1-8a02-a34481cdec91 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] [instance: 1440361b-d3b4-4c1c-995c-fe7ff99ee297] Took 32.15 seconds to build instance. 
[ 937.982324] env[62070]: DEBUG nova.compute.manager [req-db0bfd20-469b-4313-80ce-51a8a4fd4114 req-d4a4743b-4934-41b1-83c4-50918164e5df service nova] [instance: 84c00e4a-20d3-4739-8535-e27076d85a89] Received event network-vif-plugged-0e90b544-5a90-4009-8f52-635e393cf106 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 937.982542] env[62070]: DEBUG oslo_concurrency.lockutils [req-db0bfd20-469b-4313-80ce-51a8a4fd4114 req-d4a4743b-4934-41b1-83c4-50918164e5df service nova] Acquiring lock "84c00e4a-20d3-4739-8535-e27076d85a89-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 937.982761] env[62070]: DEBUG oslo_concurrency.lockutils [req-db0bfd20-469b-4313-80ce-51a8a4fd4114 req-d4a4743b-4934-41b1-83c4-50918164e5df service nova] Lock "84c00e4a-20d3-4739-8535-e27076d85a89-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 937.982944] env[62070]: DEBUG oslo_concurrency.lockutils [req-db0bfd20-469b-4313-80ce-51a8a4fd4114 req-d4a4743b-4934-41b1-83c4-50918164e5df service nova] Lock "84c00e4a-20d3-4739-8535-e27076d85a89-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 937.983416] env[62070]: DEBUG nova.compute.manager [req-db0bfd20-469b-4313-80ce-51a8a4fd4114 req-d4a4743b-4934-41b1-83c4-50918164e5df service nova] [instance: 84c00e4a-20d3-4739-8535-e27076d85a89] No waiting events found dispatching network-vif-plugged-0e90b544-5a90-4009-8f52-635e393cf106 {{(pid=62070) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 937.983670] env[62070]: WARNING nova.compute.manager [req-db0bfd20-469b-4313-80ce-51a8a4fd4114 req-d4a4743b-4934-41b1-83c4-50918164e5df service nova] [instance: 84c00e4a-20d3-4739-8535-e27076d85a89] Received unexpected event network-vif-plugged-0e90b544-5a90-4009-8f52-635e393cf106 for instance with vm_state building and task_state spawning. [ 938.072575] env[62070]: DEBUG nova.network.neutron [None req-906804fc-a7e7-4fcf-821d-5e65108abaa1 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 84c00e4a-20d3-4739-8535-e27076d85a89] Successfully updated port: 0e90b544-5a90-4009-8f52-635e393cf106 {{(pid=62070) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 938.117179] env[62070]: DEBUG oslo_concurrency.lockutils [None req-40f8be5d-d37a-44a5-98e4-ab20fc83851e tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.380s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 938.117724] env[62070]: DEBUG nova.compute.manager [None req-40f8be5d-d37a-44a5-98e4-ab20fc83851e tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: e850734f-c49c-46d7-87ab-b0d6bed89d9b] Start building networks asynchronously for instance. 
{{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 938.120611] env[62070]: DEBUG oslo_concurrency.lockutils [None req-87b139b2-90a5-4b70-b835-8cb543d252a2 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.973s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 938.121587] env[62070]: INFO nova.compute.claims [None req-87b139b2-90a5-4b70-b835-8cb543d252a2 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 2c58db1d-405f-4489-85db-c74723be4a8d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 938.232153] env[62070]: DEBUG nova.network.neutron [req-69671eeb-1670-476f-96a9-bcaa016d6f5d req-28d82c48-e84b-41a1-8ddb-bca2df819fe3 service nova] [instance: 21bcb1a6-833b-48f3-8ee2-0e49c64a104f] Updated VIF entry in instance network info cache for port 45420f68-e309-4569-8dac-28e16d9417d7. {{(pid=62070) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 938.232537] env[62070]: DEBUG nova.network.neutron [req-69671eeb-1670-476f-96a9-bcaa016d6f5d req-28d82c48-e84b-41a1-8ddb-bca2df819fe3 service nova] [instance: 21bcb1a6-833b-48f3-8ee2-0e49c64a104f] Updating instance_info_cache with network_info: [{"id": "45420f68-e309-4569-8dac-28e16d9417d7", "address": "fa:16:3e:08:c2:70", "network": {"id": "48dc51c7-cfa4-452e-9d72-2968d9a40dfa", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-274800531-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.203", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "925dff51764c4b56ae7ea05fbde2ecdd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2c7c1b46-cb81-45da-b5aa-7905d4da5854", "external-id": "nsx-vlan-transportzone-15", "segmentation_id": 15, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap45420f68-e3", "ovs_interfaceid": "45420f68-e309-4569-8dac-28e16d9417d7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 938.400915] env[62070]: DEBUG oslo_concurrency.lockutils [None req-11f65eb1-44d2-44c1-8a02-a34481cdec91 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] Lock "1440361b-d3b4-4c1c-995c-fe7ff99ee297" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 33.657s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 938.575731] env[62070]: DEBUG oslo_concurrency.lockutils [None req-906804fc-a7e7-4fcf-821d-5e65108abaa1 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Acquiring lock "refresh_cache-84c00e4a-20d3-4739-8535-e27076d85a89" 
{{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 938.575964] env[62070]: DEBUG oslo_concurrency.lockutils [None req-906804fc-a7e7-4fcf-821d-5e65108abaa1 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Acquired lock "refresh_cache-84c00e4a-20d3-4739-8535-e27076d85a89" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 938.576079] env[62070]: DEBUG nova.network.neutron [None req-906804fc-a7e7-4fcf-821d-5e65108abaa1 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 84c00e4a-20d3-4739-8535-e27076d85a89] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 938.626168] env[62070]: DEBUG nova.compute.utils [None req-40f8be5d-d37a-44a5-98e4-ab20fc83851e tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Using /dev/sd instead of None {{(pid=62070) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 938.630814] env[62070]: DEBUG nova.compute.manager [None req-40f8be5d-d37a-44a5-98e4-ab20fc83851e tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: e850734f-c49c-46d7-87ab-b0d6bed89d9b] Allocating IP information in the background. {{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 938.631087] env[62070]: DEBUG nova.network.neutron [None req-40f8be5d-d37a-44a5-98e4-ab20fc83851e tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: e850734f-c49c-46d7-87ab-b0d6bed89d9b] allocate_for_instance() {{(pid=62070) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 938.685201] env[62070]: DEBUG nova.policy [None req-40f8be5d-d37a-44a5-98e4-ab20fc83851e tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f866f97eed1a41b39b4cd552102c6e21', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9191f0e6c2ee401abca64c0780e230bf', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62070) authorize /opt/stack/nova/nova/policy.py:201}} [ 938.735405] env[62070]: DEBUG oslo_concurrency.lockutils [req-69671eeb-1670-476f-96a9-bcaa016d6f5d req-28d82c48-e84b-41a1-8ddb-bca2df819fe3 service nova] Releasing lock "refresh_cache-21bcb1a6-833b-48f3-8ee2-0e49c64a104f" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 938.837050] env[62070]: INFO nova.compute.manager [None req-2cdd6dc5-b80a-4fa8-acc2-8040ca3495d3 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] [instance: 1440361b-d3b4-4c1c-995c-fe7ff99ee297] Rebuilding instance [ 938.877844] env[62070]: DEBUG nova.compute.manager [None req-2cdd6dc5-b80a-4fa8-acc2-8040ca3495d3 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] [instance: 1440361b-d3b4-4c1c-995c-fe7ff99ee297] Checking state {{(pid=62070) _get_power_state 
/opt/stack/nova/nova/compute/manager.py:1791}} [ 938.878781] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-327c00f9-2e07-4e0c-b114-dbbcefcf4714 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.910954] env[62070]: DEBUG oslo_concurrency.lockutils [None req-068a16d2-1f3a-48e7-9288-f43eac4f4395 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Acquiring lock "7dc27fe6-495f-498d-88fe-a99ddc19a21c" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 938.910954] env[62070]: DEBUG oslo_concurrency.lockutils [None req-068a16d2-1f3a-48e7-9288-f43eac4f4395 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Lock "7dc27fe6-495f-498d-88fe-a99ddc19a21c" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 938.911063] env[62070]: INFO nova.compute.manager [None req-068a16d2-1f3a-48e7-9288-f43eac4f4395 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] [instance: 7dc27fe6-495f-498d-88fe-a99ddc19a21c] Attaching volume c400637e-5e41-43a1-91df-a0f4d9c4e253 to /dev/sdb [ 938.935045] env[62070]: DEBUG nova.network.neutron [None req-40f8be5d-d37a-44a5-98e4-ab20fc83851e tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: e850734f-c49c-46d7-87ab-b0d6bed89d9b] Successfully created port: 7fbe6487-eae6-49f2-894f-82f8519f4232 {{(pid=62070) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 938.945908] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48eae521-68a2-4458-b4cd-45ee159ecfab {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.954381] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d72667cb-41c1-49e2-92ba-b12255af3053 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.966477] env[62070]: DEBUG nova.virt.block_device [None req-068a16d2-1f3a-48e7-9288-f43eac4f4395 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] [instance: 7dc27fe6-495f-498d-88fe-a99ddc19a21c] Updating existing volume attachment record: d0597c51-d17e-40d4-8f42-a75a7979d387 {{(pid=62070) _volume_attach /opt/stack/nova/nova/virt/block_device.py:679}} [ 939.117567] env[62070]: DEBUG nova.network.neutron [None req-906804fc-a7e7-4fcf-821d-5e65108abaa1 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 84c00e4a-20d3-4739-8535-e27076d85a89] Instance cache missing network info. 
{{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 939.131288] env[62070]: DEBUG nova.compute.manager [None req-40f8be5d-d37a-44a5-98e4-ab20fc83851e tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: e850734f-c49c-46d7-87ab-b0d6bed89d9b] Start building block device mappings for instance. {{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 939.269915] env[62070]: DEBUG nova.network.neutron [None req-906804fc-a7e7-4fcf-821d-5e65108abaa1 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 84c00e4a-20d3-4739-8535-e27076d85a89] Updating instance_info_cache with network_info: [{"id": "0e90b544-5a90-4009-8f52-635e393cf106", "address": "fa:16:3e:48:de:00", "network": {"id": "0d81bd04-b549-4e1f-97a2-0a0b9391dd3f", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-108214409-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c91e5eeeeb1742f499b2edaf76a93a3b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cf5bfbae-a882-4d34-be33-b31e274b3077", "external-id": "nsx-vlan-transportzone-556", "segmentation_id": 556, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0e90b544-5a", "ovs_interfaceid": "0e90b544-5a90-4009-8f52-635e393cf106", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 939.381630] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e21fcaa8-75ec-409c-8e93-9a5b9e77598e {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.389241] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-2cdd6dc5-b80a-4fa8-acc2-8040ca3495d3 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] [instance: 1440361b-d3b4-4c1c-995c-fe7ff99ee297] Powering off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 939.389528] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-331b90be-0038-407f-972a-ae6a4b9f9917 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.391606] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fb731d3-f938-4298-bfca-b6a55ca49004 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.425491] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21cfc3df-99c2-4906-a285-dc35912f7141 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.428778] env[62070]: DEBUG oslo_vmware.api [None 
req-2cdd6dc5-b80a-4fa8-acc2-8040ca3495d3 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] Waiting for the task: (returnval){ [ 939.428778] env[62070]: value = "task-1122078" [ 939.428778] env[62070]: _type = "Task" [ 939.428778] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 939.436279] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81537a8d-aca8-47cc-9578-93ccf4880beb {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.444803] env[62070]: DEBUG oslo_vmware.api [None req-2cdd6dc5-b80a-4fa8-acc2-8040ca3495d3 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] Task: {'id': task-1122078, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 939.454722] env[62070]: DEBUG nova.compute.provider_tree [None req-87b139b2-90a5-4b70-b835-8cb543d252a2 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 939.772566] env[62070]: DEBUG oslo_concurrency.lockutils [None req-906804fc-a7e7-4fcf-821d-5e65108abaa1 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Releasing lock "refresh_cache-84c00e4a-20d3-4739-8535-e27076d85a89" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 939.772951] env[62070]: DEBUG nova.compute.manager [None req-906804fc-a7e7-4fcf-821d-5e65108abaa1 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 84c00e4a-20d3-4739-8535-e27076d85a89] Instance network_info: |[{"id": "0e90b544-5a90-4009-8f52-635e393cf106", "address": "fa:16:3e:48:de:00", "network": {"id": "0d81bd04-b549-4e1f-97a2-0a0b9391dd3f", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-108214409-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c91e5eeeeb1742f499b2edaf76a93a3b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cf5bfbae-a882-4d34-be33-b31e274b3077", "external-id": "nsx-vlan-transportzone-556", "segmentation_id": 556, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0e90b544-5a", "ovs_interfaceid": "0e90b544-5a90-4009-8f52-635e393cf106", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 939.773447] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-906804fc-a7e7-4fcf-821d-5e65108abaa1 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 
84c00e4a-20d3-4739-8535-e27076d85a89] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:48:de:00', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'cf5bfbae-a882-4d34-be33-b31e274b3077', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0e90b544-5a90-4009-8f52-635e393cf106', 'vif_model': 'vmxnet3'}] {{(pid=62070) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 939.781223] env[62070]: DEBUG oslo.service.loopingcall [None req-906804fc-a7e7-4fcf-821d-5e65108abaa1 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 939.781467] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 84c00e4a-20d3-4739-8535-e27076d85a89] Creating VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 939.781738] env[62070]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3aaa18c8-5b76-42b7-a323-238b9853e2e2 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.801388] env[62070]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 939.801388] env[62070]: value = "task-1122079" [ 939.801388] env[62070]: _type = "Task" [ 939.801388] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 939.809642] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1122079, 'name': CreateVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 939.939055] env[62070]: DEBUG oslo_vmware.api [None req-2cdd6dc5-b80a-4fa8-acc2-8040ca3495d3 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] Task: {'id': task-1122078, 'name': PowerOffVM_Task, 'duration_secs': 0.191338} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 939.939462] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-2cdd6dc5-b80a-4fa8-acc2-8040ca3495d3 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] [instance: 1440361b-d3b4-4c1c-995c-fe7ff99ee297] Powered off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 939.939586] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-2cdd6dc5-b80a-4fa8-acc2-8040ca3495d3 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] [instance: 1440361b-d3b4-4c1c-995c-fe7ff99ee297] Destroying instance {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 939.940299] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa9fff78-53ae-416a-8f06-81c5f1dea904 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.946528] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-2cdd6dc5-b80a-4fa8-acc2-8040ca3495d3 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] [instance: 1440361b-d3b4-4c1c-995c-fe7ff99ee297] Unregistering the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 939.946742] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-60c665d0-ba44-4c19-9a76-c8907adadb55 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.958026] env[62070]: DEBUG nova.scheduler.client.report [None req-87b139b2-90a5-4b70-b835-8cb543d252a2 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 939.970909] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-2cdd6dc5-b80a-4fa8-acc2-8040ca3495d3 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] [instance: 1440361b-d3b4-4c1c-995c-fe7ff99ee297] Unregistered the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 939.971163] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-2cdd6dc5-b80a-4fa8-acc2-8040ca3495d3 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] [instance: 1440361b-d3b4-4c1c-995c-fe7ff99ee297] Deleting contents of the VM from datastore datastore2 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 939.971418] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-2cdd6dc5-b80a-4fa8-acc2-8040ca3495d3 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] Deleting the datastore file [datastore2] 1440361b-d3b4-4c1c-995c-fe7ff99ee297 {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 939.971711] env[62070]: DEBUG 
oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ce7fbe6f-0ab7-4599-97bc-550e8edd67a8 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.978511] env[62070]: DEBUG oslo_vmware.api [None req-2cdd6dc5-b80a-4fa8-acc2-8040ca3495d3 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] Waiting for the task: (returnval){ [ 939.978511] env[62070]: value = "task-1122081" [ 939.978511] env[62070]: _type = "Task" [ 939.978511] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 939.987966] env[62070]: DEBUG oslo_vmware.api [None req-2cdd6dc5-b80a-4fa8-acc2-8040ca3495d3 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] Task: {'id': task-1122081, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 940.011387] env[62070]: DEBUG nova.compute.manager [req-28265095-4103-4c16-ad83-4425527e8ed0 req-e4d10b38-d8c6-4524-aa10-386f3f099099 service nova] [instance: 84c00e4a-20d3-4739-8535-e27076d85a89] Received event network-changed-0e90b544-5a90-4009-8f52-635e393cf106 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 940.011606] env[62070]: DEBUG nova.compute.manager [req-28265095-4103-4c16-ad83-4425527e8ed0 req-e4d10b38-d8c6-4524-aa10-386f3f099099 service nova] [instance: 84c00e4a-20d3-4739-8535-e27076d85a89] Refreshing instance network info cache due to event network-changed-0e90b544-5a90-4009-8f52-635e393cf106. {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 940.011840] env[62070]: DEBUG oslo_concurrency.lockutils [req-28265095-4103-4c16-ad83-4425527e8ed0 req-e4d10b38-d8c6-4524-aa10-386f3f099099 service nova] Acquiring lock "refresh_cache-84c00e4a-20d3-4739-8535-e27076d85a89" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 940.012029] env[62070]: DEBUG oslo_concurrency.lockutils [req-28265095-4103-4c16-ad83-4425527e8ed0 req-e4d10b38-d8c6-4524-aa10-386f3f099099 service nova] Acquired lock "refresh_cache-84c00e4a-20d3-4739-8535-e27076d85a89" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 940.012321] env[62070]: DEBUG nova.network.neutron [req-28265095-4103-4c16-ad83-4425527e8ed0 req-e4d10b38-d8c6-4524-aa10-386f3f099099 service nova] [instance: 84c00e4a-20d3-4739-8535-e27076d85a89] Refreshing network info cache for port 0e90b544-5a90-4009-8f52-635e393cf106 {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 940.141246] env[62070]: DEBUG nova.compute.manager [None req-40f8be5d-d37a-44a5-98e4-ab20fc83851e tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: e850734f-c49c-46d7-87ab-b0d6bed89d9b] Start spawning the instance on the hypervisor. 
{{(pid=62070) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 940.166662] env[62070]: DEBUG nova.virt.hardware [None req-40f8be5d-d37a-44a5-98e4-ab20fc83851e tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T09:21:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T09:21:17Z,direct_url=,disk_format='vmdk',id=43ea607c-7ece-4601-9b11-75c6a16aa7dd,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9d42cb2bbadf40d6b35f237f71234611',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T09:21:18Z,virtual_size=,visibility=), allow threads: False {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 940.167011] env[62070]: DEBUG nova.virt.hardware [None req-40f8be5d-d37a-44a5-98e4-ab20fc83851e tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Flavor limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 940.167205] env[62070]: DEBUG nova.virt.hardware [None req-40f8be5d-d37a-44a5-98e4-ab20fc83851e tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Image limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 940.167546] env[62070]: DEBUG nova.virt.hardware [None req-40f8be5d-d37a-44a5-98e4-ab20fc83851e tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Flavor pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 940.167760] env[62070]: DEBUG nova.virt.hardware [None req-40f8be5d-d37a-44a5-98e4-ab20fc83851e tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Image pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 940.167952] env[62070]: DEBUG nova.virt.hardware [None req-40f8be5d-d37a-44a5-98e4-ab20fc83851e tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 940.168196] env[62070]: DEBUG nova.virt.hardware [None req-40f8be5d-d37a-44a5-98e4-ab20fc83851e tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 940.168458] env[62070]: DEBUG nova.virt.hardware [None req-40f8be5d-d37a-44a5-98e4-ab20fc83851e tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 940.168692] env[62070]: DEBUG nova.virt.hardware [None 
req-40f8be5d-d37a-44a5-98e4-ab20fc83851e tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Got 1 possible topologies {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 940.168875] env[62070]: DEBUG nova.virt.hardware [None req-40f8be5d-d37a-44a5-98e4-ab20fc83851e tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 940.169077] env[62070]: DEBUG nova.virt.hardware [None req-40f8be5d-d37a-44a5-98e4-ab20fc83851e tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 940.170015] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa567804-9a24-4ad3-8726-b76c3d22ca4e {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.178021] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00908639-ec0d-492b-8b00-9f2f9b01ae08 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.311068] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1122079, 'name': CreateVM_Task, 'duration_secs': 0.32063} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 940.311279] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 84c00e4a-20d3-4739-8535-e27076d85a89] Created VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 940.311967] env[62070]: DEBUG oslo_concurrency.lockutils [None req-906804fc-a7e7-4fcf-821d-5e65108abaa1 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 940.312155] env[62070]: DEBUG oslo_concurrency.lockutils [None req-906804fc-a7e7-4fcf-821d-5e65108abaa1 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Acquired lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 940.312489] env[62070]: DEBUG oslo_concurrency.lockutils [None req-906804fc-a7e7-4fcf-821d-5e65108abaa1 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 940.312751] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7c8942a0-d133-42de-85ad-36d8c103c02c {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.316945] env[62070]: DEBUG oslo_vmware.api [None 
req-906804fc-a7e7-4fcf-821d-5e65108abaa1 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Waiting for the task: (returnval){ [ 940.316945] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]52923c24-c6ae-5ed3-c095-bbd96d2ed9eb" [ 940.316945] env[62070]: _type = "Task" [ 940.316945] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 940.324229] env[62070]: DEBUG oslo_vmware.api [None req-906804fc-a7e7-4fcf-821d-5e65108abaa1 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52923c24-c6ae-5ed3-c095-bbd96d2ed9eb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 940.382326] env[62070]: DEBUG nova.compute.manager [req-e8b1aec2-bc6b-477a-94d5-ef91a0493fe5 req-69532a9f-b9e6-40ad-97ba-74ed8f8b1841 service nova] [instance: e850734f-c49c-46d7-87ab-b0d6bed89d9b] Received event network-vif-plugged-7fbe6487-eae6-49f2-894f-82f8519f4232 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 940.382554] env[62070]: DEBUG oslo_concurrency.lockutils [req-e8b1aec2-bc6b-477a-94d5-ef91a0493fe5 req-69532a9f-b9e6-40ad-97ba-74ed8f8b1841 service nova] Acquiring lock "e850734f-c49c-46d7-87ab-b0d6bed89d9b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 940.382777] env[62070]: DEBUG oslo_concurrency.lockutils [req-e8b1aec2-bc6b-477a-94d5-ef91a0493fe5 req-69532a9f-b9e6-40ad-97ba-74ed8f8b1841 service nova] Lock "e850734f-c49c-46d7-87ab-b0d6bed89d9b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 940.382947] env[62070]: DEBUG oslo_concurrency.lockutils [req-e8b1aec2-bc6b-477a-94d5-ef91a0493fe5 req-69532a9f-b9e6-40ad-97ba-74ed8f8b1841 service nova] Lock "e850734f-c49c-46d7-87ab-b0d6bed89d9b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 940.383136] env[62070]: DEBUG nova.compute.manager [req-e8b1aec2-bc6b-477a-94d5-ef91a0493fe5 req-69532a9f-b9e6-40ad-97ba-74ed8f8b1841 service nova] [instance: e850734f-c49c-46d7-87ab-b0d6bed89d9b] No waiting events found dispatching network-vif-plugged-7fbe6487-eae6-49f2-894f-82f8519f4232 {{(pid=62070) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 940.383307] env[62070]: WARNING nova.compute.manager [req-e8b1aec2-bc6b-477a-94d5-ef91a0493fe5 req-69532a9f-b9e6-40ad-97ba-74ed8f8b1841 service nova] [instance: e850734f-c49c-46d7-87ab-b0d6bed89d9b] Received unexpected event network-vif-plugged-7fbe6487-eae6-49f2-894f-82f8519f4232 for instance with vm_state building and task_state spawning. 
[ 940.455208] env[62070]: DEBUG nova.network.neutron [None req-40f8be5d-d37a-44a5-98e4-ab20fc83851e tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: e850734f-c49c-46d7-87ab-b0d6bed89d9b] Successfully updated port: 7fbe6487-eae6-49f2-894f-82f8519f4232 {{(pid=62070) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 940.463238] env[62070]: DEBUG oslo_concurrency.lockutils [None req-87b139b2-90a5-4b70-b835-8cb543d252a2 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.343s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 940.463766] env[62070]: DEBUG nova.compute.manager [None req-87b139b2-90a5-4b70-b835-8cb543d252a2 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 2c58db1d-405f-4489-85db-c74723be4a8d] Start building networks asynchronously for instance. {{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 940.466585] env[62070]: DEBUG oslo_concurrency.lockutils [None req-1c8069d9-861b-46c0-a241-ffab59e77207 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 23.148s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 940.467963] env[62070]: INFO nova.compute.claims [None req-1c8069d9-861b-46c0-a241-ffab59e77207 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 519cad6a-ebe0-42db-a19e-27249b83436e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 940.491066] env[62070]: DEBUG oslo_vmware.api [None req-2cdd6dc5-b80a-4fa8-acc2-8040ca3495d3 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] Task: {'id': task-1122081, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.107346} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 940.491347] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-2cdd6dc5-b80a-4fa8-acc2-8040ca3495d3 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] Deleted the datastore file {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 940.491537] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-2cdd6dc5-b80a-4fa8-acc2-8040ca3495d3 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] [instance: 1440361b-d3b4-4c1c-995c-fe7ff99ee297] Deleted contents of the VM from datastore datastore2 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 940.491722] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-2cdd6dc5-b80a-4fa8-acc2-8040ca3495d3 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] [instance: 1440361b-d3b4-4c1c-995c-fe7ff99ee297] Instance destroyed {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 940.702564] env[62070]: DEBUG nova.network.neutron [req-28265095-4103-4c16-ad83-4425527e8ed0 req-e4d10b38-d8c6-4524-aa10-386f3f099099 service nova] [instance: 84c00e4a-20d3-4739-8535-e27076d85a89] Updated VIF entry in instance network info cache for port 0e90b544-5a90-4009-8f52-635e393cf106. {{(pid=62070) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 940.702926] env[62070]: DEBUG nova.network.neutron [req-28265095-4103-4c16-ad83-4425527e8ed0 req-e4d10b38-d8c6-4524-aa10-386f3f099099 service nova] [instance: 84c00e4a-20d3-4739-8535-e27076d85a89] Updating instance_info_cache with network_info: [{"id": "0e90b544-5a90-4009-8f52-635e393cf106", "address": "fa:16:3e:48:de:00", "network": {"id": "0d81bd04-b549-4e1f-97a2-0a0b9391dd3f", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-108214409-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c91e5eeeeb1742f499b2edaf76a93a3b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cf5bfbae-a882-4d34-be33-b31e274b3077", "external-id": "nsx-vlan-transportzone-556", "segmentation_id": 556, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0e90b544-5a", "ovs_interfaceid": "0e90b544-5a90-4009-8f52-635e393cf106", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 940.827097] env[62070]: DEBUG oslo_vmware.api [None req-906804fc-a7e7-4fcf-821d-5e65108abaa1 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52923c24-c6ae-5ed3-c095-bbd96d2ed9eb, 'name': SearchDatastore_Task, 'duration_secs': 0.008269} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 940.827379] env[62070]: DEBUG oslo_concurrency.lockutils [None req-906804fc-a7e7-4fcf-821d-5e65108abaa1 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Releasing lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 940.828038] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-906804fc-a7e7-4fcf-821d-5e65108abaa1 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 84c00e4a-20d3-4739-8535-e27076d85a89] Processing image 43ea607c-7ece-4601-9b11-75c6a16aa7dd {{(pid=62070) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 940.828038] env[62070]: DEBUG oslo_concurrency.lockutils [None req-906804fc-a7e7-4fcf-821d-5e65108abaa1 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 940.828038] env[62070]: DEBUG oslo_concurrency.lockutils [None req-906804fc-a7e7-4fcf-821d-5e65108abaa1 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Acquired lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 940.828220] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-906804fc-a7e7-4fcf-821d-5e65108abaa1 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 940.828458] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-134edd59-5258-47eb-88f1-cdc2752c3937 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.836225] env[62070]: DEBUG oslo_concurrency.lockutils [None req-58873f53-be6c-4871-8071-bfb0239a065e tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Acquiring lock "interface-21bcb1a6-833b-48f3-8ee2-0e49c64a104f-60714fe7-f6bc-4f1f-b4d0-a0b45b5a8f70" by "nova.compute.manager.ComputeManager.attach_interface.<locals>.do_attach_interface" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 940.836465] env[62070]: DEBUG oslo_concurrency.lockutils [None req-58873f53-be6c-4871-8071-bfb0239a065e tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Lock "interface-21bcb1a6-833b-48f3-8ee2-0e49c64a104f-60714fe7-f6bc-4f1f-b4d0-a0b45b5a8f70" acquired by "nova.compute.manager.ComputeManager.attach_interface.<locals>.do_attach_interface" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 940.836819] env[62070]: DEBUG nova.objects.instance [None req-58873f53-be6c-4871-8071-bfb0239a065e
tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Lazy-loading 'flavor' on Instance uuid 21bcb1a6-833b-48f3-8ee2-0e49c64a104f {{(pid=62070) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 940.838819] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-906804fc-a7e7-4fcf-821d-5e65108abaa1 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 940.839055] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-906804fc-a7e7-4fcf-821d-5e65108abaa1 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62070) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 940.839691] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a3fa314b-7a60-4601-aea4-afa53ce53718 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.845738] env[62070]: DEBUG oslo_vmware.api [None req-906804fc-a7e7-4fcf-821d-5e65108abaa1 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Waiting for the task: (returnval){ [ 940.845738] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]525275f4-7e44-0acc-3902-1cc7bb680e9f" [ 940.845738] env[62070]: _type = "Task" [ 940.845738] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 940.853483] env[62070]: DEBUG oslo_vmware.api [None req-906804fc-a7e7-4fcf-821d-5e65108abaa1 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]525275f4-7e44-0acc-3902-1cc7bb680e9f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 940.958993] env[62070]: DEBUG oslo_concurrency.lockutils [None req-40f8be5d-d37a-44a5-98e4-ab20fc83851e tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Acquiring lock "refresh_cache-e850734f-c49c-46d7-87ab-b0d6bed89d9b" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 940.959244] env[62070]: DEBUG oslo_concurrency.lockutils [None req-40f8be5d-d37a-44a5-98e4-ab20fc83851e tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Acquired lock "refresh_cache-e850734f-c49c-46d7-87ab-b0d6bed89d9b" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 940.959444] env[62070]: DEBUG nova.network.neutron [None req-40f8be5d-d37a-44a5-98e4-ab20fc83851e tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: e850734f-c49c-46d7-87ab-b0d6bed89d9b] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 940.971960] env[62070]: DEBUG nova.compute.utils [None req-87b139b2-90a5-4b70-b835-8cb543d252a2 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Using /dev/sd instead of None {{(pid=62070) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 940.975627] env[62070]: DEBUG nova.compute.manager [None req-87b139b2-90a5-4b70-b835-8cb543d252a2 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 2c58db1d-405f-4489-85db-c74723be4a8d] Allocating IP information in the background. 
{{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 940.975814] env[62070]: DEBUG nova.network.neutron [None req-87b139b2-90a5-4b70-b835-8cb543d252a2 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 2c58db1d-405f-4489-85db-c74723be4a8d] allocate_for_instance() {{(pid=62070) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 941.014929] env[62070]: DEBUG nova.policy [None req-87b139b2-90a5-4b70-b835-8cb543d252a2 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0ab707a4862f42199fc2a91733563cde', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f29ac48ab6544ec0bd1d210aec05dbc5', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62070) authorize /opt/stack/nova/nova/policy.py:201}} [ 941.205945] env[62070]: DEBUG oslo_concurrency.lockutils [req-28265095-4103-4c16-ad83-4425527e8ed0 req-e4d10b38-d8c6-4524-aa10-386f3f099099 service nova] Releasing lock "refresh_cache-84c00e4a-20d3-4739-8535-e27076d85a89" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 941.263841] env[62070]: DEBUG nova.network.neutron [None req-87b139b2-90a5-4b70-b835-8cb543d252a2 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 2c58db1d-405f-4489-85db-c74723be4a8d] Successfully created port: 8876137b-4c95-4f50-8bf9-ad7d44ac5052 {{(pid=62070) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 941.357765] env[62070]: DEBUG oslo_vmware.api [None req-906804fc-a7e7-4fcf-821d-5e65108abaa1 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]525275f4-7e44-0acc-3902-1cc7bb680e9f, 'name': SearchDatastore_Task, 'duration_secs': 0.008484} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 941.358596] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f408e85c-1003-4112-b092-0ba8e3652b44 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.363736] env[62070]: DEBUG oslo_vmware.api [None req-906804fc-a7e7-4fcf-821d-5e65108abaa1 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Waiting for the task: (returnval){ [ 941.363736] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]52c7e567-48f5-c48e-9adf-7e1b9f0d38b1" [ 941.363736] env[62070]: _type = "Task" [ 941.363736] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 941.373079] env[62070]: DEBUG oslo_vmware.api [None req-906804fc-a7e7-4fcf-821d-5e65108abaa1 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52c7e567-48f5-c48e-9adf-7e1b9f0d38b1, 'name': SearchDatastore_Task} progress is 0%. 
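The nova.policy DEBUG above is the expected result for a non-admin tempest credential hitting the admin-only network:attach_external_network rule. A minimal oslo.policy sketch of such a check, assuming the usual is_admin:True default for that rule and a trimmed credential dict:

    # Minimal oslo.policy check mirroring the "Policy check ... failed" DEBUG
    # above. The rule string and trimmed creds are assumptions for illustration.
    from oslo_config import cfg
    from oslo_policy import policy

    CONF = cfg.CONF
    enforcer = policy.Enforcer(CONF)
    enforcer.register_default(
        policy.RuleDefault('network:attach_external_network', 'is_admin:True'))

    creds = {'is_admin': False, 'roles': ['reader', 'member'],
             'project_id': 'f29ac48ab6544ec0bd1d210aec05dbc5'}
    # do_raise=False returns False instead of raising, which Nova then reports
    # as a DEBUG "Policy check ... failed" rather than an error.
    allowed = enforcer.enforce('network:attach_external_network', {}, creds,
                               do_raise=False)
    print(allowed)  # False for these credentials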
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 941.446333] env[62070]: DEBUG nova.objects.instance [None req-58873f53-be6c-4871-8071-bfb0239a065e tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Lazy-loading 'pci_requests' on Instance uuid 21bcb1a6-833b-48f3-8ee2-0e49c64a104f {{(pid=62070) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 941.476425] env[62070]: DEBUG nova.compute.manager [None req-87b139b2-90a5-4b70-b835-8cb543d252a2 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 2c58db1d-405f-4489-85db-c74723be4a8d] Start building block device mappings for instance. {{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 941.491958] env[62070]: DEBUG nova.network.neutron [None req-40f8be5d-d37a-44a5-98e4-ab20fc83851e tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: e850734f-c49c-46d7-87ab-b0d6bed89d9b] Instance cache missing network info. {{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 941.525902] env[62070]: DEBUG nova.virt.hardware [None req-2cdd6dc5-b80a-4fa8-acc2-8040ca3495d3 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T09:21:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T09:21:17Z,direct_url=,disk_format='vmdk',id=43ea607c-7ece-4601-9b11-75c6a16aa7dd,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9d42cb2bbadf40d6b35f237f71234611',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T09:21:18Z,virtual_size=,visibility=), allow threads: False {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 941.526074] env[62070]: DEBUG nova.virt.hardware [None req-2cdd6dc5-b80a-4fa8-acc2-8040ca3495d3 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] Flavor limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 941.526244] env[62070]: DEBUG nova.virt.hardware [None req-2cdd6dc5-b80a-4fa8-acc2-8040ca3495d3 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] Image limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 941.526436] env[62070]: DEBUG nova.virt.hardware [None req-2cdd6dc5-b80a-4fa8-acc2-8040ca3495d3 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] Flavor pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 941.526590] env[62070]: DEBUG nova.virt.hardware [None req-2cdd6dc5-b80a-4fa8-acc2-8040ca3495d3 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] Image pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 941.526743] env[62070]: DEBUG nova.virt.hardware [None 
req-2cdd6dc5-b80a-4fa8-acc2-8040ca3495d3 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 941.526956] env[62070]: DEBUG nova.virt.hardware [None req-2cdd6dc5-b80a-4fa8-acc2-8040ca3495d3 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 941.527190] env[62070]: DEBUG nova.virt.hardware [None req-2cdd6dc5-b80a-4fa8-acc2-8040ca3495d3 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 941.527322] env[62070]: DEBUG nova.virt.hardware [None req-2cdd6dc5-b80a-4fa8-acc2-8040ca3495d3 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] Got 1 possible topologies {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 941.527513] env[62070]: DEBUG nova.virt.hardware [None req-2cdd6dc5-b80a-4fa8-acc2-8040ca3495d3 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 941.527717] env[62070]: DEBUG nova.virt.hardware [None req-2cdd6dc5-b80a-4fa8-acc2-8040ca3495d3 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 941.528895] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ce53700-d326-4de1-8f3c-bdadb49ff76f {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.541021] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d365a0a-a59a-4ee3-a1e8-e76a3bb65d22 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.554868] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-2cdd6dc5-b80a-4fa8-acc2-8040ca3495d3 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] [instance: 1440361b-d3b4-4c1c-995c-fe7ff99ee297] Instance VIF info [] {{(pid=62070) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 941.560709] env[62070]: DEBUG oslo.service.loopingcall [None req-2cdd6dc5-b80a-4fa8-acc2-8040ca3495d3 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
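The nova.virt.hardware lines above go from unset flavor/image limits (hence the 65536 maximums) to the single viable topology for one vCPU. A rough standalone sketch of that enumeration, not Nova's actual implementation:

    # Rough sketch of enumerating CPU topologies for a vCPU count, mirroring
    # "Build topologies for 1 vcpu(s) 1:1:1" -> "(cores=1,sockets=1,threads=1)"
    # above. Limits default to the 65536 maximums shown in the log.
    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                            max_threads=65536):
        topologies = []
        for sockets in range(1, vcpus + 1):
            if vcpus % sockets:
                continue
            for cores in range(1, vcpus // sockets + 1):
                if (vcpus // sockets) % cores:
                    continue
                threads = vcpus // (sockets * cores)
                if (sockets <= max_sockets and cores <= max_cores
                        and threads <= max_threads):
                    topologies.append((sockets, cores, threads))
        return topologies

    print(possible_topologies(1))  # [(1, 1, 1)], matching the log above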
{{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 941.563148] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1440361b-d3b4-4c1c-995c-fe7ff99ee297] Creating VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 941.565795] env[62070]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0b15a1af-f55c-4a00-a09a-bef895de22ae {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.584146] env[62070]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 941.584146] env[62070]: value = "task-1122083" [ 941.584146] env[62070]: _type = "Task" [ 941.584146] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 941.595388] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1122083, 'name': CreateVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 941.657063] env[62070]: DEBUG nova.network.neutron [None req-40f8be5d-d37a-44a5-98e4-ab20fc83851e tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: e850734f-c49c-46d7-87ab-b0d6bed89d9b] Updating instance_info_cache with network_info: [{"id": "7fbe6487-eae6-49f2-894f-82f8519f4232", "address": "fa:16:3e:9e:62:34", "network": {"id": "5ea0fffc-372c-450e-b27b-10959077d58f", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1853458988-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9191f0e6c2ee401abca64c0780e230bf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f3c995e9-7f2f-420c-880a-d60da6e708ad", "external-id": "nsx-vlan-transportzone-166", "segmentation_id": 166, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7fbe6487-ea", "ovs_interfaceid": "7fbe6487-eae6-49f2-894f-82f8519f4232", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 941.751007] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fce775b-45c3-4075-abb2-d8b3207642c3 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.758757] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56e279af-0b37-42b8-92ec-efb03a543268 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.789921] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-330a3fad-de69-4a82-9b63-86eda287e4dd {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.797323] env[62070]: DEBUG oslo_vmware.service 
[-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e0d06a1-2b91-4bef-9f70-dc3e1440a409 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.810691] env[62070]: DEBUG nova.compute.provider_tree [None req-1c8069d9-861b-46c0-a241-ffab59e77207 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 941.873914] env[62070]: DEBUG oslo_vmware.api [None req-906804fc-a7e7-4fcf-821d-5e65108abaa1 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52c7e567-48f5-c48e-9adf-7e1b9f0d38b1, 'name': SearchDatastore_Task, 'duration_secs': 0.008335} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 941.874280] env[62070]: DEBUG oslo_concurrency.lockutils [None req-906804fc-a7e7-4fcf-821d-5e65108abaa1 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Releasing lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 941.875390] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-906804fc-a7e7-4fcf-821d-5e65108abaa1 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore2] 84c00e4a-20d3-4739-8535-e27076d85a89/84c00e4a-20d3-4739-8535-e27076d85a89.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 941.875686] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c19b42aa-a5f4-4dc8-a164-c7d9675c3305 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.882734] env[62070]: DEBUG oslo_vmware.api [None req-906804fc-a7e7-4fcf-821d-5e65108abaa1 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Waiting for the task: (returnval){ [ 941.882734] env[62070]: value = "task-1122084" [ 941.882734] env[62070]: _type = "Task" [ 941.882734] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 941.890574] env[62070]: DEBUG oslo_vmware.api [None req-906804fc-a7e7-4fcf-821d-5e65108abaa1 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': task-1122084, 'name': CopyVirtualDisk_Task} progress is 0%. 
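The CopyVirtualDisk_Task above copies the cached image VMDK into the instance's own folder on the same datastore. A tiny hypothetical helper showing how those "[datastore2] dir/file.vmdk" strings are composed (oslo.vmware ships its own DatastorePath object for this; the helper below is only a sketch):

    # Hypothetical helper composing the "[datastore2] dir/file.vmdk" style
    # paths seen in the copy above; ds_path() is made up for illustration.
    def ds_path(datastore, *parts):
        return "[%s] %s" % (datastore, "/".join(parts))

    image_id = "43ea607c-7ece-4601-9b11-75c6a16aa7dd"
    instance_uuid = "84c00e4a-20d3-4739-8535-e27076d85a89"

    source = ds_path("datastore2", "devstack-image-cache_base", image_id,
                     image_id + ".vmdk")
    dest = ds_path("datastore2", instance_uuid, instance_uuid + ".vmdk")
    print(source)  # [datastore2] devstack-image-cache_base/43ea.../43ea....vmdk
    print(dest)    # [datastore2] 84c0.../84c0....vmdk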
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 941.949671] env[62070]: DEBUG nova.objects.base [None req-58873f53-be6c-4871-8071-bfb0239a065e tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Object Instance<21bcb1a6-833b-48f3-8ee2-0e49c64a104f> lazy-loaded attributes: flavor,pci_requests {{(pid=62070) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 941.950009] env[62070]: DEBUG nova.network.neutron [None req-58873f53-be6c-4871-8071-bfb0239a065e tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] [instance: 21bcb1a6-833b-48f3-8ee2-0e49c64a104f] allocate_for_instance() {{(pid=62070) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 942.072479] env[62070]: DEBUG nova.policy [None req-58873f53-be6c-4871-8071-bfb0239a065e tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7534320dee8f486e90f5174aa94d00bd', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '925dff51764c4b56ae7ea05fbde2ecdd', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62070) authorize /opt/stack/nova/nova/policy.py:201}} [ 942.095101] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1122083, 'name': CreateVM_Task, 'duration_secs': 0.240655} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 942.095342] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1440361b-d3b4-4c1c-995c-fe7ff99ee297] Created VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 942.095737] env[62070]: DEBUG oslo_concurrency.lockutils [None req-2cdd6dc5-b80a-4fa8-acc2-8040ca3495d3 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 942.095908] env[62070]: DEBUG oslo_concurrency.lockutils [None req-2cdd6dc5-b80a-4fa8-acc2-8040ca3495d3 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] Acquired lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 942.096271] env[62070]: DEBUG oslo_concurrency.lockutils [None req-2cdd6dc5-b80a-4fa8-acc2-8040ca3495d3 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 942.096575] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-50b9b29e-88b6-4b8f-8de9-935e168aae1f {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.102535] env[62070]: DEBUG 
oslo_vmware.api [None req-2cdd6dc5-b80a-4fa8-acc2-8040ca3495d3 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] Waiting for the task: (returnval){ [ 942.102535] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]52864dd4-48ae-e1b9-4660-ecc70ba4f20b" [ 942.102535] env[62070]: _type = "Task" [ 942.102535] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 942.112360] env[62070]: DEBUG oslo_vmware.api [None req-2cdd6dc5-b80a-4fa8-acc2-8040ca3495d3 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52864dd4-48ae-e1b9-4660-ecc70ba4f20b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 942.160349] env[62070]: DEBUG oslo_concurrency.lockutils [None req-40f8be5d-d37a-44a5-98e4-ab20fc83851e tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Releasing lock "refresh_cache-e850734f-c49c-46d7-87ab-b0d6bed89d9b" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 942.160658] env[62070]: DEBUG nova.compute.manager [None req-40f8be5d-d37a-44a5-98e4-ab20fc83851e tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: e850734f-c49c-46d7-87ab-b0d6bed89d9b] Instance network_info: |[{"id": "7fbe6487-eae6-49f2-894f-82f8519f4232", "address": "fa:16:3e:9e:62:34", "network": {"id": "5ea0fffc-372c-450e-b27b-10959077d58f", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1853458988-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9191f0e6c2ee401abca64c0780e230bf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f3c995e9-7f2f-420c-880a-d60da6e708ad", "external-id": "nsx-vlan-transportzone-166", "segmentation_id": 166, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7fbe6487-ea", "ovs_interfaceid": "7fbe6487-eae6-49f2-894f-82f8519f4232", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 942.161526] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-40f8be5d-d37a-44a5-98e4-ab20fc83851e tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: e850734f-c49c-46d7-87ab-b0d6bed89d9b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:9e:62:34', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f3c995e9-7f2f-420c-880a-d60da6e708ad', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7fbe6487-eae6-49f2-894f-82f8519f4232', 'vif_model': 'vmxnet3'}] {{(pid=62070) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 942.170088] env[62070]: DEBUG oslo.service.loopingcall 
[None req-40f8be5d-d37a-44a5-98e4-ab20fc83851e tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 942.170414] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e850734f-c49c-46d7-87ab-b0d6bed89d9b] Creating VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 942.170670] env[62070]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8e553443-40c3-4ce4-8edc-63a0c1cb700d {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.192590] env[62070]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 942.192590] env[62070]: value = "task-1122085" [ 942.192590] env[62070]: _type = "Task" [ 942.192590] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 942.204288] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1122085, 'name': CreateVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 942.313217] env[62070]: DEBUG nova.scheduler.client.report [None req-1c8069d9-861b-46c0-a241-ffab59e77207 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 942.392184] env[62070]: DEBUG oslo_vmware.api [None req-906804fc-a7e7-4fcf-821d-5e65108abaa1 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': task-1122084, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.422619} completed successfully. 
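The inventory dict reported above is what Placement sizes allocations against; usable capacity per resource class follows the usual (total - reserved) * allocation_ratio rule:

    # Effective capacity implied by the inventory data above, using the usual
    # Placement rule: (total - reserved) * allocation_ratio.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }
    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(rc, capacity)  # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0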
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 942.392514] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-906804fc-a7e7-4fcf-821d-5e65108abaa1 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore2] 84c00e4a-20d3-4739-8535-e27076d85a89/84c00e4a-20d3-4739-8535-e27076d85a89.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 942.392783] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-906804fc-a7e7-4fcf-821d-5e65108abaa1 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 84c00e4a-20d3-4739-8535-e27076d85a89] Extending root virtual disk to 1048576 {{(pid=62070) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 942.393080] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7ae6d78a-ac77-4f7a-8a18-5820175bcfe6 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.399534] env[62070]: DEBUG oslo_vmware.api [None req-906804fc-a7e7-4fcf-821d-5e65108abaa1 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Waiting for the task: (returnval){ [ 942.399534] env[62070]: value = "task-1122086" [ 942.399534] env[62070]: _type = "Task" [ 942.399534] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 942.407394] env[62070]: DEBUG oslo_vmware.api [None req-906804fc-a7e7-4fcf-821d-5e65108abaa1 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': task-1122086, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 942.491099] env[62070]: DEBUG nova.compute.manager [None req-87b139b2-90a5-4b70-b835-8cb543d252a2 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 2c58db1d-405f-4489-85db-c74723be4a8d] Start spawning the instance on the hypervisor. 
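The "Extending root virtual disk to 1048576" step above comes from the m1.nano flavor's root_gb=1, with the extend size expressed in kilobytes:

    # Why the log says 1048576: the m1.nano flavor has root_gb=1 and the
    # extend call is expressed in KB, so 1 GiB -> 1024 * 1024 KB.
    from oslo_utils import units

    root_gb = 1                    # m1.nano, per the flavor dump above
    root_kb = root_gb * units.Mi   # units.Mi == 1024 ** 2 == 1048576
    assert root_kb == 1048576
    print(root_kb)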
{{(pid=62070) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 942.518027] env[62070]: DEBUG nova.virt.hardware [None req-87b139b2-90a5-4b70-b835-8cb543d252a2 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T09:21:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T09:21:17Z,direct_url=,disk_format='vmdk',id=43ea607c-7ece-4601-9b11-75c6a16aa7dd,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9d42cb2bbadf40d6b35f237f71234611',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T09:21:18Z,virtual_size=,visibility=), allow threads: False {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 942.518027] env[62070]: DEBUG nova.virt.hardware [None req-87b139b2-90a5-4b70-b835-8cb543d252a2 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Flavor limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 942.518027] env[62070]: DEBUG nova.virt.hardware [None req-87b139b2-90a5-4b70-b835-8cb543d252a2 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Image limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 942.518027] env[62070]: DEBUG nova.virt.hardware [None req-87b139b2-90a5-4b70-b835-8cb543d252a2 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Flavor pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 942.518414] env[62070]: DEBUG nova.virt.hardware [None req-87b139b2-90a5-4b70-b835-8cb543d252a2 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Image pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 942.518414] env[62070]: DEBUG nova.virt.hardware [None req-87b139b2-90a5-4b70-b835-8cb543d252a2 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 942.518492] env[62070]: DEBUG nova.virt.hardware [None req-87b139b2-90a5-4b70-b835-8cb543d252a2 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 942.518639] env[62070]: DEBUG nova.virt.hardware [None req-87b139b2-90a5-4b70-b835-8cb543d252a2 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 942.518825] env[62070]: DEBUG nova.virt.hardware [None req-87b139b2-90a5-4b70-b835-8cb543d252a2 
tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Got 1 possible topologies {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 942.519042] env[62070]: DEBUG nova.virt.hardware [None req-87b139b2-90a5-4b70-b835-8cb543d252a2 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 942.519265] env[62070]: DEBUG nova.virt.hardware [None req-87b139b2-90a5-4b70-b835-8cb543d252a2 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 942.520235] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e56e334-7391-47a6-ba50-998f2966d905 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.528761] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ac458fa-54da-4152-bf11-46622b640511 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.535706] env[62070]: DEBUG nova.compute.manager [req-b7e00efb-8535-4bd7-91c9-3f6c177d4d53 req-34f86737-4364-457a-80c6-11d2e569eddc service nova] [instance: e850734f-c49c-46d7-87ab-b0d6bed89d9b] Received event network-changed-7fbe6487-eae6-49f2-894f-82f8519f4232 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 942.535845] env[62070]: DEBUG nova.compute.manager [req-b7e00efb-8535-4bd7-91c9-3f6c177d4d53 req-34f86737-4364-457a-80c6-11d2e569eddc service nova] [instance: e850734f-c49c-46d7-87ab-b0d6bed89d9b] Refreshing instance network info cache due to event network-changed-7fbe6487-eae6-49f2-894f-82f8519f4232. 
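The network-changed event above makes the service thread refresh the cached port info while holding the same refresh_cache-<instance_uuid> lock the build path uses. A minimal sketch of that lock-then-refresh pattern; refresh_port_info below is a stand-in, not a Nova function:

    # Sketch of the pattern behind the "Acquiring/Acquired/Releasing lock
    # refresh_cache-..." triplets: event handler and build path serialize on
    # the same per-instance lock before rebuilding the network info cache.
    from oslo_concurrency import lockutils

    def refresh_port_info(instance_uuid, port_id):
        # Stand-in for the real Neutron query and cache update.
        return {'instance': instance_uuid, 'port': port_id, 'refreshed': True}

    def handle_network_changed(instance_uuid, port_id):
        with lockutils.lock('refresh_cache-%s' % instance_uuid):
            return refresh_port_info(instance_uuid, port_id)

    handle_network_changed('e850734f-c49c-46d7-87ab-b0d6bed89d9b',
                           '7fbe6487-eae6-49f2-894f-82f8519f4232')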
{{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 942.536093] env[62070]: DEBUG oslo_concurrency.lockutils [req-b7e00efb-8535-4bd7-91c9-3f6c177d4d53 req-34f86737-4364-457a-80c6-11d2e569eddc service nova] Acquiring lock "refresh_cache-e850734f-c49c-46d7-87ab-b0d6bed89d9b" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 942.536277] env[62070]: DEBUG oslo_concurrency.lockutils [req-b7e00efb-8535-4bd7-91c9-3f6c177d4d53 req-34f86737-4364-457a-80c6-11d2e569eddc service nova] Acquired lock "refresh_cache-e850734f-c49c-46d7-87ab-b0d6bed89d9b" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 942.536468] env[62070]: DEBUG nova.network.neutron [req-b7e00efb-8535-4bd7-91c9-3f6c177d4d53 req-34f86737-4364-457a-80c6-11d2e569eddc service nova] [instance: e850734f-c49c-46d7-87ab-b0d6bed89d9b] Refreshing network info cache for port 7fbe6487-eae6-49f2-894f-82f8519f4232 {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 942.612409] env[62070]: DEBUG oslo_vmware.api [None req-2cdd6dc5-b80a-4fa8-acc2-8040ca3495d3 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52864dd4-48ae-e1b9-4660-ecc70ba4f20b, 'name': SearchDatastore_Task, 'duration_secs': 0.055946} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 942.612519] env[62070]: DEBUG oslo_concurrency.lockutils [None req-2cdd6dc5-b80a-4fa8-acc2-8040ca3495d3 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] Releasing lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 942.612702] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-2cdd6dc5-b80a-4fa8-acc2-8040ca3495d3 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] [instance: 1440361b-d3b4-4c1c-995c-fe7ff99ee297] Processing image 43ea607c-7ece-4601-9b11-75c6a16aa7dd {{(pid=62070) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 942.612940] env[62070]: DEBUG oslo_concurrency.lockutils [None req-2cdd6dc5-b80a-4fa8-acc2-8040ca3495d3 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 942.613105] env[62070]: DEBUG oslo_concurrency.lockutils [None req-2cdd6dc5-b80a-4fa8-acc2-8040ca3495d3 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] Acquired lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 942.613296] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-2cdd6dc5-b80a-4fa8-acc2-8040ca3495d3 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62070) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 942.613770] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e9b925ec-d0e3-495f-9679-03b750922fb7 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.621542] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-2cdd6dc5-b80a-4fa8-acc2-8040ca3495d3 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 942.621686] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-2cdd6dc5-b80a-4fa8-acc2-8040ca3495d3 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62070) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 942.622362] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-13bb7864-f16d-41f4-8dbc-ed615a519ac0 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.629839] env[62070]: DEBUG oslo_vmware.api [None req-2cdd6dc5-b80a-4fa8-acc2-8040ca3495d3 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] Waiting for the task: (returnval){ [ 942.629839] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]524a6b28-295f-71c9-0104-a71f922c8d0a" [ 942.629839] env[62070]: _type = "Task" [ 942.629839] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 942.637482] env[62070]: DEBUG oslo_vmware.api [None req-2cdd6dc5-b80a-4fa8-acc2-8040ca3495d3 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]524a6b28-295f-71c9-0104-a71f922c8d0a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 942.701966] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1122085, 'name': CreateVM_Task, 'duration_secs': 0.353168} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 942.702219] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e850734f-c49c-46d7-87ab-b0d6bed89d9b] Created VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 942.703057] env[62070]: DEBUG oslo_concurrency.lockutils [None req-40f8be5d-d37a-44a5-98e4-ab20fc83851e tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 942.703057] env[62070]: DEBUG oslo_concurrency.lockutils [None req-40f8be5d-d37a-44a5-98e4-ab20fc83851e tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Acquired lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 942.703394] env[62070]: DEBUG oslo_concurrency.lockutils [None req-40f8be5d-d37a-44a5-98e4-ab20fc83851e tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 942.703653] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8beebad4-95d5-4189-bb1a-23bdcf3cee44 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.708553] env[62070]: DEBUG oslo_vmware.api [None req-40f8be5d-d37a-44a5-98e4-ab20fc83851e tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Waiting for the task: (returnval){ [ 942.708553] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]52a25bc8-c0c1-5cb5-02a3-3d3fec13adbe" [ 942.708553] env[62070]: _type = "Task" [ 942.708553] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 942.716885] env[62070]: DEBUG oslo_vmware.api [None req-40f8be5d-d37a-44a5-98e4-ab20fc83851e tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52a25bc8-c0c1-5cb5-02a3-3d3fec13adbe, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 942.734199] env[62070]: DEBUG nova.network.neutron [None req-87b139b2-90a5-4b70-b835-8cb543d252a2 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 2c58db1d-405f-4489-85db-c74723be4a8d] Successfully updated port: 8876137b-4c95-4f50-8bf9-ad7d44ac5052 {{(pid=62070) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 942.821841] env[62070]: DEBUG oslo_concurrency.lockutils [None req-1c8069d9-861b-46c0-a241-ffab59e77207 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.355s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 942.822377] env[62070]: DEBUG nova.compute.manager [None req-1c8069d9-861b-46c0-a241-ffab59e77207 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 519cad6a-ebe0-42db-a19e-27249b83436e] Start building networks asynchronously for instance. {{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 942.824803] env[62070]: DEBUG oslo_concurrency.lockutils [None req-c00d74f8-0b67-48be-9e7d-16251e5d55eb tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 23.932s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 942.825046] env[62070]: DEBUG nova.objects.instance [None req-c00d74f8-0b67-48be-9e7d-16251e5d55eb tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Lazy-loading 'resources' on Instance uuid 328fbc92-8162-4e12-a02d-6e9cafe0c365 {{(pid=62070) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 942.910147] env[62070]: DEBUG oslo_vmware.api [None req-906804fc-a7e7-4fcf-821d-5e65108abaa1 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': task-1122086, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.075643} completed successfully. 
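The "Lazy-loading 'resources'" and similar lines above come from versioned objects fetching unset fields on first access. An illustrative-only sketch of that pattern (FakeInstance is made up; Nova's real Instance.obj_load_attr issues a database query):

    # Illustrative stand-in for an object with lazy-loaded fields, mimicking
    # the obj_load_attr() behaviour behind the "Lazy-loading ..." DEBUG lines.
    class FakeInstance:
        def __init__(self, uuid):
            self.uuid = uuid
            self._fields = {}

        def obj_load_attr(self, name):
            # The real implementation queries the database; this fabricates
            # a value so the sketch stays self-contained.
            self._fields[name] = 'loaded-%s' % name

        def __getattr__(self, name):
            # Only reached for fields that were not fetched with the object.
            if name == '_fields':
                raise AttributeError(name)
            if name not in self._fields:
                self.obj_load_attr(name)
            return self._fields[name]

    inst = FakeInstance('328fbc92-8162-4e12-a02d-6e9cafe0c365')
    print(inst.resources)  # triggers the lazy load, like the DEBUG line above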
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 942.910944] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-906804fc-a7e7-4fcf-821d-5e65108abaa1 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 84c00e4a-20d3-4739-8535-e27076d85a89] Extended root virtual disk {{(pid=62070) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 942.911198] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-210205eb-5b36-43a7-83af-6b88b6ca1c24 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.932842] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-906804fc-a7e7-4fcf-821d-5e65108abaa1 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 84c00e4a-20d3-4739-8535-e27076d85a89] Reconfiguring VM instance instance-00000053 to attach disk [datastore2] 84c00e4a-20d3-4739-8535-e27076d85a89/84c00e4a-20d3-4739-8535-e27076d85a89.vmdk or device None with type sparse {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 942.933145] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3cf7bb31-9df2-4a93-8835-4f97afd05681 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.952594] env[62070]: DEBUG oslo_vmware.api [None req-906804fc-a7e7-4fcf-821d-5e65108abaa1 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Waiting for the task: (returnval){ [ 942.952594] env[62070]: value = "task-1122087" [ 942.952594] env[62070]: _type = "Task" [ 942.952594] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 942.960098] env[62070]: DEBUG oslo_vmware.api [None req-906804fc-a7e7-4fcf-821d-5e65108abaa1 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': task-1122087, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 943.140205] env[62070]: DEBUG oslo_vmware.api [None req-2cdd6dc5-b80a-4fa8-acc2-8040ca3495d3 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]524a6b28-295f-71c9-0104-a71f922c8d0a, 'name': SearchDatastore_Task, 'duration_secs': 0.007652} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 943.140966] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-97a00ac9-52ea-47bc-8c0e-c5a7e9b4f962 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.147260] env[62070]: DEBUG oslo_vmware.api [None req-2cdd6dc5-b80a-4fa8-acc2-8040ca3495d3 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] Waiting for the task: (returnval){ [ 943.147260] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]5252eef1-4193-fbf0-a9f7-a48d6578b4e7" [ 943.147260] env[62070]: _type = "Task" [ 943.147260] env[62070]: } to complete. 
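The repeated "Waiting for the task ... to complete" and "progress is N%" pairs come from polling the vCenter task on a fixed interval. A simplified sketch of such a loop with oslo.service's FixedIntervalLoopingCall; fetch_task_state below is a stand-in for the real PropertyCollector read:

    # Simplified polling loop: check the task state on a fixed interval and
    # stop the loop once it reaches a terminal state. fetch_task_state() is a
    # stand-in; the driver really reads the task via the PropertyCollector.
    from oslo_service import loopingcall

    states = iter(['running', 'running', 'success'])

    def fetch_task_state(task_id):
        return next(states)

    def _poll(task_id):
        state = fetch_task_state(task_id)
        if state == 'success':
            raise loopingcall.LoopingCallDone(task_id)
        if state == 'error':
            raise RuntimeError('task %s failed' % task_id)

    timer = loopingcall.FixedIntervalLoopingCall(_poll, 'task-1122083')
    result = timer.start(interval=0.5).wait()
    print(result)  # task-1122083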
{{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 943.155366] env[62070]: DEBUG oslo_vmware.api [None req-2cdd6dc5-b80a-4fa8-acc2-8040ca3495d3 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]5252eef1-4193-fbf0-a9f7-a48d6578b4e7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 943.220476] env[62070]: DEBUG oslo_vmware.api [None req-40f8be5d-d37a-44a5-98e4-ab20fc83851e tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52a25bc8-c0c1-5cb5-02a3-3d3fec13adbe, 'name': SearchDatastore_Task, 'duration_secs': 0.00845} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 943.220476] env[62070]: DEBUG oslo_concurrency.lockutils [None req-40f8be5d-d37a-44a5-98e4-ab20fc83851e tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Releasing lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 943.220476] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-40f8be5d-d37a-44a5-98e4-ab20fc83851e tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: e850734f-c49c-46d7-87ab-b0d6bed89d9b] Processing image 43ea607c-7ece-4601-9b11-75c6a16aa7dd {{(pid=62070) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 943.220476] env[62070]: DEBUG oslo_concurrency.lockutils [None req-40f8be5d-d37a-44a5-98e4-ab20fc83851e tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 943.237072] env[62070]: DEBUG oslo_concurrency.lockutils [None req-87b139b2-90a5-4b70-b835-8cb543d252a2 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Acquiring lock "refresh_cache-2c58db1d-405f-4489-85db-c74723be4a8d" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 943.237214] env[62070]: DEBUG oslo_concurrency.lockutils [None req-87b139b2-90a5-4b70-b835-8cb543d252a2 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Acquired lock "refresh_cache-2c58db1d-405f-4489-85db-c74723be4a8d" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 943.238439] env[62070]: DEBUG nova.network.neutron [None req-87b139b2-90a5-4b70-b835-8cb543d252a2 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 2c58db1d-405f-4489-85db-c74723be4a8d] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 943.239391] env[62070]: DEBUG nova.network.neutron [req-b7e00efb-8535-4bd7-91c9-3f6c177d4d53 req-34f86737-4364-457a-80c6-11d2e569eddc service nova] [instance: e850734f-c49c-46d7-87ab-b0d6bed89d9b] 
Updated VIF entry in instance network info cache for port 7fbe6487-eae6-49f2-894f-82f8519f4232. {{(pid=62070) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 943.239746] env[62070]: DEBUG nova.network.neutron [req-b7e00efb-8535-4bd7-91c9-3f6c177d4d53 req-34f86737-4364-457a-80c6-11d2e569eddc service nova] [instance: e850734f-c49c-46d7-87ab-b0d6bed89d9b] Updating instance_info_cache with network_info: [{"id": "7fbe6487-eae6-49f2-894f-82f8519f4232", "address": "fa:16:3e:9e:62:34", "network": {"id": "5ea0fffc-372c-450e-b27b-10959077d58f", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1853458988-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9191f0e6c2ee401abca64c0780e230bf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f3c995e9-7f2f-420c-880a-d60da6e708ad", "external-id": "nsx-vlan-transportzone-166", "segmentation_id": 166, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7fbe6487-ea", "ovs_interfaceid": "7fbe6487-eae6-49f2-894f-82f8519f4232", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 943.328403] env[62070]: DEBUG nova.compute.utils [None req-1c8069d9-861b-46c0-a241-ffab59e77207 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Using /dev/sd instead of None {{(pid=62070) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 943.332505] env[62070]: DEBUG nova.compute.manager [None req-1c8069d9-861b-46c0-a241-ffab59e77207 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 519cad6a-ebe0-42db-a19e-27249b83436e] Allocating IP information in the background. 
{{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 943.332723] env[62070]: DEBUG nova.network.neutron [None req-1c8069d9-861b-46c0-a241-ffab59e77207 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 519cad6a-ebe0-42db-a19e-27249b83436e] allocate_for_instance() {{(pid=62070) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 943.374451] env[62070]: DEBUG nova.policy [None req-1c8069d9-861b-46c0-a241-ffab59e77207 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '093d4b68ffd04d4d951f5be91bfc76e8', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'eac8e5edc8f14fff89aba7c8cb6cac5d', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62070) authorize /opt/stack/nova/nova/policy.py:201}} [ 943.464229] env[62070]: DEBUG oslo_vmware.api [None req-906804fc-a7e7-4fcf-821d-5e65108abaa1 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': task-1122087, 'name': ReconfigVM_Task, 'duration_secs': 0.303904} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 943.464702] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-906804fc-a7e7-4fcf-821d-5e65108abaa1 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 84c00e4a-20d3-4739-8535-e27076d85a89] Reconfigured VM instance instance-00000053 to attach disk [datastore2] 84c00e4a-20d3-4739-8535-e27076d85a89/84c00e4a-20d3-4739-8535-e27076d85a89.vmdk or device None with type sparse {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 943.465143] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b80b78ea-2141-4532-9b0d-917f2c545887 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.472337] env[62070]: DEBUG oslo_vmware.api [None req-906804fc-a7e7-4fcf-821d-5e65108abaa1 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Waiting for the task: (returnval){ [ 943.472337] env[62070]: value = "task-1122088" [ 943.472337] env[62070]: _type = "Task" [ 943.472337] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 943.480950] env[62070]: DEBUG oslo_vmware.api [None req-906804fc-a7e7-4fcf-821d-5e65108abaa1 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': task-1122088, 'name': Rename_Task} progress is 5%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 943.523161] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-068a16d2-1f3a-48e7-9288-f43eac4f4395 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] [instance: 7dc27fe6-495f-498d-88fe-a99ddc19a21c] Volume attach. 
Driver type: vmdk {{(pid=62070) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 943.523618] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-068a16d2-1f3a-48e7-9288-f43eac4f4395 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] [instance: 7dc27fe6-495f-498d-88fe-a99ddc19a21c] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-245461', 'volume_id': 'c400637e-5e41-43a1-91df-a0f4d9c4e253', 'name': 'volume-c400637e-5e41-43a1-91df-a0f4d9c4e253', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '7dc27fe6-495f-498d-88fe-a99ddc19a21c', 'attached_at': '', 'detached_at': '', 'volume_id': 'c400637e-5e41-43a1-91df-a0f4d9c4e253', 'serial': 'c400637e-5e41-43a1-91df-a0f4d9c4e253'} {{(pid=62070) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 943.524435] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee58d975-2069-41e7-a958-fb408a2b553c {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.545047] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22aeb91e-5720-45ac-9c2c-2d7d3521bebc {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.573463] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-068a16d2-1f3a-48e7-9288-f43eac4f4395 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] [instance: 7dc27fe6-495f-498d-88fe-a99ddc19a21c] Reconfiguring VM instance instance-0000004e to attach disk [datastore2] volume-c400637e-5e41-43a1-91df-a0f4d9c4e253/volume-c400637e-5e41-43a1-91df-a0f4d9c4e253.vmdk or device None with type thin {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 943.576207] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-047e1b82-a977-4814-b3bd-06d5282bfa12 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.594841] env[62070]: DEBUG oslo_vmware.api [None req-068a16d2-1f3a-48e7-9288-f43eac4f4395 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Waiting for the task: (returnval){ [ 943.594841] env[62070]: value = "task-1122089" [ 943.594841] env[62070]: _type = "Task" [ 943.594841] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 943.602342] env[62070]: DEBUG oslo_vmware.api [None req-068a16d2-1f3a-48e7-9288-f43eac4f4395 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Task: {'id': task-1122089, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 943.641772] env[62070]: DEBUG nova.network.neutron [None req-1c8069d9-861b-46c0-a241-ffab59e77207 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 519cad6a-ebe0-42db-a19e-27249b83436e] Successfully created port: 52474984-690f-441c-9477-d3d0a3ab1bb7 {{(pid=62070) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 943.648037] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f607eed-8006-48c3-9b7e-35f60b4894ca {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.660190] env[62070]: DEBUG oslo_vmware.api [None req-2cdd6dc5-b80a-4fa8-acc2-8040ca3495d3 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]5252eef1-4193-fbf0-a9f7-a48d6578b4e7, 'name': SearchDatastore_Task, 'duration_secs': 0.00934} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 943.661841] env[62070]: DEBUG oslo_concurrency.lockutils [None req-2cdd6dc5-b80a-4fa8-acc2-8040ca3495d3 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] Releasing lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 943.662129] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-2cdd6dc5-b80a-4fa8-acc2-8040ca3495d3 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore2] 1440361b-d3b4-4c1c-995c-fe7ff99ee297/1440361b-d3b4-4c1c-995c-fe7ff99ee297.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 943.663917] env[62070]: DEBUG oslo_concurrency.lockutils [None req-40f8be5d-d37a-44a5-98e4-ab20fc83851e tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Acquired lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 943.663917] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-40f8be5d-d37a-44a5-98e4-ab20fc83851e tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 943.663917] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4a07d11c-f698-4fdf-a4cd-dca91e00506a {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.665667] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b63b608c-183a-4585-b673-4d47bf8cade9 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.668602] env[62070]: DEBUG 
oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-12dccef0-f75c-4e53-9350-7c267850a764 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.697910] env[62070]: DEBUG oslo_vmware.api [None req-2cdd6dc5-b80a-4fa8-acc2-8040ca3495d3 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] Waiting for the task: (returnval){ [ 943.697910] env[62070]: value = "task-1122090" [ 943.697910] env[62070]: _type = "Task" [ 943.697910] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 943.699481] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af08de3e-26b7-4388-951b-cec7333af13c {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.702098] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-40f8be5d-d37a-44a5-98e4-ab20fc83851e tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 943.702266] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-40f8be5d-d37a-44a5-98e4-ab20fc83851e tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62070) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 943.705540] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6b1202dd-bcb0-4670-a6b8-1ba7ef3a4a01 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.713026] env[62070]: DEBUG oslo_vmware.api [None req-2cdd6dc5-b80a-4fa8-acc2-8040ca3495d3 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] Task: {'id': task-1122090, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 943.715795] env[62070]: DEBUG oslo_vmware.api [None req-40f8be5d-d37a-44a5-98e4-ab20fc83851e tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Waiting for the task: (returnval){ [ 943.715795] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]52a4984a-58e1-66a1-0983-3e41b2e3f6d9" [ 943.715795] env[62070]: _type = "Task" [ 943.715795] env[62070]: } to complete. 
{{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 943.717071] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23040ca2-f774-493c-a39f-4daea583d82a {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.726561] env[62070]: DEBUG nova.compute.manager [req-d68b47ee-e2e2-4b82-9167-2dfe686717f3 req-464df35a-440d-4468-8fca-561d15d745db service nova] [instance: 21bcb1a6-833b-48f3-8ee2-0e49c64a104f] Received event network-vif-plugged-60714fe7-f6bc-4f1f-b4d0-a0b45b5a8f70 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 943.726771] env[62070]: DEBUG oslo_concurrency.lockutils [req-d68b47ee-e2e2-4b82-9167-2dfe686717f3 req-464df35a-440d-4468-8fca-561d15d745db service nova] Acquiring lock "21bcb1a6-833b-48f3-8ee2-0e49c64a104f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 943.726982] env[62070]: DEBUG oslo_concurrency.lockutils [req-d68b47ee-e2e2-4b82-9167-2dfe686717f3 req-464df35a-440d-4468-8fca-561d15d745db service nova] Lock "21bcb1a6-833b-48f3-8ee2-0e49c64a104f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 943.727178] env[62070]: DEBUG oslo_concurrency.lockutils [req-d68b47ee-e2e2-4b82-9167-2dfe686717f3 req-464df35a-440d-4468-8fca-561d15d745db service nova] Lock "21bcb1a6-833b-48f3-8ee2-0e49c64a104f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 943.727368] env[62070]: DEBUG nova.compute.manager [req-d68b47ee-e2e2-4b82-9167-2dfe686717f3 req-464df35a-440d-4468-8fca-561d15d745db service nova] [instance: 21bcb1a6-833b-48f3-8ee2-0e49c64a104f] No waiting events found dispatching network-vif-plugged-60714fe7-f6bc-4f1f-b4d0-a0b45b5a8f70 {{(pid=62070) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 943.727551] env[62070]: WARNING nova.compute.manager [req-d68b47ee-e2e2-4b82-9167-2dfe686717f3 req-464df35a-440d-4468-8fca-561d15d745db service nova] [instance: 21bcb1a6-833b-48f3-8ee2-0e49c64a104f] Received unexpected event network-vif-plugged-60714fe7-f6bc-4f1f-b4d0-a0b45b5a8f70 for instance with vm_state active and task_state None. [ 943.740721] env[62070]: DEBUG oslo_vmware.api [None req-40f8be5d-d37a-44a5-98e4-ab20fc83851e tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52a4984a-58e1-66a1-0983-3e41b2e3f6d9, 'name': SearchDatastore_Task, 'duration_secs': 0.015006} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 943.741493] env[62070]: DEBUG nova.compute.provider_tree [None req-c00d74f8-0b67-48be-9e7d-16251e5d55eb tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Updating inventory in ProviderTree for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 943.745041] env[62070]: DEBUG oslo_concurrency.lockutils [req-b7e00efb-8535-4bd7-91c9-3f6c177d4d53 req-34f86737-4364-457a-80c6-11d2e569eddc service nova] Releasing lock "refresh_cache-e850734f-c49c-46d7-87ab-b0d6bed89d9b" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 943.745578] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-199d2af2-a315-4689-a2f0-fa1b57646a59 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.751523] env[62070]: DEBUG oslo_vmware.api [None req-40f8be5d-d37a-44a5-98e4-ab20fc83851e tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Waiting for the task: (returnval){ [ 943.751523] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]529e08a8-4672-8360-4d95-0c5794734930" [ 943.751523] env[62070]: _type = "Task" [ 943.751523] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 943.759974] env[62070]: DEBUG oslo_vmware.api [None req-40f8be5d-d37a-44a5-98e4-ab20fc83851e tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]529e08a8-4672-8360-4d95-0c5794734930, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 943.833315] env[62070]: DEBUG nova.compute.manager [None req-1c8069d9-861b-46c0-a241-ffab59e77207 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 519cad6a-ebe0-42db-a19e-27249b83436e] Start building block device mappings for instance. {{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 943.983212] env[62070]: DEBUG oslo_vmware.api [None req-906804fc-a7e7-4fcf-821d-5e65108abaa1 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': task-1122088, 'name': Rename_Task, 'duration_secs': 0.16103} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 943.983637] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-906804fc-a7e7-4fcf-821d-5e65108abaa1 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 84c00e4a-20d3-4739-8535-e27076d85a89] Powering on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 943.983905] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3d5c0b90-d524-4450-b46f-7ae95f19fe69 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.990400] env[62070]: DEBUG oslo_vmware.api [None req-906804fc-a7e7-4fcf-821d-5e65108abaa1 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Waiting for the task: (returnval){ [ 943.990400] env[62070]: value = "task-1122091" [ 943.990400] env[62070]: _type = "Task" [ 943.990400] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 944.000230] env[62070]: DEBUG oslo_vmware.api [None req-906804fc-a7e7-4fcf-821d-5e65108abaa1 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': task-1122091, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 944.030700] env[62070]: DEBUG nova.network.neutron [None req-58873f53-be6c-4871-8071-bfb0239a065e tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] [instance: 21bcb1a6-833b-48f3-8ee2-0e49c64a104f] Successfully updated port: 60714fe7-f6bc-4f1f-b4d0-a0b45b5a8f70 {{(pid=62070) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 944.037602] env[62070]: DEBUG nova.network.neutron [None req-87b139b2-90a5-4b70-b835-8cb543d252a2 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 2c58db1d-405f-4489-85db-c74723be4a8d] Instance cache missing network info. {{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 944.108390] env[62070]: DEBUG oslo_vmware.api [None req-068a16d2-1f3a-48e7-9288-f43eac4f4395 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Task: {'id': task-1122089, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 944.214131] env[62070]: DEBUG oslo_vmware.api [None req-2cdd6dc5-b80a-4fa8-acc2-8040ca3495d3 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] Task: {'id': task-1122090, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 944.261851] env[62070]: DEBUG oslo_vmware.api [None req-40f8be5d-d37a-44a5-98e4-ab20fc83851e tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]529e08a8-4672-8360-4d95-0c5794734930, 'name': SearchDatastore_Task, 'duration_secs': 0.012291} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 944.262128] env[62070]: DEBUG oslo_concurrency.lockutils [None req-40f8be5d-d37a-44a5-98e4-ab20fc83851e tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Releasing lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 944.262466] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-40f8be5d-d37a-44a5-98e4-ab20fc83851e tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore2] e850734f-c49c-46d7-87ab-b0d6bed89d9b/e850734f-c49c-46d7-87ab-b0d6bed89d9b.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 944.262655] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5dfdf62c-e08c-473a-999a-593ee1291cbb {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.268051] env[62070]: ERROR nova.scheduler.client.report [None req-c00d74f8-0b67-48be-9e7d-16251e5d55eb tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [req-dabacad0-7489-4b07-b6c2-7a27749b2e9f] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 21c7c111-1b69-4468-b2c4-5dd96014fbd6. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-dabacad0-7489-4b07-b6c2-7a27749b2e9f"}]} [ 944.271768] env[62070]: DEBUG oslo_vmware.api [None req-40f8be5d-d37a-44a5-98e4-ab20fc83851e tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Waiting for the task: (returnval){ [ 944.271768] env[62070]: value = "task-1122092" [ 944.271768] env[62070]: _type = "Task" [ 944.271768] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 944.280782] env[62070]: DEBUG oslo_vmware.api [None req-40f8be5d-d37a-44a5-98e4-ab20fc83851e tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Task: {'id': task-1122092, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 944.281919] env[62070]: DEBUG nova.network.neutron [None req-87b139b2-90a5-4b70-b835-8cb543d252a2 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 2c58db1d-405f-4489-85db-c74723be4a8d] Updating instance_info_cache with network_info: [{"id": "8876137b-4c95-4f50-8bf9-ad7d44ac5052", "address": "fa:16:3e:e2:57:14", "network": {"id": "1ca75409-ae3f-4837-a2b6-ed2445253336", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1883034081-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f29ac48ab6544ec0bd1d210aec05dbc5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1559ce49-7345-443f-bf02-4bfeb88356ef", "external-id": "nsx-vlan-transportzone-670", "segmentation_id": 670, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8876137b-4c", "ovs_interfaceid": "8876137b-4c95-4f50-8bf9-ad7d44ac5052", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 944.288049] env[62070]: DEBUG nova.scheduler.client.report [None req-c00d74f8-0b67-48be-9e7d-16251e5d55eb tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Refreshing inventories for resource provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 944.303956] env[62070]: DEBUG nova.scheduler.client.report [None req-c00d74f8-0b67-48be-9e7d-16251e5d55eb tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Updating ProviderTree inventory for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 944.304217] env[62070]: DEBUG nova.compute.provider_tree [None req-c00d74f8-0b67-48be-9e7d-16251e5d55eb tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Updating inventory in ProviderTree for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 944.317177] env[62070]: DEBUG 
nova.scheduler.client.report [None req-c00d74f8-0b67-48be-9e7d-16251e5d55eb tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Refreshing aggregate associations for resource provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6, aggregates: None {{(pid=62070) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 944.336164] env[62070]: DEBUG nova.scheduler.client.report [None req-c00d74f8-0b67-48be-9e7d-16251e5d55eb tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Refreshing trait associations for resource provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6, traits: COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO {{(pid=62070) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 944.502929] env[62070]: DEBUG oslo_vmware.api [None req-906804fc-a7e7-4fcf-821d-5e65108abaa1 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': task-1122091, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 944.535286] env[62070]: DEBUG oslo_concurrency.lockutils [None req-58873f53-be6c-4871-8071-bfb0239a065e tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Acquiring lock "refresh_cache-21bcb1a6-833b-48f3-8ee2-0e49c64a104f" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 944.535489] env[62070]: DEBUG oslo_concurrency.lockutils [None req-58873f53-be6c-4871-8071-bfb0239a065e tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Acquired lock "refresh_cache-21bcb1a6-833b-48f3-8ee2-0e49c64a104f" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 944.535861] env[62070]: DEBUG nova.network.neutron [None req-58873f53-be6c-4871-8071-bfb0239a065e tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] [instance: 21bcb1a6-833b-48f3-8ee2-0e49c64a104f] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 944.601339] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82bdf5a1-26ff-41ed-a7e7-67112afc5eb2 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.609115] env[62070]: DEBUG oslo_vmware.api [None req-068a16d2-1f3a-48e7-9288-f43eac4f4395 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Task: {'id': task-1122089, 'name': ReconfigVM_Task, 'duration_secs': 0.66309} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 944.610925] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-068a16d2-1f3a-48e7-9288-f43eac4f4395 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] [instance: 7dc27fe6-495f-498d-88fe-a99ddc19a21c] Reconfigured VM instance instance-0000004e to attach disk [datastore2] volume-c400637e-5e41-43a1-91df-a0f4d9c4e253/volume-c400637e-5e41-43a1-91df-a0f4d9c4e253.vmdk or device None with type thin {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 944.615894] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b569d2a6-7c81-4287-8296-fc219aae1c74 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.626613] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47bbe505-106b-4cd9-9814-703d21b93b30 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.631621] env[62070]: DEBUG nova.compute.manager [req-b262651f-160d-4586-92d5-91da87aa5efa req-439420a6-d859-48cb-ac5a-12c8ef6bfff6 service nova] [instance: 2c58db1d-405f-4489-85db-c74723be4a8d] Received event network-vif-plugged-8876137b-4c95-4f50-8bf9-ad7d44ac5052 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 944.631852] env[62070]: DEBUG oslo_concurrency.lockutils [req-b262651f-160d-4586-92d5-91da87aa5efa req-439420a6-d859-48cb-ac5a-12c8ef6bfff6 service nova] Acquiring lock "2c58db1d-405f-4489-85db-c74723be4a8d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 944.632074] env[62070]: DEBUG oslo_concurrency.lockutils [req-b262651f-160d-4586-92d5-91da87aa5efa req-439420a6-d859-48cb-ac5a-12c8ef6bfff6 service nova] Lock "2c58db1d-405f-4489-85db-c74723be4a8d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 944.632247] env[62070]: DEBUG oslo_concurrency.lockutils [req-b262651f-160d-4586-92d5-91da87aa5efa req-439420a6-d859-48cb-ac5a-12c8ef6bfff6 service nova] Lock "2c58db1d-405f-4489-85db-c74723be4a8d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 944.632418] env[62070]: DEBUG nova.compute.manager [req-b262651f-160d-4586-92d5-91da87aa5efa req-439420a6-d859-48cb-ac5a-12c8ef6bfff6 service nova] [instance: 2c58db1d-405f-4489-85db-c74723be4a8d] No waiting events found dispatching network-vif-plugged-8876137b-4c95-4f50-8bf9-ad7d44ac5052 {{(pid=62070) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 944.632585] env[62070]: WARNING nova.compute.manager [req-b262651f-160d-4586-92d5-91da87aa5efa req-439420a6-d859-48cb-ac5a-12c8ef6bfff6 service nova] [instance: 2c58db1d-405f-4489-85db-c74723be4a8d] Received unexpected event network-vif-plugged-8876137b-4c95-4f50-8bf9-ad7d44ac5052 for instance with vm_state building and task_state spawning. 
[ 944.632746] env[62070]: DEBUG nova.compute.manager [req-b262651f-160d-4586-92d5-91da87aa5efa req-439420a6-d859-48cb-ac5a-12c8ef6bfff6 service nova] [instance: 2c58db1d-405f-4489-85db-c74723be4a8d] Received event network-changed-8876137b-4c95-4f50-8bf9-ad7d44ac5052 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 944.632901] env[62070]: DEBUG nova.compute.manager [req-b262651f-160d-4586-92d5-91da87aa5efa req-439420a6-d859-48cb-ac5a-12c8ef6bfff6 service nova] [instance: 2c58db1d-405f-4489-85db-c74723be4a8d] Refreshing instance network info cache due to event network-changed-8876137b-4c95-4f50-8bf9-ad7d44ac5052. {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 944.633083] env[62070]: DEBUG oslo_concurrency.lockutils [req-b262651f-160d-4586-92d5-91da87aa5efa req-439420a6-d859-48cb-ac5a-12c8ef6bfff6 service nova] Acquiring lock "refresh_cache-2c58db1d-405f-4489-85db-c74723be4a8d" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 944.662417] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba78a6c0-4e3e-400d-b4c2-9cfcc251a1fb {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.665046] env[62070]: DEBUG oslo_vmware.api [None req-068a16d2-1f3a-48e7-9288-f43eac4f4395 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Waiting for the task: (returnval){ [ 944.665046] env[62070]: value = "task-1122093" [ 944.665046] env[62070]: _type = "Task" [ 944.665046] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 944.671900] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02e8ef1f-0669-4c27-8c06-0b41209b607c {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.678626] env[62070]: DEBUG oslo_vmware.api [None req-068a16d2-1f3a-48e7-9288-f43eac4f4395 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Task: {'id': task-1122093, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 944.688662] env[62070]: DEBUG nova.compute.provider_tree [None req-c00d74f8-0b67-48be-9e7d-16251e5d55eb tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Updating inventory in ProviderTree for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 944.713635] env[62070]: DEBUG oslo_vmware.api [None req-2cdd6dc5-b80a-4fa8-acc2-8040ca3495d3 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] Task: {'id': task-1122090, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 944.781815] env[62070]: DEBUG oslo_vmware.api [None req-40f8be5d-d37a-44a5-98e4-ab20fc83851e tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Task: {'id': task-1122092, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 944.785364] env[62070]: DEBUG oslo_concurrency.lockutils [None req-87b139b2-90a5-4b70-b835-8cb543d252a2 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Releasing lock "refresh_cache-2c58db1d-405f-4489-85db-c74723be4a8d" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 944.785364] env[62070]: DEBUG nova.compute.manager [None req-87b139b2-90a5-4b70-b835-8cb543d252a2 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 2c58db1d-405f-4489-85db-c74723be4a8d] Instance network_info: |[{"id": "8876137b-4c95-4f50-8bf9-ad7d44ac5052", "address": "fa:16:3e:e2:57:14", "network": {"id": "1ca75409-ae3f-4837-a2b6-ed2445253336", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1883034081-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f29ac48ab6544ec0bd1d210aec05dbc5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1559ce49-7345-443f-bf02-4bfeb88356ef", "external-id": "nsx-vlan-transportzone-670", "segmentation_id": 670, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8876137b-4c", "ovs_interfaceid": "8876137b-4c95-4f50-8bf9-ad7d44ac5052", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 944.785364] env[62070]: DEBUG oslo_concurrency.lockutils [req-b262651f-160d-4586-92d5-91da87aa5efa req-439420a6-d859-48cb-ac5a-12c8ef6bfff6 service nova] Acquired lock "refresh_cache-2c58db1d-405f-4489-85db-c74723be4a8d" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 944.785364] env[62070]: DEBUG nova.network.neutron [req-b262651f-160d-4586-92d5-91da87aa5efa req-439420a6-d859-48cb-ac5a-12c8ef6bfff6 service nova] [instance: 2c58db1d-405f-4489-85db-c74723be4a8d] Refreshing network info cache for port 8876137b-4c95-4f50-8bf9-ad7d44ac5052 {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 944.786848] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-87b139b2-90a5-4b70-b835-8cb543d252a2 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 2c58db1d-405f-4489-85db-c74723be4a8d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e2:57:14', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1559ce49-7345-443f-bf02-4bfeb88356ef', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 
'iface_id': '8876137b-4c95-4f50-8bf9-ad7d44ac5052', 'vif_model': 'vmxnet3'}] {{(pid=62070) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 944.794724] env[62070]: DEBUG oslo.service.loopingcall [None req-87b139b2-90a5-4b70-b835-8cb543d252a2 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 944.795781] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2c58db1d-405f-4489-85db-c74723be4a8d] Creating VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 944.796024] env[62070]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-bfe1b4c3-5021-4cb1-875e-ef88a88c2a5a {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.816484] env[62070]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 944.816484] env[62070]: value = "task-1122094" [ 944.816484] env[62070]: _type = "Task" [ 944.816484] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 944.830483] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1122094, 'name': CreateVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 944.843834] env[62070]: DEBUG nova.compute.manager [None req-1c8069d9-861b-46c0-a241-ffab59e77207 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 519cad6a-ebe0-42db-a19e-27249b83436e] Start spawning the instance on the hypervisor. 
{{(pid=62070) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 944.868577] env[62070]: DEBUG nova.virt.hardware [None req-1c8069d9-861b-46c0-a241-ffab59e77207 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T09:21:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T09:21:17Z,direct_url=,disk_format='vmdk',id=43ea607c-7ece-4601-9b11-75c6a16aa7dd,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9d42cb2bbadf40d6b35f237f71234611',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T09:21:18Z,virtual_size=,visibility=), allow threads: False {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 944.868886] env[62070]: DEBUG nova.virt.hardware [None req-1c8069d9-861b-46c0-a241-ffab59e77207 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Flavor limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 944.869096] env[62070]: DEBUG nova.virt.hardware [None req-1c8069d9-861b-46c0-a241-ffab59e77207 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Image limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 944.869338] env[62070]: DEBUG nova.virt.hardware [None req-1c8069d9-861b-46c0-a241-ffab59e77207 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Flavor pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 944.869547] env[62070]: DEBUG nova.virt.hardware [None req-1c8069d9-861b-46c0-a241-ffab59e77207 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Image pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 944.869702] env[62070]: DEBUG nova.virt.hardware [None req-1c8069d9-861b-46c0-a241-ffab59e77207 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 944.869965] env[62070]: DEBUG nova.virt.hardware [None req-1c8069d9-861b-46c0-a241-ffab59e77207 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 944.870198] env[62070]: DEBUG nova.virt.hardware [None req-1c8069d9-861b-46c0-a241-ffab59e77207 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 944.870389] env[62070]: DEBUG nova.virt.hardware [None req-1c8069d9-861b-46c0-a241-ffab59e77207 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] 
Got 1 possible topologies {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 944.870565] env[62070]: DEBUG nova.virt.hardware [None req-1c8069d9-861b-46c0-a241-ffab59e77207 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 944.870743] env[62070]: DEBUG nova.virt.hardware [None req-1c8069d9-861b-46c0-a241-ffab59e77207 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 944.871613] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21e5e136-9254-443f-a2ed-af2a6e75d672 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.880893] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bee6db2-2173-47ee-8368-7b2be3ea9e81 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.003178] env[62070]: DEBUG oslo_vmware.api [None req-906804fc-a7e7-4fcf-821d-5e65108abaa1 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': task-1122091, 'name': PowerOnVM_Task, 'duration_secs': 0.855659} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 945.003465] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-906804fc-a7e7-4fcf-821d-5e65108abaa1 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 84c00e4a-20d3-4739-8535-e27076d85a89] Powered on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 945.003667] env[62070]: INFO nova.compute.manager [None req-906804fc-a7e7-4fcf-821d-5e65108abaa1 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 84c00e4a-20d3-4739-8535-e27076d85a89] Took 7.24 seconds to spawn the instance on the hypervisor. [ 945.003986] env[62070]: DEBUG nova.compute.manager [None req-906804fc-a7e7-4fcf-821d-5e65108abaa1 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 84c00e4a-20d3-4739-8535-e27076d85a89] Checking state {{(pid=62070) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 945.004702] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b4e1f75-50f1-4b19-a4de-0c36784293d0 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.084406] env[62070]: WARNING nova.network.neutron [None req-58873f53-be6c-4871-8071-bfb0239a065e tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] [instance: 21bcb1a6-833b-48f3-8ee2-0e49c64a104f] 48dc51c7-cfa4-452e-9d72-2968d9a40dfa already exists in list: networks containing: ['48dc51c7-cfa4-452e-9d72-2968d9a40dfa']. 
ignoring it [ 945.152794] env[62070]: DEBUG nova.network.neutron [None req-1c8069d9-861b-46c0-a241-ffab59e77207 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 519cad6a-ebe0-42db-a19e-27249b83436e] Successfully updated port: 52474984-690f-441c-9477-d3d0a3ab1bb7 {{(pid=62070) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 945.175684] env[62070]: DEBUG oslo_vmware.api [None req-068a16d2-1f3a-48e7-9288-f43eac4f4395 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Task: {'id': task-1122093, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 945.211518] env[62070]: DEBUG oslo_vmware.api [None req-2cdd6dc5-b80a-4fa8-acc2-8040ca3495d3 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] Task: {'id': task-1122090, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 945.223026] env[62070]: DEBUG nova.scheduler.client.report [None req-c00d74f8-0b67-48be-9e7d-16251e5d55eb tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Updated inventory for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 with generation 115 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 945.223026] env[62070]: DEBUG nova.compute.provider_tree [None req-c00d74f8-0b67-48be-9e7d-16251e5d55eb tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Updating resource provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 generation from 115 to 116 during operation: update_inventory {{(pid=62070) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 945.223026] env[62070]: DEBUG nova.compute.provider_tree [None req-c00d74f8-0b67-48be-9e7d-16251e5d55eb tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Updating inventory in ProviderTree for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 945.282560] env[62070]: DEBUG oslo_vmware.api [None req-40f8be5d-d37a-44a5-98e4-ab20fc83851e tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Task: {'id': task-1122092, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 945.329888] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1122094, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 945.439697] env[62070]: DEBUG nova.network.neutron [None req-58873f53-be6c-4871-8071-bfb0239a065e tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] [instance: 21bcb1a6-833b-48f3-8ee2-0e49c64a104f] Updating instance_info_cache with network_info: [{"id": "45420f68-e309-4569-8dac-28e16d9417d7", "address": "fa:16:3e:08:c2:70", "network": {"id": "48dc51c7-cfa4-452e-9d72-2968d9a40dfa", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-274800531-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.203", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "925dff51764c4b56ae7ea05fbde2ecdd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2c7c1b46-cb81-45da-b5aa-7905d4da5854", "external-id": "nsx-vlan-transportzone-15", "segmentation_id": 15, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap45420f68-e3", "ovs_interfaceid": "45420f68-e309-4569-8dac-28e16d9417d7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "60714fe7-f6bc-4f1f-b4d0-a0b45b5a8f70", "address": "fa:16:3e:7c:c4:2b", "network": {"id": "48dc51c7-cfa4-452e-9d72-2968d9a40dfa", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-274800531-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "925dff51764c4b56ae7ea05fbde2ecdd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2c7c1b46-cb81-45da-b5aa-7905d4da5854", "external-id": "nsx-vlan-transportzone-15", "segmentation_id": 15, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap60714fe7-f6", "ovs_interfaceid": "60714fe7-f6bc-4f1f-b4d0-a0b45b5a8f70", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 945.523473] env[62070]: INFO nova.compute.manager [None req-906804fc-a7e7-4fcf-821d-5e65108abaa1 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 84c00e4a-20d3-4739-8535-e27076d85a89] Took 33.31 seconds to build instance. [ 945.570429] env[62070]: DEBUG nova.network.neutron [req-b262651f-160d-4586-92d5-91da87aa5efa req-439420a6-d859-48cb-ac5a-12c8ef6bfff6 service nova] [instance: 2c58db1d-405f-4489-85db-c74723be4a8d] Updated VIF entry in instance network info cache for port 8876137b-4c95-4f50-8bf9-ad7d44ac5052. 
{{(pid=62070) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 945.570797] env[62070]: DEBUG nova.network.neutron [req-b262651f-160d-4586-92d5-91da87aa5efa req-439420a6-d859-48cb-ac5a-12c8ef6bfff6 service nova] [instance: 2c58db1d-405f-4489-85db-c74723be4a8d] Updating instance_info_cache with network_info: [{"id": "8876137b-4c95-4f50-8bf9-ad7d44ac5052", "address": "fa:16:3e:e2:57:14", "network": {"id": "1ca75409-ae3f-4837-a2b6-ed2445253336", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1883034081-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f29ac48ab6544ec0bd1d210aec05dbc5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1559ce49-7345-443f-bf02-4bfeb88356ef", "external-id": "nsx-vlan-transportzone-670", "segmentation_id": 670, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8876137b-4c", "ovs_interfaceid": "8876137b-4c95-4f50-8bf9-ad7d44ac5052", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 945.655639] env[62070]: DEBUG oslo_concurrency.lockutils [None req-1c8069d9-861b-46c0-a241-ffab59e77207 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Acquiring lock "refresh_cache-519cad6a-ebe0-42db-a19e-27249b83436e" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 945.655897] env[62070]: DEBUG oslo_concurrency.lockutils [None req-1c8069d9-861b-46c0-a241-ffab59e77207 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Acquired lock "refresh_cache-519cad6a-ebe0-42db-a19e-27249b83436e" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 945.655963] env[62070]: DEBUG nova.network.neutron [None req-1c8069d9-861b-46c0-a241-ffab59e77207 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 519cad6a-ebe0-42db-a19e-27249b83436e] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 945.676850] env[62070]: DEBUG oslo_vmware.api [None req-068a16d2-1f3a-48e7-9288-f43eac4f4395 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Task: {'id': task-1122093, 'name': ReconfigVM_Task, 'duration_secs': 0.71815} completed successfully. 
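The Acquiring/Acquired lines around "refresh_cache-<instance uuid>" serialize network-info-cache rebuilds per instance. A hedged sketch of that usage, assuming the standard oslo.concurrency lockutils.lock context manager; refresh_network_cache here is a hypothetical callable, not a Nova function.

from oslo_concurrency import lockutils

def refresh_instance_cache(instance_uuid: str, refresh_network_cache) -> None:
    # One rebuild at a time per instance; other instances proceed in parallel.
    with lockutils.lock(f"refresh_cache-{instance_uuid}"):
        refresh_network_cache(instance_uuid)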
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 945.677088] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-068a16d2-1f3a-48e7-9288-f43eac4f4395 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] [instance: 7dc27fe6-495f-498d-88fe-a99ddc19a21c] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-245461', 'volume_id': 'c400637e-5e41-43a1-91df-a0f4d9c4e253', 'name': 'volume-c400637e-5e41-43a1-91df-a0f4d9c4e253', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '7dc27fe6-495f-498d-88fe-a99ddc19a21c', 'attached_at': '', 'detached_at': '', 'volume_id': 'c400637e-5e41-43a1-91df-a0f4d9c4e253', 'serial': 'c400637e-5e41-43a1-91df-a0f4d9c4e253'} {{(pid=62070) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 945.714257] env[62070]: DEBUG oslo_vmware.api [None req-2cdd6dc5-b80a-4fa8-acc2-8040ca3495d3 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] Task: {'id': task-1122090, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.549105} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 945.714538] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-2cdd6dc5-b80a-4fa8-acc2-8040ca3495d3 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore2] 1440361b-d3b4-4c1c-995c-fe7ff99ee297/1440361b-d3b4-4c1c-995c-fe7ff99ee297.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 945.714762] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-2cdd6dc5-b80a-4fa8-acc2-8040ca3495d3 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] [instance: 1440361b-d3b4-4c1c-995c-fe7ff99ee297] Extending root virtual disk to 1048576 {{(pid=62070) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 945.715033] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-12a10b28-aa2a-4201-b37b-69080086be29 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.721768] env[62070]: DEBUG oslo_vmware.api [None req-2cdd6dc5-b80a-4fa8-acc2-8040ca3495d3 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] Waiting for the task: (returnval){ [ 945.721768] env[62070]: value = "task-1122095" [ 945.721768] env[62070]: _type = "Task" [ 945.721768] env[62070]: } to complete. 
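The "Attached VMDK" entry above logs the Cinder connection_info once the ReconfigVM_Task succeeds. A compact model of the fields that matter to the vmdk attach path, illustrative only, with field names taken from the dictionary in the log:

from dataclasses import dataclass

@dataclass
class VmdkConnectionInfo:
    volume_ref: str          # managed object id of the shadow VM, e.g. 'vm-245461'
    volume_id: str           # Cinder volume UUID
    access_mode: str = "rw"  # 'rw' or 'ro'
    encrypted: bool = False

def from_connection_info(connection_info: dict) -> VmdkConnectionInfo:
    if connection_info.get("driver_volume_type") != "vmdk":
        raise ValueError("expected a vmdk-type connection_info")
    data = connection_info["data"]
    return VmdkConnectionInfo(
        volume_ref=data["volume"],
        volume_id=data["volume_id"],
        access_mode=data.get("access_mode", "rw"),
        encrypted=bool(data.get("encrypted", False)),
    )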
{{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 945.725553] env[62070]: DEBUG oslo_concurrency.lockutils [None req-c00d74f8-0b67-48be-9e7d-16251e5d55eb tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.901s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 945.728314] env[62070]: DEBUG oslo_concurrency.lockutils [None req-4f5edc69-29d0-4c47-a9c1-354e8eddef6f tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 26.147s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 945.728544] env[62070]: DEBUG nova.objects.instance [None req-4f5edc69-29d0-4c47-a9c1-354e8eddef6f tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Lazy-loading 'resources' on Instance uuid 3d699ce5-4d21-48f3-8f17-0cd49aebf109 {{(pid=62070) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 945.736671] env[62070]: DEBUG oslo_vmware.api [None req-2cdd6dc5-b80a-4fa8-acc2-8040ca3495d3 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] Task: {'id': task-1122095, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 945.746890] env[62070]: INFO nova.scheduler.client.report [None req-c00d74f8-0b67-48be-9e7d-16251e5d55eb tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Deleted allocations for instance 328fbc92-8162-4e12-a02d-6e9cafe0c365 [ 945.784628] env[62070]: DEBUG oslo_vmware.api [None req-40f8be5d-d37a-44a5-98e4-ab20fc83851e tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Task: {'id': task-1122092, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.448309} completed successfully. 
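The compute_resources lock entries (held 2.901s, waited 26.147s) show every resource-tracker mutation funneling through a single named semaphore on the host, which is why one slow update makes the next caller wait. A minimal sketch of that serialization, assuming the oslo.concurrency synchronized decorator; the tracker class itself is a toy, not Nova's ResourceTracker.

from oslo_concurrency import lockutils

class MiniResourceTracker:
    """Toy tracker: only the locking pattern mirrors the log, not Nova's logic."""

    def __init__(self) -> None:
        self.used_vcpus = 0

    @lockutils.synchronized("compute_resources")
    def update_usage(self, vcpus_delta: int) -> None:
        # Callers block here while another request holds the lock, producing
        # the long "waited ...s" figures seen above.
        self.used_vcpus += vcpus_delta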
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 945.784917] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-40f8be5d-d37a-44a5-98e4-ab20fc83851e tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore2] e850734f-c49c-46d7-87ab-b0d6bed89d9b/e850734f-c49c-46d7-87ab-b0d6bed89d9b.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 945.785152] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-40f8be5d-d37a-44a5-98e4-ab20fc83851e tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: e850734f-c49c-46d7-87ab-b0d6bed89d9b] Extending root virtual disk to 1048576 {{(pid=62070) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 945.785434] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d46166ad-3ba6-44e8-90cf-40aabf22c885 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.793421] env[62070]: DEBUG oslo_vmware.api [None req-40f8be5d-d37a-44a5-98e4-ab20fc83851e tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Waiting for the task: (returnval){ [ 945.793421] env[62070]: value = "task-1122096" [ 945.793421] env[62070]: _type = "Task" [ 945.793421] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 945.805048] env[62070]: DEBUG oslo_vmware.api [None req-40f8be5d-d37a-44a5-98e4-ab20fc83851e tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Task: {'id': task-1122096, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 945.818280] env[62070]: DEBUG nova.compute.manager [req-2bf91cc3-2eef-4f5d-89a1-f81c3b6d58a2 req-fe6413f6-2acc-4b8f-9e90-2c69faefe2f5 service nova] [instance: 21bcb1a6-833b-48f3-8ee2-0e49c64a104f] Received event network-changed-60714fe7-f6bc-4f1f-b4d0-a0b45b5a8f70 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 945.818524] env[62070]: DEBUG nova.compute.manager [req-2bf91cc3-2eef-4f5d-89a1-f81c3b6d58a2 req-fe6413f6-2acc-4b8f-9e90-2c69faefe2f5 service nova] [instance: 21bcb1a6-833b-48f3-8ee2-0e49c64a104f] Refreshing instance network info cache due to event network-changed-60714fe7-f6bc-4f1f-b4d0-a0b45b5a8f70. {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 945.821357] env[62070]: DEBUG oslo_concurrency.lockutils [req-2bf91cc3-2eef-4f5d-89a1-f81c3b6d58a2 req-fe6413f6-2acc-4b8f-9e90-2c69faefe2f5 service nova] Acquiring lock "refresh_cache-21bcb1a6-833b-48f3-8ee2-0e49c64a104f" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 945.841139] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1122094, 'name': CreateVM_Task, 'duration_secs': 0.577617} completed successfully. 
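The CopyVirtualDisk entries copy the cached base image into a per-instance folder named after the instance UUID. A small helper that derives both datastore paths in the bracketed "[datastore] folder/file.vmdk" notation used above; the naming scheme is inferred from the log, not an exact Nova utility.

def cached_image_path(datastore: str, image_id: str,
                      cache_folder: str = "devstack-image-cache_base") -> str:
    """Path of the cached base image vmdk, e.g. '[datastore2] devstack-image-cache_base/<id>/<id>.vmdk'."""
    return f"[{datastore}] {cache_folder}/{image_id}/{image_id}.vmdk"

def instance_disk_path(datastore: str, instance_uuid: str) -> str:
    """Destination path of the instance root disk, e.g. '[datastore2] <uuid>/<uuid>.vmdk'."""
    return f"[{datastore}] {instance_uuid}/{instance_uuid}.vmdk"

# Matching the log:
# cached_image_path("datastore2", "43ea607c-7ece-4601-9b11-75c6a16aa7dd")
# instance_disk_path("datastore2", "e850734f-c49c-46d7-87ab-b0d6bed89d9b")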
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 945.841385] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2c58db1d-405f-4489-85db-c74723be4a8d] Created VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 945.842041] env[62070]: DEBUG oslo_concurrency.lockutils [None req-87b139b2-90a5-4b70-b835-8cb543d252a2 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 945.842197] env[62070]: DEBUG oslo_concurrency.lockutils [None req-87b139b2-90a5-4b70-b835-8cb543d252a2 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Acquired lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 945.843055] env[62070]: DEBUG oslo_concurrency.lockutils [None req-87b139b2-90a5-4b70-b835-8cb543d252a2 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 945.843055] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fb8c66fd-5200-4e4d-9c7a-a1001c6ea272 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.848475] env[62070]: DEBUG oslo_vmware.api [None req-87b139b2-90a5-4b70-b835-8cb543d252a2 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Waiting for the task: (returnval){ [ 945.848475] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]5272ffaf-a1d6-0e3f-08f0-d130f578a0a1" [ 945.848475] env[62070]: _type = "Task" [ 945.848475] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 945.857356] env[62070]: DEBUG oslo_vmware.api [None req-87b139b2-90a5-4b70-b835-8cb543d252a2 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]5272ffaf-a1d6-0e3f-08f0-d130f578a0a1, 'name': SearchDatastore_Task} progress is 0%. 
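The per-image locks on "[datastore1] devstack-image-cache_base/<image id>" followed by SearchDatastore_Task implement a check-then-fetch image cache: one request per image id probes the cache and downloads if needed, while concurrent spawns of the same image wait on the same key. A generic sketch of that pattern, with a threading lock registry and hypothetical is_cached/fetch_into_cache callables standing in for the datastore calls.

import threading
from collections import defaultdict

_image_locks: dict[str, threading.Lock] = defaultdict(threading.Lock)

def ensure_cached(image_id: str, is_cached, fetch_into_cache) -> None:
    """Populate the cache at most once per image id; later callers find it present."""
    with _image_locks[image_id]:       # mirrors the '[datastore1] .../<image>' lock
        if is_cached(image_id):        # mirrors SearchDatastore_Task finding the vmdk
            return
        fetch_into_cache(image_id)     # download/convert happens under the lock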
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 945.942249] env[62070]: DEBUG oslo_concurrency.lockutils [None req-58873f53-be6c-4871-8071-bfb0239a065e tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Releasing lock "refresh_cache-21bcb1a6-833b-48f3-8ee2-0e49c64a104f" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 945.942951] env[62070]: DEBUG oslo_concurrency.lockutils [None req-58873f53-be6c-4871-8071-bfb0239a065e tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Acquiring lock "21bcb1a6-833b-48f3-8ee2-0e49c64a104f" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 945.943138] env[62070]: DEBUG oslo_concurrency.lockutils [None req-58873f53-be6c-4871-8071-bfb0239a065e tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Acquired lock "21bcb1a6-833b-48f3-8ee2-0e49c64a104f" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 945.943429] env[62070]: DEBUG oslo_concurrency.lockutils [req-2bf91cc3-2eef-4f5d-89a1-f81c3b6d58a2 req-fe6413f6-2acc-4b8f-9e90-2c69faefe2f5 service nova] Acquired lock "refresh_cache-21bcb1a6-833b-48f3-8ee2-0e49c64a104f" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 945.943619] env[62070]: DEBUG nova.network.neutron [req-2bf91cc3-2eef-4f5d-89a1-f81c3b6d58a2 req-fe6413f6-2acc-4b8f-9e90-2c69faefe2f5 service nova] [instance: 21bcb1a6-833b-48f3-8ee2-0e49c64a104f] Refreshing network info cache for port 60714fe7-f6bc-4f1f-b4d0-a0b45b5a8f70 {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 945.945592] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d26d207f-e439-4f6f-a51f-5dabef04dd23 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.967667] env[62070]: DEBUG nova.virt.hardware [None req-58873f53-be6c-4871-8071-bfb0239a065e tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T09:21:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 945.968635] env[62070]: DEBUG nova.virt.hardware [None req-58873f53-be6c-4871-8071-bfb0239a065e tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Flavor limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 945.968965] env[62070]: DEBUG nova.virt.hardware [None req-58873f53-be6c-4871-8071-bfb0239a065e tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Image 
limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 945.969351] env[62070]: DEBUG nova.virt.hardware [None req-58873f53-be6c-4871-8071-bfb0239a065e tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Flavor pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 945.969517] env[62070]: DEBUG nova.virt.hardware [None req-58873f53-be6c-4871-8071-bfb0239a065e tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Image pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 945.969740] env[62070]: DEBUG nova.virt.hardware [None req-58873f53-be6c-4871-8071-bfb0239a065e tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 945.970022] env[62070]: DEBUG nova.virt.hardware [None req-58873f53-be6c-4871-8071-bfb0239a065e tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 945.970205] env[62070]: DEBUG nova.virt.hardware [None req-58873f53-be6c-4871-8071-bfb0239a065e tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 945.970385] env[62070]: DEBUG nova.virt.hardware [None req-58873f53-be6c-4871-8071-bfb0239a065e tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Got 1 possible topologies {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 945.970570] env[62070]: DEBUG nova.virt.hardware [None req-58873f53-be6c-4871-8071-bfb0239a065e tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 945.970758] env[62070]: DEBUG nova.virt.hardware [None req-58873f53-be6c-4871-8071-bfb0239a065e tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 945.977074] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-58873f53-be6c-4871-8071-bfb0239a065e tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] [instance: 21bcb1a6-833b-48f3-8ee2-0e49c64a104f] Reconfiguring VM to attach interface {{(pid=62070) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1929}} [ 945.978158] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d4d8d952-1b34-464f-8d56-a931bbc4324c {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.998773] env[62070]: DEBUG oslo_vmware.api [None 
req-58873f53-be6c-4871-8071-bfb0239a065e tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Waiting for the task: (returnval){ [ 945.998773] env[62070]: value = "task-1122097" [ 945.998773] env[62070]: _type = "Task" [ 945.998773] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 946.008796] env[62070]: DEBUG oslo_vmware.api [None req-58873f53-be6c-4871-8071-bfb0239a065e tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Task: {'id': task-1122097, 'name': ReconfigVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 946.025815] env[62070]: DEBUG oslo_concurrency.lockutils [None req-906804fc-a7e7-4fcf-821d-5e65108abaa1 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Lock "84c00e4a-20d3-4739-8535-e27076d85a89" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 34.824s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 946.074351] env[62070]: DEBUG oslo_concurrency.lockutils [req-b262651f-160d-4586-92d5-91da87aa5efa req-439420a6-d859-48cb-ac5a-12c8ef6bfff6 service nova] Releasing lock "refresh_cache-2c58db1d-405f-4489-85db-c74723be4a8d" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 946.235254] env[62070]: DEBUG oslo_vmware.api [None req-2cdd6dc5-b80a-4fa8-acc2-8040ca3495d3 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] Task: {'id': task-1122095, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.077934} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 946.236217] env[62070]: DEBUG nova.network.neutron [None req-1c8069d9-861b-46c0-a241-ffab59e77207 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 519cad6a-ebe0-42db-a19e-27249b83436e] Instance cache missing network info. 
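The hardware-topology entries a few lines up (flavor m1.nano with one vCPU, limits 65536:65536:65536, one possible topology 1:1:1) enumerate every sockets*cores*threads factorization of the vCPU count that fits the limits. A self-contained re-creation of that enumeration; simplified, since Nova also folds in flavor and image preferences.

from typing import NamedTuple

class Topology(NamedTuple):
    sockets: int
    cores: int
    threads: int

def possible_topologies(vcpus: int, max_sockets: int = 65536,
                        max_cores: int = 65536, max_threads: int = 65536) -> list[Topology]:
    """All (sockets, cores, threads) whose product equals the vCPU count, within limits."""
    found = []
    for s in range(1, min(max_sockets, vcpus) + 1):
        if vcpus % s:
            continue
        for c in range(1, min(max_cores, vcpus // s) + 1):
            if (vcpus // s) % c:
                continue
            t = vcpus // (s * c)
            if t <= max_threads:
                found.append(Topology(s, c, t))
    return found

# possible_topologies(1) -> [Topology(sockets=1, cores=1, threads=1)], matching the log.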
{{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 946.237966] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-2cdd6dc5-b80a-4fa8-acc2-8040ca3495d3 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] [instance: 1440361b-d3b4-4c1c-995c-fe7ff99ee297] Extended root virtual disk {{(pid=62070) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 946.238780] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92ddb7fe-9ea3-4590-9d64-7e47c1291d13 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.254736] env[62070]: DEBUG oslo_concurrency.lockutils [None req-c00d74f8-0b67-48be-9e7d-16251e5d55eb tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Lock "328fbc92-8162-4e12-a02d-6e9cafe0c365" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 30.421s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 946.264763] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-2cdd6dc5-b80a-4fa8-acc2-8040ca3495d3 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] [instance: 1440361b-d3b4-4c1c-995c-fe7ff99ee297] Reconfiguring VM instance instance-00000052 to attach disk [datastore2] 1440361b-d3b4-4c1c-995c-fe7ff99ee297/1440361b-d3b4-4c1c-995c-fe7ff99ee297.vmdk or device None with type sparse {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 946.270498] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0f8754a4-3500-4908-a92c-340cf61a66fe {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.292505] env[62070]: DEBUG oslo_vmware.api [None req-2cdd6dc5-b80a-4fa8-acc2-8040ca3495d3 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] Waiting for the task: (returnval){ [ 946.292505] env[62070]: value = "task-1122098" [ 946.292505] env[62070]: _type = "Task" [ 946.292505] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 946.306835] env[62070]: DEBUG oslo_vmware.api [None req-2cdd6dc5-b80a-4fa8-acc2-8040ca3495d3 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] Task: {'id': task-1122098, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 946.310215] env[62070]: DEBUG oslo_vmware.api [None req-40f8be5d-d37a-44a5-98e4-ab20fc83851e tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Task: {'id': task-1122096, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072291} completed successfully. 
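The "Extending root virtual disk to 1048576" entries are the flavor's root_gb expressed in KB (1 GiB = 1048576 KB): after the cached image is copied, the copy is grown to the flavor size when it is smaller. A short arithmetic sketch of that decision; the helper names are illustrative.

KIB_PER_GIB = 1024 * 1024  # ExtendVirtualDisk capacities are given in KB

def target_root_size_kb(flavor_root_gb: int) -> int:
    return flavor_root_gb * KIB_PER_GIB

def needs_extend(current_size_kb: int, flavor_root_gb: int) -> bool:
    # Grow only, never shrink: skip the ExtendVirtualDisk_Task when the copied
    # image is already at least as large as the flavor's root disk.
    return current_size_kb < target_root_size_kb(flavor_root_gb)

# target_root_size_kb(1) == 1048576, the value seen above for the 1 GB root disk.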
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 946.310560] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-40f8be5d-d37a-44a5-98e4-ab20fc83851e tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: e850734f-c49c-46d7-87ab-b0d6bed89d9b] Extended root virtual disk {{(pid=62070) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 946.311459] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-699dcdb9-f9b1-4ae8-a51b-13a7f98ebad6 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.337139] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-40f8be5d-d37a-44a5-98e4-ab20fc83851e tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: e850734f-c49c-46d7-87ab-b0d6bed89d9b] Reconfiguring VM instance instance-00000054 to attach disk [datastore2] e850734f-c49c-46d7-87ab-b0d6bed89d9b/e850734f-c49c-46d7-87ab-b0d6bed89d9b.vmdk or device None with type sparse {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 946.341842] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9ae16395-a71c-4384-b09e-c69ada5307d5 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.366194] env[62070]: DEBUG oslo_vmware.api [None req-87b139b2-90a5-4b70-b835-8cb543d252a2 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]5272ffaf-a1d6-0e3f-08f0-d130f578a0a1, 'name': SearchDatastore_Task, 'duration_secs': 0.012029} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 946.367517] env[62070]: DEBUG oslo_concurrency.lockutils [None req-87b139b2-90a5-4b70-b835-8cb543d252a2 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Releasing lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 946.367790] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-87b139b2-90a5-4b70-b835-8cb543d252a2 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 2c58db1d-405f-4489-85db-c74723be4a8d] Processing image 43ea607c-7ece-4601-9b11-75c6a16aa7dd {{(pid=62070) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 946.368046] env[62070]: DEBUG oslo_concurrency.lockutils [None req-87b139b2-90a5-4b70-b835-8cb543d252a2 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 946.368205] env[62070]: DEBUG oslo_concurrency.lockutils [None req-87b139b2-90a5-4b70-b835-8cb543d252a2 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Acquired lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 946.368418] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-87b139b2-90a5-4b70-b835-8cb543d252a2 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 946.368758] env[62070]: DEBUG oslo_vmware.api [None req-40f8be5d-d37a-44a5-98e4-ab20fc83851e tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Waiting for the task: (returnval){ [ 946.368758] env[62070]: value = "task-1122099" [ 946.368758] env[62070]: _type = "Task" [ 946.368758] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 946.371237] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f6e265fe-27e7-47a3-bd77-bb187b6d11fd {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.383345] env[62070]: DEBUG oslo_vmware.api [None req-40f8be5d-d37a-44a5-98e4-ab20fc83851e tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Task: {'id': task-1122099, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 946.387276] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-87b139b2-90a5-4b70-b835-8cb543d252a2 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 946.387495] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-87b139b2-90a5-4b70-b835-8cb543d252a2 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62070) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 946.388256] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e760a3aa-fe71-43de-8048-249461d33049 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.393981] env[62070]: DEBUG oslo_vmware.api [None req-87b139b2-90a5-4b70-b835-8cb543d252a2 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Waiting for the task: (returnval){ [ 946.393981] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]522a52aa-58fe-1df7-1781-622a2bc0af4d" [ 946.393981] env[62070]: _type = "Task" [ 946.393981] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 946.404957] env[62070]: DEBUG oslo_vmware.api [None req-87b139b2-90a5-4b70-b835-8cb543d252a2 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]522a52aa-58fe-1df7-1781-622a2bc0af4d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 946.514906] env[62070]: DEBUG oslo_vmware.api [None req-58873f53-be6c-4871-8071-bfb0239a065e tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Task: {'id': task-1122097, 'name': ReconfigVM_Task} progress is 99%. 
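The MakeDirectory and "Folder [datastore1] devstack-image-cache_base created" lines create the cache folder unconditionally and treat "already exists" as success, so concurrent spawns do not race on a separate existence check. A generic sketch of that idempotent-create pattern, using a local filesystem path as a stand-in for the datastore call.

from pathlib import Path

def ensure_folder(path: str) -> None:
    # Create parents as needed and ignore "already exists": the same
    # create-first approach the datastore MakeDirectory call relies on.
    Path(path).mkdir(parents=True, exist_ok=True)

if __name__ == "__main__":
    ensure_folder("/tmp/devstack-image-cache_base")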
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 946.548524] env[62070]: DEBUG nova.network.neutron [None req-1c8069d9-861b-46c0-a241-ffab59e77207 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 519cad6a-ebe0-42db-a19e-27249b83436e] Updating instance_info_cache with network_info: [{"id": "52474984-690f-441c-9477-d3d0a3ab1bb7", "address": "fa:16:3e:74:17:31", "network": {"id": "5f4568f0-c3e8-497f-b7d6-8d92db2f4066", "bridge": "br-int", "label": "tempest-ImagesTestJSON-24352632-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "eac8e5edc8f14fff89aba7c8cb6cac5d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "54495d8d-2696-4f65-b925-e567abdc205f", "external-id": "nsx-vlan-transportzone-220", "segmentation_id": 220, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap52474984-69", "ovs_interfaceid": "52474984-690f-441c-9477-d3d0a3ab1bb7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 946.575620] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2cfa7b5e-a276-48be-87b7-2414d5ed4f65 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.586495] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-758fa246-cb73-4c22-80b0-b698bc6e9617 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.621991] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2501e594-04df-4d85-8766-bd5c719d1487 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.630426] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b1ae0dc-dc64-488b-bdcc-fe7f23425ca6 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.645087] env[62070]: DEBUG nova.compute.provider_tree [None req-4f5edc69-29d0-4c47-a9c1-354e8eddef6f tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 946.716427] env[62070]: DEBUG nova.objects.instance [None req-068a16d2-1f3a-48e7-9288-f43eac4f4395 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Lazy-loading 'flavor' on Instance uuid 7dc27fe6-495f-498d-88fe-a99ddc19a21c {{(pid=62070) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 946.784189] env[62070]: DEBUG nova.network.neutron 
[req-2bf91cc3-2eef-4f5d-89a1-f81c3b6d58a2 req-fe6413f6-2acc-4b8f-9e90-2c69faefe2f5 service nova] [instance: 21bcb1a6-833b-48f3-8ee2-0e49c64a104f] Updated VIF entry in instance network info cache for port 60714fe7-f6bc-4f1f-b4d0-a0b45b5a8f70. {{(pid=62070) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 946.784695] env[62070]: DEBUG nova.network.neutron [req-2bf91cc3-2eef-4f5d-89a1-f81c3b6d58a2 req-fe6413f6-2acc-4b8f-9e90-2c69faefe2f5 service nova] [instance: 21bcb1a6-833b-48f3-8ee2-0e49c64a104f] Updating instance_info_cache with network_info: [{"id": "45420f68-e309-4569-8dac-28e16d9417d7", "address": "fa:16:3e:08:c2:70", "network": {"id": "48dc51c7-cfa4-452e-9d72-2968d9a40dfa", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-274800531-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.203", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "925dff51764c4b56ae7ea05fbde2ecdd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2c7c1b46-cb81-45da-b5aa-7905d4da5854", "external-id": "nsx-vlan-transportzone-15", "segmentation_id": 15, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap45420f68-e3", "ovs_interfaceid": "45420f68-e309-4569-8dac-28e16d9417d7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "60714fe7-f6bc-4f1f-b4d0-a0b45b5a8f70", "address": "fa:16:3e:7c:c4:2b", "network": {"id": "48dc51c7-cfa4-452e-9d72-2968d9a40dfa", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-274800531-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "925dff51764c4b56ae7ea05fbde2ecdd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2c7c1b46-cb81-45da-b5aa-7905d4da5854", "external-id": "nsx-vlan-transportzone-15", "segmentation_id": 15, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap60714fe7-f6", "ovs_interfaceid": "60714fe7-f6bc-4f1f-b4d0-a0b45b5a8f70", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 946.805827] env[62070]: DEBUG oslo_vmware.api [None req-2cdd6dc5-b80a-4fa8-acc2-8040ca3495d3 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] Task: {'id': task-1122098, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 946.842676] env[62070]: DEBUG nova.compute.manager [req-1f04d34b-88b3-4bba-9887-1316ecda05bf req-37afe621-7d70-46c5-bd2c-a0e96bd032a2 service nova] [instance: 519cad6a-ebe0-42db-a19e-27249b83436e] Received event network-vif-plugged-52474984-690f-441c-9477-d3d0a3ab1bb7 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 946.842984] env[62070]: DEBUG oslo_concurrency.lockutils [req-1f04d34b-88b3-4bba-9887-1316ecda05bf req-37afe621-7d70-46c5-bd2c-a0e96bd032a2 service nova] Acquiring lock "519cad6a-ebe0-42db-a19e-27249b83436e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 946.843241] env[62070]: DEBUG oslo_concurrency.lockutils [req-1f04d34b-88b3-4bba-9887-1316ecda05bf req-37afe621-7d70-46c5-bd2c-a0e96bd032a2 service nova] Lock "519cad6a-ebe0-42db-a19e-27249b83436e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 946.843421] env[62070]: DEBUG oslo_concurrency.lockutils [req-1f04d34b-88b3-4bba-9887-1316ecda05bf req-37afe621-7d70-46c5-bd2c-a0e96bd032a2 service nova] Lock "519cad6a-ebe0-42db-a19e-27249b83436e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 946.843618] env[62070]: DEBUG nova.compute.manager [req-1f04d34b-88b3-4bba-9887-1316ecda05bf req-37afe621-7d70-46c5-bd2c-a0e96bd032a2 service nova] [instance: 519cad6a-ebe0-42db-a19e-27249b83436e] No waiting events found dispatching network-vif-plugged-52474984-690f-441c-9477-d3d0a3ab1bb7 {{(pid=62070) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 946.843793] env[62070]: WARNING nova.compute.manager [req-1f04d34b-88b3-4bba-9887-1316ecda05bf req-37afe621-7d70-46c5-bd2c-a0e96bd032a2 service nova] [instance: 519cad6a-ebe0-42db-a19e-27249b83436e] Received unexpected event network-vif-plugged-52474984-690f-441c-9477-d3d0a3ab1bb7 for instance with vm_state building and task_state spawning. [ 946.843956] env[62070]: DEBUG nova.compute.manager [req-1f04d34b-88b3-4bba-9887-1316ecda05bf req-37afe621-7d70-46c5-bd2c-a0e96bd032a2 service nova] [instance: 519cad6a-ebe0-42db-a19e-27249b83436e] Received event network-changed-52474984-690f-441c-9477-d3d0a3ab1bb7 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 946.844254] env[62070]: DEBUG nova.compute.manager [req-1f04d34b-88b3-4bba-9887-1316ecda05bf req-37afe621-7d70-46c5-bd2c-a0e96bd032a2 service nova] [instance: 519cad6a-ebe0-42db-a19e-27249b83436e] Refreshing instance network info cache due to event network-changed-52474984-690f-441c-9477-d3d0a3ab1bb7. 
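The event entries above show the compute manager popping a named waiter for "network-vif-plugged-<port id>" under a per-instance "<uuid>-events" lock; when nothing is waiting, as here while the instance is still building, the event is logged as unexpected and dropped. A simplified sketch of that dispatch, using threading primitives in place of Nova's event objects.

import threading

class InstanceEvents:
    """Per-instance registry of named events a spawn may block on."""

    def __init__(self) -> None:
        self._lock = threading.Lock()                 # the "<uuid>-events" lock in the log
        self._waiters: dict[str, threading.Event] = {}

    def prepare(self, name: str) -> threading.Event:
        """Register interest in an event before it can arrive."""
        with self._lock:
            return self._waiters.setdefault(name, threading.Event())

    def pop_event(self, name: str) -> threading.Event | None:
        with self._lock:
            return self._waiters.pop(name, None)

def dispatch(events: InstanceEvents, name: str) -> None:
    waiter = events.pop_event(name)
    if waiter is None:
        print(f"WARNING: received unexpected event {name}")   # mirrors the WARNING above
        return
    waiter.set()                                              # unblocks the waiting spawn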
{{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 946.844444] env[62070]: DEBUG oslo_concurrency.lockutils [req-1f04d34b-88b3-4bba-9887-1316ecda05bf req-37afe621-7d70-46c5-bd2c-a0e96bd032a2 service nova] Acquiring lock "refresh_cache-519cad6a-ebe0-42db-a19e-27249b83436e" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 946.885903] env[62070]: DEBUG oslo_vmware.api [None req-40f8be5d-d37a-44a5-98e4-ab20fc83851e tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Task: {'id': task-1122099, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 946.904340] env[62070]: DEBUG oslo_vmware.api [None req-87b139b2-90a5-4b70-b835-8cb543d252a2 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]522a52aa-58fe-1df7-1781-622a2bc0af4d, 'name': SearchDatastore_Task, 'duration_secs': 0.014323} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 946.905420] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-515305aa-0166-473b-bbd4-7ca3719e4516 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.911337] env[62070]: DEBUG oslo_vmware.api [None req-87b139b2-90a5-4b70-b835-8cb543d252a2 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Waiting for the task: (returnval){ [ 946.911337] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]5244ef5a-3a11-b197-33f5-8c027e858473" [ 946.911337] env[62070]: _type = "Task" [ 946.911337] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 946.919586] env[62070]: DEBUG oslo_vmware.api [None req-87b139b2-90a5-4b70-b835-8cb543d252a2 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]5244ef5a-3a11-b197-33f5-8c027e858473, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 947.014168] env[62070]: DEBUG oslo_vmware.api [None req-58873f53-be6c-4871-8071-bfb0239a065e tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Task: {'id': task-1122097, 'name': ReconfigVM_Task, 'duration_secs': 0.573886} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 947.015398] env[62070]: DEBUG oslo_concurrency.lockutils [None req-58873f53-be6c-4871-8071-bfb0239a065e tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Releasing lock "21bcb1a6-833b-48f3-8ee2-0e49c64a104f" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 947.015686] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-58873f53-be6c-4871-8071-bfb0239a065e tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] [instance: 21bcb1a6-833b-48f3-8ee2-0e49c64a104f] Reconfigured VM to attach interface {{(pid=62070) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1943}} [ 947.051694] env[62070]: DEBUG oslo_concurrency.lockutils [None req-1c8069d9-861b-46c0-a241-ffab59e77207 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Releasing lock "refresh_cache-519cad6a-ebe0-42db-a19e-27249b83436e" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 947.052072] env[62070]: DEBUG nova.compute.manager [None req-1c8069d9-861b-46c0-a241-ffab59e77207 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 519cad6a-ebe0-42db-a19e-27249b83436e] Instance network_info: |[{"id": "52474984-690f-441c-9477-d3d0a3ab1bb7", "address": "fa:16:3e:74:17:31", "network": {"id": "5f4568f0-c3e8-497f-b7d6-8d92db2f4066", "bridge": "br-int", "label": "tempest-ImagesTestJSON-24352632-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "eac8e5edc8f14fff89aba7c8cb6cac5d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "54495d8d-2696-4f65-b925-e567abdc205f", "external-id": "nsx-vlan-transportzone-220", "segmentation_id": 220, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap52474984-69", "ovs_interfaceid": "52474984-690f-441c-9477-d3d0a3ab1bb7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 947.052397] env[62070]: DEBUG oslo_concurrency.lockutils [req-1f04d34b-88b3-4bba-9887-1316ecda05bf req-37afe621-7d70-46c5-bd2c-a0e96bd032a2 service nova] Acquired lock "refresh_cache-519cad6a-ebe0-42db-a19e-27249b83436e" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 947.052632] env[62070]: DEBUG nova.network.neutron [req-1f04d34b-88b3-4bba-9887-1316ecda05bf req-37afe621-7d70-46c5-bd2c-a0e96bd032a2 service nova] [instance: 519cad6a-ebe0-42db-a19e-27249b83436e] Refreshing network info cache for port 52474984-690f-441c-9477-d3d0a3ab1bb7 {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 947.054250] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-1c8069d9-861b-46c0-a241-ffab59e77207 tempest-ImagesTestJSON-1351627684 
tempest-ImagesTestJSON-1351627684-project-member] [instance: 519cad6a-ebe0-42db-a19e-27249b83436e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:74:17:31', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '54495d8d-2696-4f65-b925-e567abdc205f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '52474984-690f-441c-9477-d3d0a3ab1bb7', 'vif_model': 'vmxnet3'}] {{(pid=62070) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 947.063248] env[62070]: DEBUG oslo.service.loopingcall [None req-1c8069d9-861b-46c0-a241-ffab59e77207 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 947.066187] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 519cad6a-ebe0-42db-a19e-27249b83436e] Creating VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 947.066682] env[62070]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-237b42a8-2195-436e-8e4e-adebe3fc7e12 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.088543] env[62070]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 947.088543] env[62070]: value = "task-1122100" [ 947.088543] env[62070]: _type = "Task" [ 947.088543] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 947.097491] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1122100, 'name': CreateVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 947.149083] env[62070]: DEBUG nova.scheduler.client.report [None req-4f5edc69-29d0-4c47-a9c1-354e8eddef6f tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 947.221761] env[62070]: DEBUG oslo_concurrency.lockutils [None req-068a16d2-1f3a-48e7-9288-f43eac4f4395 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Lock "7dc27fe6-495f-498d-88fe-a99ddc19a21c" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 8.311s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 947.288164] env[62070]: DEBUG oslo_concurrency.lockutils [req-2bf91cc3-2eef-4f5d-89a1-f81c3b6d58a2 req-fe6413f6-2acc-4b8f-9e90-2c69faefe2f5 service nova] Releasing lock "refresh_cache-21bcb1a6-833b-48f3-8ee2-0e49c64a104f" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 947.308809] env[62070]: DEBUG oslo_vmware.api [None req-2cdd6dc5-b80a-4fa8-acc2-8040ca3495d3 tempest-ServerShowV257Test-2131685284 
tempest-ServerShowV257Test-2131685284-project-member] Task: {'id': task-1122098, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 947.385021] env[62070]: DEBUG oslo_vmware.api [None req-40f8be5d-d37a-44a5-98e4-ab20fc83851e tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Task: {'id': task-1122099, 'name': ReconfigVM_Task, 'duration_secs': 0.543385} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 947.385379] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-40f8be5d-d37a-44a5-98e4-ab20fc83851e tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: e850734f-c49c-46d7-87ab-b0d6bed89d9b] Reconfigured VM instance instance-00000054 to attach disk [datastore2] e850734f-c49c-46d7-87ab-b0d6bed89d9b/e850734f-c49c-46d7-87ab-b0d6bed89d9b.vmdk or device None with type sparse {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 947.386133] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-53f910ee-cfdb-4952-8d9f-afae166d7b78 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.394366] env[62070]: DEBUG oslo_vmware.api [None req-40f8be5d-d37a-44a5-98e4-ab20fc83851e tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Waiting for the task: (returnval){ [ 947.394366] env[62070]: value = "task-1122101" [ 947.394366] env[62070]: _type = "Task" [ 947.394366] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 947.402877] env[62070]: DEBUG oslo_vmware.api [None req-40f8be5d-d37a-44a5-98e4-ab20fc83851e tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Task: {'id': task-1122101, 'name': Rename_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 947.403828] env[62070]: DEBUG nova.network.neutron [req-1f04d34b-88b3-4bba-9887-1316ecda05bf req-37afe621-7d70-46c5-bd2c-a0e96bd032a2 service nova] [instance: 519cad6a-ebe0-42db-a19e-27249b83436e] Updated VIF entry in instance network info cache for port 52474984-690f-441c-9477-d3d0a3ab1bb7. 
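The "Instance VIF info" entry a little above reduces each Neutron port to the handful of fields the VMware driver needs for the virtual NIC: bridge name, MAC, an OpaqueNetwork reference keyed by the NSX logical-switch id, the port id, and the vif model. A sketch of that mapping driven by the network_info structure seen in this log; it is not the driver's exact helper.

def vif_info_from_network_info(vif: dict, vif_model: str = "vmxnet3") -> dict:
    """Map one network_info VIF dict to the vif_info shape logged above."""
    details = vif["details"]
    return {
        "network_name": vif["network"]["bridge"],             # e.g. 'br-int'
        "mac_address": vif["address"],
        "network_ref": {
            "type": "OpaqueNetwork",
            "network-id": details["nsx-logical-switch-id"],
            "network-type": "nsx.LogicalSwitch",
            "use-external-id": True,
        },
        "iface_id": vif["id"],
        "vif_model": vif_model,
    }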
{{(pid=62070) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 947.404183] env[62070]: DEBUG nova.network.neutron [req-1f04d34b-88b3-4bba-9887-1316ecda05bf req-37afe621-7d70-46c5-bd2c-a0e96bd032a2 service nova] [instance: 519cad6a-ebe0-42db-a19e-27249b83436e] Updating instance_info_cache with network_info: [{"id": "52474984-690f-441c-9477-d3d0a3ab1bb7", "address": "fa:16:3e:74:17:31", "network": {"id": "5f4568f0-c3e8-497f-b7d6-8d92db2f4066", "bridge": "br-int", "label": "tempest-ImagesTestJSON-24352632-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "eac8e5edc8f14fff89aba7c8cb6cac5d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "54495d8d-2696-4f65-b925-e567abdc205f", "external-id": "nsx-vlan-transportzone-220", "segmentation_id": 220, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap52474984-69", "ovs_interfaceid": "52474984-690f-441c-9477-d3d0a3ab1bb7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 947.423114] env[62070]: DEBUG oslo_vmware.api [None req-87b139b2-90a5-4b70-b835-8cb543d252a2 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]5244ef5a-3a11-b197-33f5-8c027e858473, 'name': SearchDatastore_Task, 'duration_secs': 0.008965} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 947.423452] env[62070]: DEBUG oslo_concurrency.lockutils [None req-87b139b2-90a5-4b70-b835-8cb543d252a2 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Releasing lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 947.423766] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-87b139b2-90a5-4b70-b835-8cb543d252a2 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore1] 2c58db1d-405f-4489-85db-c74723be4a8d/2c58db1d-405f-4489-85db-c74723be4a8d.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 947.424073] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d87ab741-4c08-42a9-83d4-a576c8053b5f {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.432353] env[62070]: DEBUG oslo_vmware.api [None req-87b139b2-90a5-4b70-b835-8cb543d252a2 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Waiting for the task: (returnval){ [ 947.432353] env[62070]: value = "task-1122102" [ 947.432353] env[62070]: _type = "Task" [ 947.432353] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 947.444513] env[62070]: DEBUG oslo_vmware.api [None req-87b139b2-90a5-4b70-b835-8cb543d252a2 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Task: {'id': task-1122102, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 947.522564] env[62070]: DEBUG oslo_concurrency.lockutils [None req-58873f53-be6c-4871-8071-bfb0239a065e tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Lock "interface-21bcb1a6-833b-48f3-8ee2-0e49c64a104f-60714fe7-f6bc-4f1f-b4d0-a0b45b5a8f70" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 6.686s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 947.600123] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1122100, 'name': CreateVM_Task, 'duration_secs': 0.390962} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 947.600332] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 519cad6a-ebe0-42db-a19e-27249b83436e] Created VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 947.601089] env[62070]: DEBUG oslo_concurrency.lockutils [None req-1c8069d9-861b-46c0-a241-ffab59e77207 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 947.601302] env[62070]: DEBUG oslo_concurrency.lockutils [None req-1c8069d9-861b-46c0-a241-ffab59e77207 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Acquired lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 947.601652] env[62070]: DEBUG oslo_concurrency.lockutils [None req-1c8069d9-861b-46c0-a241-ffab59e77207 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 947.601951] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-21293911-ddae-4ea8-b40a-077915c542be {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.603989] env[62070]: INFO nova.compute.manager [None req-abee6904-6b65-479c-bc30-34040834c445 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] [instance: 7dc27fe6-495f-498d-88fe-a99ddc19a21c] Rescuing [ 947.604251] env[62070]: DEBUG oslo_concurrency.lockutils [None req-abee6904-6b65-479c-bc30-34040834c445 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Acquiring lock "refresh_cache-7dc27fe6-495f-498d-88fe-a99ddc19a21c" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 947.604412] env[62070]: DEBUG oslo_concurrency.lockutils [None req-abee6904-6b65-479c-bc30-34040834c445 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Acquired lock "refresh_cache-7dc27fe6-495f-498d-88fe-a99ddc19a21c" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 947.604591] env[62070]: DEBUG nova.network.neutron [None req-abee6904-6b65-479c-bc30-34040834c445 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] [instance: 7dc27fe6-495f-498d-88fe-a99ddc19a21c] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 947.610020] env[62070]: DEBUG oslo_vmware.api [None req-1c8069d9-861b-46c0-a241-ffab59e77207 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Waiting for the task: (returnval){ [ 947.610020] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]52fde6f1-59bf-a4a5-e2d4-e86f7747f346" [ 947.610020] env[62070]: 
_type = "Task" [ 947.610020] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 947.618942] env[62070]: DEBUG oslo_vmware.api [None req-1c8069d9-861b-46c0-a241-ffab59e77207 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52fde6f1-59bf-a4a5-e2d4-e86f7747f346, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 947.656368] env[62070]: DEBUG oslo_concurrency.lockutils [None req-4f5edc69-29d0-4c47-a9c1-354e8eddef6f tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.927s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 947.658905] env[62070]: DEBUG oslo_concurrency.lockutils [None req-89816dd1-86bd-4fe3-a58a-8a014d471f3f tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 25.313s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 947.682623] env[62070]: INFO nova.scheduler.client.report [None req-4f5edc69-29d0-4c47-a9c1-354e8eddef6f tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Deleted allocations for instance 3d699ce5-4d21-48f3-8f17-0cd49aebf109 [ 947.812807] env[62070]: DEBUG oslo_vmware.api [None req-2cdd6dc5-b80a-4fa8-acc2-8040ca3495d3 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] Task: {'id': task-1122098, 'name': ReconfigVM_Task, 'duration_secs': 1.097899} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 947.813222] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-2cdd6dc5-b80a-4fa8-acc2-8040ca3495d3 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] [instance: 1440361b-d3b4-4c1c-995c-fe7ff99ee297] Reconfigured VM instance instance-00000052 to attach disk [datastore2] 1440361b-d3b4-4c1c-995c-fe7ff99ee297/1440361b-d3b4-4c1c-995c-fe7ff99ee297.vmdk or device None with type sparse {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 947.813986] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b3e6943d-ae42-4c4d-a85b-b34167f9d9ca {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.826509] env[62070]: DEBUG oslo_vmware.api [None req-2cdd6dc5-b80a-4fa8-acc2-8040ca3495d3 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] Waiting for the task: (returnval){ [ 947.826509] env[62070]: value = "task-1122103" [ 947.826509] env[62070]: _type = "Task" [ 947.826509] env[62070]: } to complete. 
{{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 947.838908] env[62070]: DEBUG oslo_vmware.api [None req-2cdd6dc5-b80a-4fa8-acc2-8040ca3495d3 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] Task: {'id': task-1122103, 'name': Rename_Task} progress is 5%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 947.906458] env[62070]: DEBUG oslo_concurrency.lockutils [req-1f04d34b-88b3-4bba-9887-1316ecda05bf req-37afe621-7d70-46c5-bd2c-a0e96bd032a2 service nova] Releasing lock "refresh_cache-519cad6a-ebe0-42db-a19e-27249b83436e" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 947.906931] env[62070]: DEBUG oslo_vmware.api [None req-40f8be5d-d37a-44a5-98e4-ab20fc83851e tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Task: {'id': task-1122101, 'name': Rename_Task, 'duration_secs': 0.202777} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 947.907214] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-40f8be5d-d37a-44a5-98e4-ab20fc83851e tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: e850734f-c49c-46d7-87ab-b0d6bed89d9b] Powering on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 947.907520] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-853afff2-9803-4bf9-b01c-05c2e4327aa2 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.920261] env[62070]: DEBUG oslo_vmware.api [None req-40f8be5d-d37a-44a5-98e4-ab20fc83851e tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Waiting for the task: (returnval){ [ 947.920261] env[62070]: value = "task-1122104" [ 947.920261] env[62070]: _type = "Task" [ 947.920261] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 947.932369] env[62070]: DEBUG oslo_vmware.api [None req-40f8be5d-d37a-44a5-98e4-ab20fc83851e tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Task: {'id': task-1122104, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 947.942140] env[62070]: DEBUG oslo_vmware.api [None req-87b139b2-90a5-4b70-b835-8cb543d252a2 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Task: {'id': task-1122102, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 948.124302] env[62070]: DEBUG oslo_vmware.api [None req-1c8069d9-861b-46c0-a241-ffab59e77207 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52fde6f1-59bf-a4a5-e2d4-e86f7747f346, 'name': SearchDatastore_Task, 'duration_secs': 0.010391} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 948.124621] env[62070]: DEBUG oslo_concurrency.lockutils [None req-1c8069d9-861b-46c0-a241-ffab59e77207 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Releasing lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 948.124972] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-1c8069d9-861b-46c0-a241-ffab59e77207 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 519cad6a-ebe0-42db-a19e-27249b83436e] Processing image 43ea607c-7ece-4601-9b11-75c6a16aa7dd {{(pid=62070) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 948.125346] env[62070]: DEBUG oslo_concurrency.lockutils [None req-1c8069d9-861b-46c0-a241-ffab59e77207 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 948.125578] env[62070]: DEBUG oslo_concurrency.lockutils [None req-1c8069d9-861b-46c0-a241-ffab59e77207 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Acquired lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 948.125845] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-1c8069d9-861b-46c0-a241-ffab59e77207 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 948.126206] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1b582536-5342-4268-a3ad-d5f1154d388a {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.136464] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-1c8069d9-861b-46c0-a241-ffab59e77207 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 948.136822] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-1c8069d9-861b-46c0-a241-ffab59e77207 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62070) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 948.137953] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-771e99a7-da28-48c6-9ffe-cabaf9088559 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.145712] env[62070]: DEBUG oslo_vmware.api [None req-1c8069d9-861b-46c0-a241-ffab59e77207 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Waiting for the task: (returnval){ [ 948.145712] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]52d7eca4-8068-2725-8c97-845975dd2931" [ 948.145712] env[62070]: _type = "Task" [ 948.145712] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 948.164777] env[62070]: DEBUG oslo_vmware.api [None req-1c8069d9-861b-46c0-a241-ffab59e77207 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52d7eca4-8068-2725-8c97-845975dd2931, 'name': SearchDatastore_Task, 'duration_secs': 0.010725} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 948.166464] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f1aa6a2b-9a3e-4ce8-88dc-283fe40853c3 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.173175] env[62070]: DEBUG oslo_vmware.api [None req-1c8069d9-861b-46c0-a241-ffab59e77207 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Waiting for the task: (returnval){ [ 948.173175] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]5255f073-c906-ae0f-953f-3518269c5f79" [ 948.173175] env[62070]: _type = "Task" [ 948.173175] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 948.182180] env[62070]: DEBUG oslo_vmware.api [None req-1c8069d9-861b-46c0-a241-ffab59e77207 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]5255f073-c906-ae0f-953f-3518269c5f79, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 948.191323] env[62070]: DEBUG oslo_concurrency.lockutils [None req-4f5edc69-29d0-4c47-a9c1-354e8eddef6f tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Lock "3d699ce5-4d21-48f3-8f17-0cd49aebf109" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 31.568s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 948.340635] env[62070]: DEBUG oslo_vmware.api [None req-2cdd6dc5-b80a-4fa8-acc2-8040ca3495d3 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] Task: {'id': task-1122103, 'name': Rename_Task} progress is 99%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 948.432206] env[62070]: DEBUG oslo_vmware.api [None req-40f8be5d-d37a-44a5-98e4-ab20fc83851e tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Task: {'id': task-1122104, 'name': PowerOnVM_Task} progress is 92%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 948.433795] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30e49f00-34b0-434e-ac42-9cc5942bfb74 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.444658] env[62070]: DEBUG oslo_vmware.api [None req-87b139b2-90a5-4b70-b835-8cb543d252a2 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Task: {'id': task-1122102, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.585847} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 948.446589] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-87b139b2-90a5-4b70-b835-8cb543d252a2 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore1] 2c58db1d-405f-4489-85db-c74723be4a8d/2c58db1d-405f-4489-85db-c74723be4a8d.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 948.446830] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-87b139b2-90a5-4b70-b835-8cb543d252a2 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 2c58db1d-405f-4489-85db-c74723be4a8d] Extending root virtual disk to 1048576 {{(pid=62070) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 948.447142] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-92da7260-819e-40dd-8e06-031615a52a81 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.449832] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fe80b1f-ddfb-4946-9bf1-96bf00e09dda {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.496046] env[62070]: DEBUG nova.network.neutron [None req-abee6904-6b65-479c-bc30-34040834c445 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] [instance: 7dc27fe6-495f-498d-88fe-a99ddc19a21c] Updating instance_info_cache with network_info: [{"id": "cac26624-11c7-45a9-acb3-3e86b7232ab2", "address": "fa:16:3e:8c:68:96", "network": {"id": "754f4ec8-0bc6-4726-8b88-1a4e1a326699", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-293486644-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.209", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": 
"1a94db233e3a43dc9aa7cf887c6cb1f6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d2742ba-c3af-4412-877d-c2811dfeba46", "external-id": "nsx-vlan-transportzone-390", "segmentation_id": 390, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcac26624-11", "ovs_interfaceid": "cac26624-11c7-45a9-acb3-3e86b7232ab2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 948.501025] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59babaec-edda-48a5-ae38-4a7357171b8b {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.501818] env[62070]: DEBUG oslo_vmware.api [None req-87b139b2-90a5-4b70-b835-8cb543d252a2 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Waiting for the task: (returnval){ [ 948.501818] env[62070]: value = "task-1122105" [ 948.501818] env[62070]: _type = "Task" [ 948.501818] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 948.511299] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f888023-b857-4be5-8d6e-d517cd802b61 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.519445] env[62070]: DEBUG oslo_vmware.api [None req-87b139b2-90a5-4b70-b835-8cb543d252a2 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Task: {'id': task-1122105, 'name': ExtendVirtualDisk_Task} progress is 50%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 948.531562] env[62070]: DEBUG nova.compute.provider_tree [None req-89816dd1-86bd-4fe3-a58a-8a014d471f3f tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Updating inventory in ProviderTree for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 948.683718] env[62070]: DEBUG oslo_vmware.api [None req-1c8069d9-861b-46c0-a241-ffab59e77207 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]5255f073-c906-ae0f-953f-3518269c5f79, 'name': SearchDatastore_Task, 'duration_secs': 0.01274} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 948.683997] env[62070]: DEBUG oslo_concurrency.lockutils [None req-1c8069d9-861b-46c0-a241-ffab59e77207 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Releasing lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 948.684272] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-1c8069d9-861b-46c0-a241-ffab59e77207 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore1] 519cad6a-ebe0-42db-a19e-27249b83436e/519cad6a-ebe0-42db-a19e-27249b83436e.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 948.684587] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7f94492b-3044-4bfd-b73d-0241b46e0695 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.691364] env[62070]: DEBUG oslo_vmware.api [None req-1c8069d9-861b-46c0-a241-ffab59e77207 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Waiting for the task: (returnval){ [ 948.691364] env[62070]: value = "task-1122106" [ 948.691364] env[62070]: _type = "Task" [ 948.691364] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 948.699661] env[62070]: DEBUG oslo_vmware.api [None req-1c8069d9-861b-46c0-a241-ffab59e77207 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Task: {'id': task-1122106, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 948.838599] env[62070]: DEBUG oslo_vmware.api [None req-2cdd6dc5-b80a-4fa8-acc2-8040ca3495d3 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] Task: {'id': task-1122103, 'name': Rename_Task} progress is 99%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 948.882427] env[62070]: DEBUG nova.compute.manager [req-54c41c17-0903-414d-a0c6-3d4211e41b07 req-bbd094b6-304f-4d25-9e5e-7079a876a221 service nova] [instance: 84c00e4a-20d3-4739-8535-e27076d85a89] Received event network-changed-0e90b544-5a90-4009-8f52-635e393cf106 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 948.882549] env[62070]: DEBUG nova.compute.manager [req-54c41c17-0903-414d-a0c6-3d4211e41b07 req-bbd094b6-304f-4d25-9e5e-7079a876a221 service nova] [instance: 84c00e4a-20d3-4739-8535-e27076d85a89] Refreshing instance network info cache due to event network-changed-0e90b544-5a90-4009-8f52-635e393cf106. 
{{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 948.882769] env[62070]: DEBUG oslo_concurrency.lockutils [req-54c41c17-0903-414d-a0c6-3d4211e41b07 req-bbd094b6-304f-4d25-9e5e-7079a876a221 service nova] Acquiring lock "refresh_cache-84c00e4a-20d3-4739-8535-e27076d85a89" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 948.882912] env[62070]: DEBUG oslo_concurrency.lockutils [req-54c41c17-0903-414d-a0c6-3d4211e41b07 req-bbd094b6-304f-4d25-9e5e-7079a876a221 service nova] Acquired lock "refresh_cache-84c00e4a-20d3-4739-8535-e27076d85a89" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 948.883094] env[62070]: DEBUG nova.network.neutron [req-54c41c17-0903-414d-a0c6-3d4211e41b07 req-bbd094b6-304f-4d25-9e5e-7079a876a221 service nova] [instance: 84c00e4a-20d3-4739-8535-e27076d85a89] Refreshing network info cache for port 0e90b544-5a90-4009-8f52-635e393cf106 {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 948.932071] env[62070]: DEBUG oslo_vmware.api [None req-40f8be5d-d37a-44a5-98e4-ab20fc83851e tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Task: {'id': task-1122104, 'name': PowerOnVM_Task, 'duration_secs': 0.617727} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 948.932374] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-40f8be5d-d37a-44a5-98e4-ab20fc83851e tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: e850734f-c49c-46d7-87ab-b0d6bed89d9b] Powered on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 948.932583] env[62070]: INFO nova.compute.manager [None req-40f8be5d-d37a-44a5-98e4-ab20fc83851e tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: e850734f-c49c-46d7-87ab-b0d6bed89d9b] Took 8.79 seconds to spawn the instance on the hypervisor. [ 948.932763] env[62070]: DEBUG nova.compute.manager [None req-40f8be5d-d37a-44a5-98e4-ab20fc83851e tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: e850734f-c49c-46d7-87ab-b0d6bed89d9b] Checking state {{(pid=62070) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 948.933528] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-882a33d6-9c89-4086-8b6f-8d256e8a8a9e {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.003954] env[62070]: DEBUG oslo_concurrency.lockutils [None req-abee6904-6b65-479c-bc30-34040834c445 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Releasing lock "refresh_cache-7dc27fe6-495f-498d-88fe-a99ddc19a21c" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 949.019167] env[62070]: DEBUG oslo_vmware.api [None req-87b139b2-90a5-4b70-b835-8cb543d252a2 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Task: {'id': task-1122105, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.086001} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 949.021576] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-87b139b2-90a5-4b70-b835-8cb543d252a2 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 2c58db1d-405f-4489-85db-c74723be4a8d] Extended root virtual disk {{(pid=62070) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 949.022631] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d93ef3f-1626-469a-a9a8-855b2dc58d8f {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.051380] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-87b139b2-90a5-4b70-b835-8cb543d252a2 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 2c58db1d-405f-4489-85db-c74723be4a8d] Reconfiguring VM instance instance-00000055 to attach disk [datastore1] 2c58db1d-405f-4489-85db-c74723be4a8d/2c58db1d-405f-4489-85db-c74723be4a8d.vmdk or device None with type sparse {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 949.052668] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1de9f874-5625-4a09-8851-0333317c4354 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.068090] env[62070]: ERROR nova.scheduler.client.report [None req-89816dd1-86bd-4fe3-a58a-8a014d471f3f tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [req-14a2e9d7-a5fd-4ce4-978a-70c32fcce6d3] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 21c7c111-1b69-4468-b2c4-5dd96014fbd6. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-14a2e9d7-a5fd-4ce4-978a-70c32fcce6d3"}]} [ 949.078565] env[62070]: DEBUG oslo_vmware.api [None req-87b139b2-90a5-4b70-b835-8cb543d252a2 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Waiting for the task: (returnval){ [ 949.078565] env[62070]: value = "task-1122107" [ 949.078565] env[62070]: _type = "Task" [ 949.078565] env[62070]: } to complete. 
{{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 949.083898] env[62070]: DEBUG nova.scheduler.client.report [None req-89816dd1-86bd-4fe3-a58a-8a014d471f3f tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Refreshing inventories for resource provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 949.089131] env[62070]: DEBUG oslo_vmware.api [None req-87b139b2-90a5-4b70-b835-8cb543d252a2 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Task: {'id': task-1122107, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 949.101341] env[62070]: DEBUG nova.scheduler.client.report [None req-89816dd1-86bd-4fe3-a58a-8a014d471f3f tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Updating ProviderTree inventory for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 949.101705] env[62070]: DEBUG nova.compute.provider_tree [None req-89816dd1-86bd-4fe3-a58a-8a014d471f3f tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Updating inventory in ProviderTree for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 949.115361] env[62070]: DEBUG nova.scheduler.client.report [None req-89816dd1-86bd-4fe3-a58a-8a014d471f3f tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Refreshing aggregate associations for resource provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6, aggregates: None {{(pid=62070) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 949.132142] env[62070]: DEBUG nova.scheduler.client.report [None req-89816dd1-86bd-4fe3-a58a-8a014d471f3f tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Refreshing trait associations for resource provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6, traits: COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO {{(pid=62070) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 949.204063] env[62070]: DEBUG oslo_vmware.api [None req-1c8069d9-861b-46c0-a241-ffab59e77207 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Task: {'id': task-1122106, 'name': CopyVirtualDisk_Task} progress is 
4%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 949.244047] env[62070]: DEBUG oslo_concurrency.lockutils [None req-c61ce357-31c8-431b-963a-c1a550dc6823 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Acquiring lock "interface-21bcb1a6-833b-48f3-8ee2-0e49c64a104f-60714fe7-f6bc-4f1f-b4d0-a0b45b5a8f70" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 949.244368] env[62070]: DEBUG oslo_concurrency.lockutils [None req-c61ce357-31c8-431b-963a-c1a550dc6823 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Lock "interface-21bcb1a6-833b-48f3-8ee2-0e49c64a104f-60714fe7-f6bc-4f1f-b4d0-a0b45b5a8f70" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 949.343033] env[62070]: DEBUG oslo_vmware.api [None req-2cdd6dc5-b80a-4fa8-acc2-8040ca3495d3 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] Task: {'id': task-1122103, 'name': Rename_Task, 'duration_secs': 1.191285} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 949.343033] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-2cdd6dc5-b80a-4fa8-acc2-8040ca3495d3 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] [instance: 1440361b-d3b4-4c1c-995c-fe7ff99ee297] Powering on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 949.343794] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e576b289-80a0-44e3-9a1b-c6fdd7a50d3a {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.352456] env[62070]: DEBUG oslo_vmware.api [None req-2cdd6dc5-b80a-4fa8-acc2-8040ca3495d3 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] Waiting for the task: (returnval){ [ 949.352456] env[62070]: value = "task-1122108" [ 949.352456] env[62070]: _type = "Task" [ 949.352456] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 949.369096] env[62070]: DEBUG oslo_vmware.api [None req-2cdd6dc5-b80a-4fa8-acc2-8040ca3495d3 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] Task: {'id': task-1122108, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 949.389184] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8a0df2b-d8f0-4a35-930e-b24953a537df {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.400555] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c4254af-7e1e-490a-93e0-45c98bc3211e {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.409464] env[62070]: DEBUG oslo_concurrency.lockutils [None req-f98075f3-04a2-432f-a2ed-d24c86458581 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Acquiring lock "1d595bc8-ab51-4443-bf32-079078f3133b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 949.409848] env[62070]: DEBUG oslo_concurrency.lockutils [None req-f98075f3-04a2-432f-a2ed-d24c86458581 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Lock "1d595bc8-ab51-4443-bf32-079078f3133b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 949.454314] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c50f1b1-c274-4a35-ba9a-bdecd0bfb1c4 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.458423] env[62070]: INFO nova.compute.manager [None req-40f8be5d-d37a-44a5-98e4-ab20fc83851e tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: e850734f-c49c-46d7-87ab-b0d6bed89d9b] Took 33.44 seconds to build instance. 
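The repeated 'Waiting for the task: (returnval){ ... }' blocks and the 'Task: {...} progress is N%' / 'completed successfully' lines above are oslo.vmware's wait_for_task/_poll_task loop: the driver invokes an asynchronous vSphere *_Task method (CopyVirtualDisk_Task, Rename_Task, PowerOnVM_Task, ...) and then blocks while the session polls the task object. As a rough illustration only, and not the driver's actual code, a minimal sketch of that usage with oslo.vmware could look like the following; the vCenter host, credentials and the VM managed-object reference are placeholders.

# Minimal sketch (assumptions: a reachable vCenter, valid credentials,
# and an existing VM moref obtained elsewhere). It mirrors the pattern
# visible in the log: start a *_Task call, then poll it to completion.
from oslo_vmware import api

session = api.VMwareAPISession(
    'vc.example.test',                 # placeholder vCenter host
    'administrator@vsphere.local',     # placeholder username
    'secret',                          # placeholder password
    api_retry_count=10,
    task_poll_interval=0.5,
)

vm_ref = ...  # placeholder: a VirtualMachine managed-object reference

# Kick off an asynchronous vSphere task ...
task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
# ... and block while oslo.vmware polls it; this polling is what emits
# the "progress is N%" / "completed successfully" lines seen above.
session.wait_for_task(task)

wait_for_task() returns the task info on success and raises if the task ends in an error state, which is why each task in the log either reports 'completed successfully' or surfaces as an ERROR.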
[ 949.469328] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b270d61c-801e-4bab-b0be-98f33201f3a5 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.489372] env[62070]: DEBUG nova.compute.provider_tree [None req-89816dd1-86bd-4fe3-a58a-8a014d471f3f tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Updating inventory in ProviderTree for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 949.534191] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-abee6904-6b65-479c-bc30-34040834c445 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] [instance: 7dc27fe6-495f-498d-88fe-a99ddc19a21c] Powering off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 949.534827] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-fe14e6cd-82f3-4f99-8443-5943a29c50a7 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.547078] env[62070]: DEBUG oslo_vmware.api [None req-abee6904-6b65-479c-bc30-34040834c445 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Waiting for the task: (returnval){ [ 949.547078] env[62070]: value = "task-1122109" [ 949.547078] env[62070]: _type = "Task" [ 949.547078] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 949.557603] env[62070]: DEBUG oslo_vmware.api [None req-abee6904-6b65-479c-bc30-34040834c445 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Task: {'id': task-1122109, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 949.593110] env[62070]: DEBUG oslo_vmware.api [None req-87b139b2-90a5-4b70-b835-8cb543d252a2 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Task: {'id': task-1122107, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 949.703176] env[62070]: DEBUG oslo_vmware.api [None req-1c8069d9-861b-46c0-a241-ffab59e77207 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Task: {'id': task-1122106, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.909054} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 949.703462] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-1c8069d9-861b-46c0-a241-ffab59e77207 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore1] 519cad6a-ebe0-42db-a19e-27249b83436e/519cad6a-ebe0-42db-a19e-27249b83436e.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 949.703708] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-1c8069d9-861b-46c0-a241-ffab59e77207 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 519cad6a-ebe0-42db-a19e-27249b83436e] Extending root virtual disk to 1048576 {{(pid=62070) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 949.703974] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c0f04ec6-3c07-4322-b8a7-395dc4320912 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.711890] env[62070]: DEBUG oslo_vmware.api [None req-1c8069d9-861b-46c0-a241-ffab59e77207 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Waiting for the task: (returnval){ [ 949.711890] env[62070]: value = "task-1122110" [ 949.711890] env[62070]: _type = "Task" [ 949.711890] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 949.722144] env[62070]: DEBUG oslo_vmware.api [None req-1c8069d9-861b-46c0-a241-ffab59e77207 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Task: {'id': task-1122110, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 949.746390] env[62070]: DEBUG nova.network.neutron [req-54c41c17-0903-414d-a0c6-3d4211e41b07 req-bbd094b6-304f-4d25-9e5e-7079a876a221 service nova] [instance: 84c00e4a-20d3-4739-8535-e27076d85a89] Updated VIF entry in instance network info cache for port 0e90b544-5a90-4009-8f52-635e393cf106. 
{{(pid=62070) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 949.746840] env[62070]: DEBUG nova.network.neutron [req-54c41c17-0903-414d-a0c6-3d4211e41b07 req-bbd094b6-304f-4d25-9e5e-7079a876a221 service nova] [instance: 84c00e4a-20d3-4739-8535-e27076d85a89] Updating instance_info_cache with network_info: [{"id": "0e90b544-5a90-4009-8f52-635e393cf106", "address": "fa:16:3e:48:de:00", "network": {"id": "0d81bd04-b549-4e1f-97a2-0a0b9391dd3f", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-108214409-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.192", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c91e5eeeeb1742f499b2edaf76a93a3b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cf5bfbae-a882-4d34-be33-b31e274b3077", "external-id": "nsx-vlan-transportzone-556", "segmentation_id": 556, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0e90b544-5a", "ovs_interfaceid": "0e90b544-5a90-4009-8f52-635e393cf106", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 949.749012] env[62070]: DEBUG oslo_concurrency.lockutils [None req-c61ce357-31c8-431b-963a-c1a550dc6823 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Acquiring lock "21bcb1a6-833b-48f3-8ee2-0e49c64a104f" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 949.749337] env[62070]: DEBUG oslo_concurrency.lockutils [None req-c61ce357-31c8-431b-963a-c1a550dc6823 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Acquired lock "21bcb1a6-833b-48f3-8ee2-0e49c64a104f" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 949.750270] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c1f662e-581d-4105-8142-87e4955b176a {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.773028] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c071d909-c5ab-49ba-8941-069016c0929f {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.800445] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-c61ce357-31c8-431b-963a-c1a550dc6823 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] [instance: 21bcb1a6-833b-48f3-8ee2-0e49c64a104f] Reconfiguring VM to detach interface {{(pid=62070) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1974}} [ 949.801318] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bf83d3a3-5084-4653-af05-21e0e12b0e98 {{(pid=62070) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.820097] env[62070]: DEBUG oslo_vmware.api [None req-c61ce357-31c8-431b-963a-c1a550dc6823 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Waiting for the task: (returnval){ [ 949.820097] env[62070]: value = "task-1122111" [ 949.820097] env[62070]: _type = "Task" [ 949.820097] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 949.829228] env[62070]: DEBUG oslo_vmware.api [None req-c61ce357-31c8-431b-963a-c1a550dc6823 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Task: {'id': task-1122111, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 949.862667] env[62070]: DEBUG oslo_vmware.api [None req-2cdd6dc5-b80a-4fa8-acc2-8040ca3495d3 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] Task: {'id': task-1122108, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 949.912748] env[62070]: DEBUG nova.compute.manager [None req-f98075f3-04a2-432f-a2ed-d24c86458581 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: 1d595bc8-ab51-4443-bf32-079078f3133b] Starting instance... {{(pid=62070) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 949.961558] env[62070]: DEBUG oslo_concurrency.lockutils [None req-40f8be5d-d37a-44a5-98e4-ab20fc83851e tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Lock "e850734f-c49c-46d7-87ab-b0d6bed89d9b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 34.946s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 950.022104] env[62070]: DEBUG nova.scheduler.client.report [None req-89816dd1-86bd-4fe3-a58a-8a014d471f3f tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Updated inventory for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 with generation 117 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 950.022409] env[62070]: DEBUG nova.compute.provider_tree [None req-89816dd1-86bd-4fe3-a58a-8a014d471f3f tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Updating resource provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 generation from 117 to 118 during operation: update_inventory {{(pid=62070) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 950.022622] env[62070]: DEBUG nova.compute.provider_tree [None req-89816dd1-86bd-4fe3-a58a-8a014d471f3f tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Updating inventory in 
ProviderTree for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 950.056981] env[62070]: DEBUG oslo_vmware.api [None req-abee6904-6b65-479c-bc30-34040834c445 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Task: {'id': task-1122109, 'name': PowerOffVM_Task, 'duration_secs': 0.331501} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 950.057250] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-abee6904-6b65-479c-bc30-34040834c445 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] [instance: 7dc27fe6-495f-498d-88fe-a99ddc19a21c] Powered off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 950.058078] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4d88d2d-c8bd-45a6-ae54-2daaa9b92039 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.062179] env[62070]: DEBUG oslo_concurrency.lockutils [None req-a2ff1b3f-a3cc-4740-ac74-02cd61aa5bc0 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Acquiring lock "e850734f-c49c-46d7-87ab-b0d6bed89d9b" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 950.062408] env[62070]: DEBUG oslo_concurrency.lockutils [None req-a2ff1b3f-a3cc-4740-ac74-02cd61aa5bc0 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Lock "e850734f-c49c-46d7-87ab-b0d6bed89d9b" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 950.062608] env[62070]: DEBUG nova.compute.manager [None req-a2ff1b3f-a3cc-4740-ac74-02cd61aa5bc0 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: e850734f-c49c-46d7-87ab-b0d6bed89d9b] Checking state {{(pid=62070) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 950.063749] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b86aaa1e-a660-4100-a2af-7eec8efd0bfc {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.086461] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7cdb054-c7bf-4a4f-840f-d8aaeacb284f {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.090569] env[62070]: DEBUG nova.compute.manager [None req-a2ff1b3f-a3cc-4740-ac74-02cd61aa5bc0 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 
e850734f-c49c-46d7-87ab-b0d6bed89d9b] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62070) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3368}} [ 950.091156] env[62070]: DEBUG nova.objects.instance [None req-a2ff1b3f-a3cc-4740-ac74-02cd61aa5bc0 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Lazy-loading 'flavor' on Instance uuid e850734f-c49c-46d7-87ab-b0d6bed89d9b {{(pid=62070) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 950.097247] env[62070]: DEBUG oslo_vmware.api [None req-87b139b2-90a5-4b70-b835-8cb543d252a2 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Task: {'id': task-1122107, 'name': ReconfigVM_Task, 'duration_secs': 0.576904} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 950.099434] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-87b139b2-90a5-4b70-b835-8cb543d252a2 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 2c58db1d-405f-4489-85db-c74723be4a8d] Reconfigured VM instance instance-00000055 to attach disk [datastore1] 2c58db1d-405f-4489-85db-c74723be4a8d/2c58db1d-405f-4489-85db-c74723be4a8d.vmdk or device None with type sparse {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 950.102195] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9db5e822-dd7f-4993-b285-121e890a0718 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.109175] env[62070]: DEBUG oslo_vmware.api [None req-87b139b2-90a5-4b70-b835-8cb543d252a2 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Waiting for the task: (returnval){ [ 950.109175] env[62070]: value = "task-1122112" [ 950.109175] env[62070]: _type = "Task" [ 950.109175] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 950.118158] env[62070]: DEBUG oslo_vmware.api [None req-87b139b2-90a5-4b70-b835-8cb543d252a2 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Task: {'id': task-1122112, 'name': Rename_Task} progress is 5%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 950.126141] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-abee6904-6b65-479c-bc30-34040834c445 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] [instance: 7dc27fe6-495f-498d-88fe-a99ddc19a21c] Powering off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 950.126414] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1db00ec8-da63-46eb-bd64-ac332ae6f41b {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.134400] env[62070]: DEBUG oslo_vmware.api [None req-abee6904-6b65-479c-bc30-34040834c445 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Waiting for the task: (returnval){ [ 950.134400] env[62070]: value = "task-1122113" [ 950.134400] env[62070]: _type = "Task" [ 950.134400] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 950.143644] env[62070]: DEBUG oslo_vmware.api [None req-abee6904-6b65-479c-bc30-34040834c445 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Task: {'id': task-1122113, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 950.222567] env[62070]: DEBUG oslo_vmware.api [None req-1c8069d9-861b-46c0-a241-ffab59e77207 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Task: {'id': task-1122110, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.080667} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 950.222859] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-1c8069d9-861b-46c0-a241-ffab59e77207 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 519cad6a-ebe0-42db-a19e-27249b83436e] Extended root virtual disk {{(pid=62070) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 950.223652] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d1fc2ca-83d5-45bc-a4eb-fbe3049aec96 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.245645] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-1c8069d9-861b-46c0-a241-ffab59e77207 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 519cad6a-ebe0-42db-a19e-27249b83436e] Reconfiguring VM instance instance-00000056 to attach disk [datastore1] 519cad6a-ebe0-42db-a19e-27249b83436e/519cad6a-ebe0-42db-a19e-27249b83436e.vmdk or device None with type sparse {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 950.246236] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-085123db-3bc1-4def-b510-6a9697742ef6 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.260407] env[62070]: DEBUG oslo_concurrency.lockutils [req-54c41c17-0903-414d-a0c6-3d4211e41b07 req-bbd094b6-304f-4d25-9e5e-7079a876a221 service nova] Releasing lock "refresh_cache-84c00e4a-20d3-4739-8535-e27076d85a89" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 950.266681] env[62070]: DEBUG oslo_vmware.api [None req-1c8069d9-861b-46c0-a241-ffab59e77207 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Waiting for the task: (returnval){ [ 950.266681] env[62070]: value = "task-1122114" [ 950.266681] env[62070]: _type = "Task" [ 950.266681] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 950.274723] env[62070]: DEBUG oslo_vmware.api [None req-1c8069d9-861b-46c0-a241-ffab59e77207 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Task: {'id': task-1122114, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 950.330652] env[62070]: DEBUG oslo_vmware.api [None req-c61ce357-31c8-431b-963a-c1a550dc6823 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Task: {'id': task-1122111, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 950.363166] env[62070]: DEBUG oslo_vmware.api [None req-2cdd6dc5-b80a-4fa8-acc2-8040ca3495d3 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] Task: {'id': task-1122108, 'name': PowerOnVM_Task, 'duration_secs': 0.662981} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 950.363523] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-2cdd6dc5-b80a-4fa8-acc2-8040ca3495d3 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] [instance: 1440361b-d3b4-4c1c-995c-fe7ff99ee297] Powered on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 950.363740] env[62070]: DEBUG nova.compute.manager [None req-2cdd6dc5-b80a-4fa8-acc2-8040ca3495d3 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] [instance: 1440361b-d3b4-4c1c-995c-fe7ff99ee297] Checking state {{(pid=62070) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 950.364564] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ad85c98-1028-42f6-9e68-5ad6d961f91f {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.433677] env[62070]: DEBUG oslo_concurrency.lockutils [None req-f98075f3-04a2-432f-a2ed-d24c86458581 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 950.596388] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-a2ff1b3f-a3cc-4740-ac74-02cd61aa5bc0 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: e850734f-c49c-46d7-87ab-b0d6bed89d9b] Powering off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 950.596674] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-69b12432-503d-487f-9b39-0cee811c3e61 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.605254] env[62070]: DEBUG oslo_vmware.api [None req-a2ff1b3f-a3cc-4740-ac74-02cd61aa5bc0 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Waiting for the task: (returnval){ [ 950.605254] env[62070]: value = "task-1122115" [ 950.605254] env[62070]: _type = "Task" [ 950.605254] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 950.617943] env[62070]: DEBUG oslo_vmware.api [None req-a2ff1b3f-a3cc-4740-ac74-02cd61aa5bc0 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Task: {'id': task-1122115, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 950.623370] env[62070]: DEBUG oslo_vmware.api [None req-87b139b2-90a5-4b70-b835-8cb543d252a2 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Task: {'id': task-1122112, 'name': Rename_Task, 'duration_secs': 0.229181} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 950.623632] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-87b139b2-90a5-4b70-b835-8cb543d252a2 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 2c58db1d-405f-4489-85db-c74723be4a8d] Powering on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 950.623871] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1835c26f-fece-43b6-952e-1858ed8688e0 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.630178] env[62070]: DEBUG oslo_vmware.api [None req-87b139b2-90a5-4b70-b835-8cb543d252a2 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Waiting for the task: (returnval){ [ 950.630178] env[62070]: value = "task-1122116" [ 950.630178] env[62070]: _type = "Task" [ 950.630178] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 950.638289] env[62070]: DEBUG oslo_vmware.api [None req-87b139b2-90a5-4b70-b835-8cb543d252a2 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Task: {'id': task-1122116, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 950.646939] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-abee6904-6b65-479c-bc30-34040834c445 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] [instance: 7dc27fe6-495f-498d-88fe-a99ddc19a21c] VM already powered off {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1509}} [ 950.647283] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-abee6904-6b65-479c-bc30-34040834c445 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] [instance: 7dc27fe6-495f-498d-88fe-a99ddc19a21c] Processing image 43ea607c-7ece-4601-9b11-75c6a16aa7dd {{(pid=62070) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 950.647657] env[62070]: DEBUG oslo_concurrency.lockutils [None req-abee6904-6b65-479c-bc30-34040834c445 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 950.647841] env[62070]: DEBUG oslo_concurrency.lockutils [None req-abee6904-6b65-479c-bc30-34040834c445 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Acquired lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 950.648060] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-abee6904-6b65-479c-bc30-34040834c445 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62070) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 950.648363] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-769f00e1-b838-4f1d-942c-e0349edd1679 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.677271] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-abee6904-6b65-479c-bc30-34040834c445 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 950.677487] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-abee6904-6b65-479c-bc30-34040834c445 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62070) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 950.678260] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5ff3034f-510b-4d4f-a689-88b7ef4e9a6a {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.685072] env[62070]: DEBUG oslo_vmware.api [None req-abee6904-6b65-479c-bc30-34040834c445 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Waiting for the task: (returnval){ [ 950.685072] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]52a203eb-43c2-2030-ab80-4ea133a9bd84" [ 950.685072] env[62070]: _type = "Task" [ 950.685072] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 950.693821] env[62070]: DEBUG oslo_vmware.api [None req-abee6904-6b65-479c-bc30-34040834c445 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52a203eb-43c2-2030-ab80-4ea133a9bd84, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 950.777095] env[62070]: DEBUG oslo_vmware.api [None req-1c8069d9-861b-46c0-a241-ffab59e77207 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Task: {'id': task-1122114, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 950.831740] env[62070]: DEBUG oslo_vmware.api [None req-c61ce357-31c8-431b-963a-c1a550dc6823 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Task: {'id': task-1122111, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 950.882582] env[62070]: DEBUG oslo_concurrency.lockutils [None req-2cdd6dc5-b80a-4fa8-acc2-8040ca3495d3 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 951.032602] env[62070]: DEBUG oslo_concurrency.lockutils [None req-89816dd1-86bd-4fe3-a58a-8a014d471f3f tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 3.374s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 951.035414] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 25.899s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 951.115084] env[62070]: DEBUG oslo_vmware.api [None req-a2ff1b3f-a3cc-4740-ac74-02cd61aa5bc0 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Task: {'id': task-1122115, 'name': PowerOffVM_Task, 'duration_secs': 0.369451} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 951.115373] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-a2ff1b3f-a3cc-4740-ac74-02cd61aa5bc0 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: e850734f-c49c-46d7-87ab-b0d6bed89d9b] Powered off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 951.115578] env[62070]: DEBUG nova.compute.manager [None req-a2ff1b3f-a3cc-4740-ac74-02cd61aa5bc0 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: e850734f-c49c-46d7-87ab-b0d6bed89d9b] Checking state {{(pid=62070) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 951.116348] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9af3c72-084d-4641-9d32-a06e516dd758 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.139290] env[62070]: DEBUG oslo_vmware.api [None req-87b139b2-90a5-4b70-b835-8cb543d252a2 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Task: {'id': task-1122116, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 951.195831] env[62070]: DEBUG oslo_vmware.api [None req-abee6904-6b65-479c-bc30-34040834c445 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52a203eb-43c2-2030-ab80-4ea133a9bd84, 'name': SearchDatastore_Task, 'duration_secs': 0.018652} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 951.196629] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b6766226-733c-446f-be98-5bc44fbe8de6 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.202368] env[62070]: DEBUG oslo_vmware.api [None req-abee6904-6b65-479c-bc30-34040834c445 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Waiting for the task: (returnval){ [ 951.202368] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]528ddf46-e673-14b5-f6a1-1c7ab4928ec1" [ 951.202368] env[62070]: _type = "Task" [ 951.202368] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 951.218923] env[62070]: DEBUG oslo_vmware.api [None req-abee6904-6b65-479c-bc30-34040834c445 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]528ddf46-e673-14b5-f6a1-1c7ab4928ec1, 'name': SearchDatastore_Task, 'duration_secs': 0.012294} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 951.219159] env[62070]: DEBUG oslo_concurrency.lockutils [None req-abee6904-6b65-479c-bc30-34040834c445 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Releasing lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 951.219436] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-abee6904-6b65-479c-bc30-34040834c445 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Copying virtual disk from [datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore2] 7dc27fe6-495f-498d-88fe-a99ddc19a21c/43ea607c-7ece-4601-9b11-75c6a16aa7dd-rescue.vmdk. {{(pid=62070) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 951.219721] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-32b38c0e-4e10-4ed2-b266-1f7f872c0885 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.226796] env[62070]: DEBUG oslo_vmware.api [None req-abee6904-6b65-479c-bc30-34040834c445 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Waiting for the task: (returnval){ [ 951.226796] env[62070]: value = "task-1122117" [ 951.226796] env[62070]: _type = "Task" [ 951.226796] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 951.235497] env[62070]: DEBUG oslo_vmware.api [None req-abee6904-6b65-479c-bc30-34040834c445 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Task: {'id': task-1122117, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 951.277534] env[62070]: DEBUG oslo_vmware.api [None req-1c8069d9-861b-46c0-a241-ffab59e77207 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Task: {'id': task-1122114, 'name': ReconfigVM_Task, 'duration_secs': 0.798183} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 951.278190] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-1c8069d9-861b-46c0-a241-ffab59e77207 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 519cad6a-ebe0-42db-a19e-27249b83436e] Reconfigured VM instance instance-00000056 to attach disk [datastore1] 519cad6a-ebe0-42db-a19e-27249b83436e/519cad6a-ebe0-42db-a19e-27249b83436e.vmdk or device None with type sparse {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 951.278865] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c28bce43-123e-43a0-a20a-f95ad7da5984 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.286690] env[62070]: DEBUG oslo_vmware.api [None req-1c8069d9-861b-46c0-a241-ffab59e77207 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Waiting for the task: (returnval){ [ 951.286690] env[62070]: value = "task-1122118" [ 951.286690] env[62070]: _type = "Task" [ 951.286690] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 951.295595] env[62070]: DEBUG oslo_vmware.api [None req-1c8069d9-861b-46c0-a241-ffab59e77207 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Task: {'id': task-1122118, 'name': Rename_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 951.332779] env[62070]: DEBUG oslo_vmware.api [None req-c61ce357-31c8-431b-963a-c1a550dc6823 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Task: {'id': task-1122111, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 951.616523] env[62070]: INFO nova.scheduler.client.report [None req-89816dd1-86bd-4fe3-a58a-8a014d471f3f tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Deleted allocation for migration 06237253-febe-4e76-9585-3c95ef9cd7fe [ 951.628252] env[62070]: DEBUG oslo_concurrency.lockutils [None req-a2ff1b3f-a3cc-4740-ac74-02cd61aa5bc0 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Lock "e850734f-c49c-46d7-87ab-b0d6bed89d9b" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.566s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 951.642602] env[62070]: DEBUG oslo_vmware.api [None req-87b139b2-90a5-4b70-b835-8cb543d252a2 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Task: {'id': task-1122116, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 951.739008] env[62070]: DEBUG oslo_vmware.api [None req-abee6904-6b65-479c-bc30-34040834c445 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Task: {'id': task-1122117, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.453751} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 951.739324] env[62070]: INFO nova.virt.vmwareapi.ds_util [None req-abee6904-6b65-479c-bc30-34040834c445 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Copied virtual disk from [datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore2] 7dc27fe6-495f-498d-88fe-a99ddc19a21c/43ea607c-7ece-4601-9b11-75c6a16aa7dd-rescue.vmdk. [ 951.740252] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dce30392-27e6-419c-a880-b4f9c3028d50 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.769350] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-abee6904-6b65-479c-bc30-34040834c445 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] [instance: 7dc27fe6-495f-498d-88fe-a99ddc19a21c] Reconfiguring VM instance instance-0000004e to attach disk [datastore2] 7dc27fe6-495f-498d-88fe-a99ddc19a21c/43ea607c-7ece-4601-9b11-75c6a16aa7dd-rescue.vmdk or device None with type thin {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 951.769713] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9b47cd1c-088c-4a75-a901-0e7b6bd77b5c {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.789615] env[62070]: DEBUG oslo_vmware.api [None req-abee6904-6b65-479c-bc30-34040834c445 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Waiting for the task: (returnval){ [ 951.789615] env[62070]: value = "task-1122119" [ 951.789615] env[62070]: _type = "Task" [ 951.789615] env[62070]: } to complete. 
{{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 951.800519] env[62070]: DEBUG oslo_concurrency.lockutils [None req-d98f3ade-369e-4dac-bd29-f61163b09e57 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] Acquiring lock "1440361b-d3b4-4c1c-995c-fe7ff99ee297" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 951.800775] env[62070]: DEBUG oslo_concurrency.lockutils [None req-d98f3ade-369e-4dac-bd29-f61163b09e57 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] Lock "1440361b-d3b4-4c1c-995c-fe7ff99ee297" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 951.800986] env[62070]: DEBUG oslo_concurrency.lockutils [None req-d98f3ade-369e-4dac-bd29-f61163b09e57 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] Acquiring lock "1440361b-d3b4-4c1c-995c-fe7ff99ee297-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 951.801198] env[62070]: DEBUG oslo_concurrency.lockutils [None req-d98f3ade-369e-4dac-bd29-f61163b09e57 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] Lock "1440361b-d3b4-4c1c-995c-fe7ff99ee297-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 951.801371] env[62070]: DEBUG oslo_concurrency.lockutils [None req-d98f3ade-369e-4dac-bd29-f61163b09e57 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] Lock "1440361b-d3b4-4c1c-995c-fe7ff99ee297-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 951.802968] env[62070]: DEBUG oslo_vmware.api [None req-1c8069d9-861b-46c0-a241-ffab59e77207 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Task: {'id': task-1122118, 'name': Rename_Task, 'duration_secs': 0.203962} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 951.806113] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-1c8069d9-861b-46c0-a241-ffab59e77207 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 519cad6a-ebe0-42db-a19e-27249b83436e] Powering on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 951.806380] env[62070]: DEBUG oslo_vmware.api [None req-abee6904-6b65-479c-bc30-34040834c445 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Task: {'id': task-1122119, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 951.806846] env[62070]: INFO nova.compute.manager [None req-d98f3ade-369e-4dac-bd29-f61163b09e57 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] [instance: 1440361b-d3b4-4c1c-995c-fe7ff99ee297] Terminating instance [ 951.808247] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-dfcbc696-e260-4961-8d5f-84c401bb9d64 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.809970] env[62070]: DEBUG oslo_concurrency.lockutils [None req-d98f3ade-369e-4dac-bd29-f61163b09e57 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] Acquiring lock "refresh_cache-1440361b-d3b4-4c1c-995c-fe7ff99ee297" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 951.810213] env[62070]: DEBUG oslo_concurrency.lockutils [None req-d98f3ade-369e-4dac-bd29-f61163b09e57 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] Acquired lock "refresh_cache-1440361b-d3b4-4c1c-995c-fe7ff99ee297" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 951.810323] env[62070]: DEBUG nova.network.neutron [None req-d98f3ade-369e-4dac-bd29-f61163b09e57 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] [instance: 1440361b-d3b4-4c1c-995c-fe7ff99ee297] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 951.817280] env[62070]: DEBUG oslo_vmware.api [None req-1c8069d9-861b-46c0-a241-ffab59e77207 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Waiting for the task: (returnval){ [ 951.817280] env[62070]: value = "task-1122120" [ 951.817280] env[62070]: _type = "Task" [ 951.817280] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 951.829113] env[62070]: DEBUG oslo_vmware.api [None req-1c8069d9-861b-46c0-a241-ffab59e77207 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Task: {'id': task-1122120, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 951.834899] env[62070]: DEBUG oslo_vmware.api [None req-c61ce357-31c8-431b-963a-c1a550dc6823 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Task: {'id': task-1122111, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 952.081977] env[62070]: DEBUG nova.compute.resource_tracker [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Instance 71aead12-a182-40a7-b5a9-91c01271b800 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62070) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 952.082179] env[62070]: DEBUG nova.compute.resource_tracker [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Instance 67e99ada-a8e6-4034-b19b-5b2cb883b735 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62070) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 952.082327] env[62070]: DEBUG nova.compute.resource_tracker [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Instance a3c42653-9a4b-42d3-bc38-8d46d95c8f64 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62070) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 952.082475] env[62070]: DEBUG nova.compute.resource_tracker [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Instance 3d22f50a-e1b7-48f9-a044-df64d01dfeb4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62070) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 952.082617] env[62070]: DEBUG nova.compute.resource_tracker [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Instance 5ec9074b-1237-4404-b13c-a7ca0dbe1d43 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62070) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 952.082752] env[62070]: DEBUG nova.compute.resource_tracker [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Instance 4a5f644a-1670-4c6b-a762-f87f1ee4cce5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62070) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 952.082873] env[62070]: DEBUG nova.compute.resource_tracker [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Instance 21bcb1a6-833b-48f3-8ee2-0e49c64a104f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62070) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 952.082986] env[62070]: DEBUG nova.compute.resource_tracker [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Instance 7dc27fe6-495f-498d-88fe-a99ddc19a21c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62070) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 952.083123] env[62070]: DEBUG nova.compute.resource_tracker [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Instance fb054a32-c1aa-4884-a087-da5ad34cf3c4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. 
{{(pid=62070) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 952.083252] env[62070]: DEBUG nova.compute.resource_tracker [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Instance cf52cee8-874e-44e8-a36e-49ac20f3e312 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62070) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 952.083370] env[62070]: DEBUG nova.compute.resource_tracker [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Instance 1440361b-d3b4-4c1c-995c-fe7ff99ee297 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62070) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 952.083503] env[62070]: DEBUG nova.compute.resource_tracker [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Instance 84c00e4a-20d3-4739-8535-e27076d85a89 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62070) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 952.083642] env[62070]: DEBUG nova.compute.resource_tracker [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Instance e850734f-c49c-46d7-87ab-b0d6bed89d9b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62070) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 952.083765] env[62070]: DEBUG nova.compute.resource_tracker [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Instance 2c58db1d-405f-4489-85db-c74723be4a8d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62070) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 952.083909] env[62070]: DEBUG nova.compute.resource_tracker [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Instance 519cad6a-ebe0-42db-a19e-27249b83436e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62070) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 952.123559] env[62070]: DEBUG oslo_concurrency.lockutils [None req-89816dd1-86bd-4fe3-a58a-8a014d471f3f tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Lock "fb054a32-c1aa-4884-a087-da5ad34cf3c4" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 33.322s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 952.143076] env[62070]: DEBUG oslo_vmware.api [None req-87b139b2-90a5-4b70-b835-8cb543d252a2 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Task: {'id': task-1122116, 'name': PowerOnVM_Task, 'duration_secs': 1.033301} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 952.143076] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-87b139b2-90a5-4b70-b835-8cb543d252a2 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 2c58db1d-405f-4489-85db-c74723be4a8d] Powered on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 952.143299] env[62070]: INFO nova.compute.manager [None req-87b139b2-90a5-4b70-b835-8cb543d252a2 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 2c58db1d-405f-4489-85db-c74723be4a8d] Took 9.65 seconds to spawn the instance on the hypervisor. [ 952.143615] env[62070]: DEBUG nova.compute.manager [None req-87b139b2-90a5-4b70-b835-8cb543d252a2 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 2c58db1d-405f-4489-85db-c74723be4a8d] Checking state {{(pid=62070) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 952.144942] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37826722-f593-42a0-b382-35be4b77aa3a {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.303487] env[62070]: DEBUG oslo_vmware.api [None req-abee6904-6b65-479c-bc30-34040834c445 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Task: {'id': task-1122119, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 952.330346] env[62070]: DEBUG oslo_vmware.api [None req-1c8069d9-861b-46c0-a241-ffab59e77207 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Task: {'id': task-1122120, 'name': PowerOnVM_Task} progress is 87%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 952.331676] env[62070]: DEBUG nova.network.neutron [None req-d98f3ade-369e-4dac-bd29-f61163b09e57 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] [instance: 1440361b-d3b4-4c1c-995c-fe7ff99ee297] Instance cache missing network info. {{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 952.336554] env[62070]: DEBUG oslo_vmware.api [None req-c61ce357-31c8-431b-963a-c1a550dc6823 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Task: {'id': task-1122111, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 952.410578] env[62070]: DEBUG nova.network.neutron [None req-d98f3ade-369e-4dac-bd29-f61163b09e57 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] [instance: 1440361b-d3b4-4c1c-995c-fe7ff99ee297] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 952.590902] env[62070]: DEBUG nova.compute.resource_tracker [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Instance 53a1791d-38fd-4721-b82c-2f0922348300 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. 
Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62070) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 952.665034] env[62070]: INFO nova.compute.manager [None req-87b139b2-90a5-4b70-b835-8cb543d252a2 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 2c58db1d-405f-4489-85db-c74723be4a8d] Took 36.54 seconds to build instance. [ 952.672456] env[62070]: DEBUG oslo_concurrency.lockutils [None req-447f6c6a-b2b6-4357-bd55-081d5f3e8906 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Acquiring lock "e850734f-c49c-46d7-87ab-b0d6bed89d9b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 952.672806] env[62070]: DEBUG oslo_concurrency.lockutils [None req-447f6c6a-b2b6-4357-bd55-081d5f3e8906 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Lock "e850734f-c49c-46d7-87ab-b0d6bed89d9b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 952.673076] env[62070]: DEBUG oslo_concurrency.lockutils [None req-447f6c6a-b2b6-4357-bd55-081d5f3e8906 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Acquiring lock "e850734f-c49c-46d7-87ab-b0d6bed89d9b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 952.673311] env[62070]: DEBUG oslo_concurrency.lockutils [None req-447f6c6a-b2b6-4357-bd55-081d5f3e8906 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Lock "e850734f-c49c-46d7-87ab-b0d6bed89d9b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 952.673535] env[62070]: DEBUG oslo_concurrency.lockutils [None req-447f6c6a-b2b6-4357-bd55-081d5f3e8906 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Lock "e850734f-c49c-46d7-87ab-b0d6bed89d9b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 952.676211] env[62070]: INFO nova.compute.manager [None req-447f6c6a-b2b6-4357-bd55-081d5f3e8906 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: e850734f-c49c-46d7-87ab-b0d6bed89d9b] Terminating instance [ 952.678391] env[62070]: DEBUG nova.compute.manager [None req-447f6c6a-b2b6-4357-bd55-081d5f3e8906 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: e850734f-c49c-46d7-87ab-b0d6bed89d9b] Start destroying the instance on the hypervisor. 
{{(pid=62070) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 952.679033] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-447f6c6a-b2b6-4357-bd55-081d5f3e8906 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: e850734f-c49c-46d7-87ab-b0d6bed89d9b] Destroying instance {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 952.679478] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48589c03-9a91-4ed3-9c45-5eee1f13037b {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.689094] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-447f6c6a-b2b6-4357-bd55-081d5f3e8906 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: e850734f-c49c-46d7-87ab-b0d6bed89d9b] Unregistering the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 952.689381] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a7f5674a-7c22-4843-ac33-627fa5f313fb {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.779346] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-447f6c6a-b2b6-4357-bd55-081d5f3e8906 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: e850734f-c49c-46d7-87ab-b0d6bed89d9b] Unregistered the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 952.779584] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-447f6c6a-b2b6-4357-bd55-081d5f3e8906 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: e850734f-c49c-46d7-87ab-b0d6bed89d9b] Deleting contents of the VM from datastore datastore2 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 952.779929] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-447f6c6a-b2b6-4357-bd55-081d5f3e8906 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Deleting the datastore file [datastore2] e850734f-c49c-46d7-87ab-b0d6bed89d9b {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 952.780218] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9a5bca67-2627-4e91-8d50-359146ac5972 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.789061] env[62070]: DEBUG oslo_vmware.api [None req-447f6c6a-b2b6-4357-bd55-081d5f3e8906 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Waiting for the task: (returnval){ [ 952.789061] env[62070]: value = "task-1122122" [ 952.789061] env[62070]: _type = "Task" [ 952.789061] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 952.800113] env[62070]: DEBUG oslo_vmware.api [None req-447f6c6a-b2b6-4357-bd55-081d5f3e8906 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Task: {'id': task-1122122, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 952.805714] env[62070]: DEBUG oslo_vmware.api [None req-abee6904-6b65-479c-bc30-34040834c445 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Task: {'id': task-1122119, 'name': ReconfigVM_Task, 'duration_secs': 0.876089} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 952.806043] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-abee6904-6b65-479c-bc30-34040834c445 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] [instance: 7dc27fe6-495f-498d-88fe-a99ddc19a21c] Reconfigured VM instance instance-0000004e to attach disk [datastore2] 7dc27fe6-495f-498d-88fe-a99ddc19a21c/43ea607c-7ece-4601-9b11-75c6a16aa7dd-rescue.vmdk or device None with type thin {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 952.806913] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e73bea21-c026-45cf-a9ef-d6a00d86dcbc {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.846201] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8b6adc3a-69bb-42a1-84ed-356ad9ade8c9 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.867516] env[62070]: DEBUG oslo_vmware.api [None req-1c8069d9-861b-46c0-a241-ffab59e77207 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Task: {'id': task-1122120, 'name': PowerOnVM_Task, 'duration_secs': 0.95547} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 952.872177] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-1c8069d9-861b-46c0-a241-ffab59e77207 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 519cad6a-ebe0-42db-a19e-27249b83436e] Powered on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 952.872598] env[62070]: INFO nova.compute.manager [None req-1c8069d9-861b-46c0-a241-ffab59e77207 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 519cad6a-ebe0-42db-a19e-27249b83436e] Took 8.03 seconds to spawn the instance on the hypervisor. [ 952.872598] env[62070]: DEBUG nova.compute.manager [None req-1c8069d9-861b-46c0-a241-ffab59e77207 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 519cad6a-ebe0-42db-a19e-27249b83436e] Checking state {{(pid=62070) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 952.872890] env[62070]: DEBUG oslo_vmware.api [None req-c61ce357-31c8-431b-963a-c1a550dc6823 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Task: {'id': task-1122111, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 952.873179] env[62070]: DEBUG oslo_vmware.api [None req-abee6904-6b65-479c-bc30-34040834c445 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Waiting for the task: (returnval){ [ 952.873179] env[62070]: value = "task-1122123" [ 952.873179] env[62070]: _type = "Task" [ 952.873179] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 952.874478] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37911546-5c73-4139-a1b7-aff2a75d1473 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.888499] env[62070]: DEBUG oslo_vmware.api [None req-abee6904-6b65-479c-bc30-34040834c445 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Task: {'id': task-1122123, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 952.919351] env[62070]: DEBUG oslo_concurrency.lockutils [None req-d98f3ade-369e-4dac-bd29-f61163b09e57 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] Releasing lock "refresh_cache-1440361b-d3b4-4c1c-995c-fe7ff99ee297" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 952.920045] env[62070]: DEBUG nova.compute.manager [None req-d98f3ade-369e-4dac-bd29-f61163b09e57 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] [instance: 1440361b-d3b4-4c1c-995c-fe7ff99ee297] Start destroying the instance on the hypervisor. {{(pid=62070) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 952.920178] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-d98f3ade-369e-4dac-bd29-f61163b09e57 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] [instance: 1440361b-d3b4-4c1c-995c-fe7ff99ee297] Destroying instance {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 952.921121] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92260585-a5ef-4560-bdf3-630ec1b268e7 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.931140] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-d98f3ade-369e-4dac-bd29-f61163b09e57 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] [instance: 1440361b-d3b4-4c1c-995c-fe7ff99ee297] Powering off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 952.931414] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-28537e70-b953-4769-89db-404aa2a61ea3 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.940054] env[62070]: DEBUG oslo_vmware.api [None req-d98f3ade-369e-4dac-bd29-f61163b09e57 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] Waiting for the task: (returnval){ [ 952.940054] env[62070]: value = "task-1122124" [ 952.940054] env[62070]: _type = "Task" [ 952.940054] env[62070]: } to complete. 
{{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 952.950361] env[62070]: DEBUG oslo_vmware.api [None req-d98f3ade-369e-4dac-bd29-f61163b09e57 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] Task: {'id': task-1122124, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 953.095173] env[62070]: DEBUG nova.compute.resource_tracker [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Instance a5cba512-9b50-4ca3-93eb-345be12dc588 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62070) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 953.167350] env[62070]: DEBUG oslo_concurrency.lockutils [None req-87b139b2-90a5-4b70-b835-8cb543d252a2 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Lock "2c58db1d-405f-4489-85db-c74723be4a8d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 38.053s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 953.299384] env[62070]: DEBUG oslo_vmware.api [None req-447f6c6a-b2b6-4357-bd55-081d5f3e8906 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Task: {'id': task-1122122, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.305864} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 953.299645] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-447f6c6a-b2b6-4357-bd55-081d5f3e8906 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Deleted the datastore file {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 953.299828] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-447f6c6a-b2b6-4357-bd55-081d5f3e8906 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: e850734f-c49c-46d7-87ab-b0d6bed89d9b] Deleted contents of the VM from datastore datastore2 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 953.300012] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-447f6c6a-b2b6-4357-bd55-081d5f3e8906 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: e850734f-c49c-46d7-87ab-b0d6bed89d9b] Instance destroyed {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 953.300191] env[62070]: INFO nova.compute.manager [None req-447f6c6a-b2b6-4357-bd55-081d5f3e8906 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: e850734f-c49c-46d7-87ab-b0d6bed89d9b] Took 0.62 seconds to destroy the instance on the hypervisor. [ 953.300455] env[62070]: DEBUG oslo.service.loopingcall [None req-447f6c6a-b2b6-4357-bd55-081d5f3e8906 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 953.300692] env[62070]: DEBUG nova.compute.manager [-] [instance: e850734f-c49c-46d7-87ab-b0d6bed89d9b] Deallocating network for instance {{(pid=62070) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 953.300795] env[62070]: DEBUG nova.network.neutron [-] [instance: e850734f-c49c-46d7-87ab-b0d6bed89d9b] deallocate_for_instance() {{(pid=62070) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 953.346230] env[62070]: DEBUG oslo_vmware.api [None req-c61ce357-31c8-431b-963a-c1a550dc6823 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Task: {'id': task-1122111, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 953.361226] env[62070]: DEBUG oslo_concurrency.lockutils [None req-10152dbe-3561-4ca4-810f-51cf10506bc8 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Acquiring lock "fb054a32-c1aa-4884-a087-da5ad34cf3c4" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 953.361532] env[62070]: DEBUG oslo_concurrency.lockutils [None req-10152dbe-3561-4ca4-810f-51cf10506bc8 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Lock "fb054a32-c1aa-4884-a087-da5ad34cf3c4" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 953.361759] env[62070]: DEBUG oslo_concurrency.lockutils [None req-10152dbe-3561-4ca4-810f-51cf10506bc8 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Acquiring lock "fb054a32-c1aa-4884-a087-da5ad34cf3c4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 953.361951] env[62070]: DEBUG oslo_concurrency.lockutils [None req-10152dbe-3561-4ca4-810f-51cf10506bc8 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Lock "fb054a32-c1aa-4884-a087-da5ad34cf3c4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 953.362146] env[62070]: DEBUG oslo_concurrency.lockutils [None req-10152dbe-3561-4ca4-810f-51cf10506bc8 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Lock "fb054a32-c1aa-4884-a087-da5ad34cf3c4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 953.364879] env[62070]: INFO nova.compute.manager [None req-10152dbe-3561-4ca4-810f-51cf10506bc8 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: fb054a32-c1aa-4884-a087-da5ad34cf3c4] Terminating instance [ 953.366803] env[62070]: DEBUG 
nova.compute.manager [None req-10152dbe-3561-4ca4-810f-51cf10506bc8 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: fb054a32-c1aa-4884-a087-da5ad34cf3c4] Start destroying the instance on the hypervisor. {{(pid=62070) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 953.366993] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-10152dbe-3561-4ca4-810f-51cf10506bc8 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: fb054a32-c1aa-4884-a087-da5ad34cf3c4] Destroying instance {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 953.367930] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90c31ff9-59f7-42ee-99f5-321e36a03786 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.376650] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-10152dbe-3561-4ca4-810f-51cf10506bc8 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: fb054a32-c1aa-4884-a087-da5ad34cf3c4] Powering off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 953.376650] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9c094c02-2459-43da-aaca-06f0fd289386 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.390858] env[62070]: DEBUG oslo_vmware.api [None req-abee6904-6b65-479c-bc30-34040834c445 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Task: {'id': task-1122123, 'name': ReconfigVM_Task, 'duration_secs': 0.342023} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 953.392242] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-abee6904-6b65-479c-bc30-34040834c445 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] [instance: 7dc27fe6-495f-498d-88fe-a99ddc19a21c] Powering on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 953.396516] env[62070]: DEBUG oslo_vmware.api [None req-10152dbe-3561-4ca4-810f-51cf10506bc8 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Waiting for the task: (returnval){ [ 953.396516] env[62070]: value = "task-1122125" [ 953.396516] env[62070]: _type = "Task" [ 953.396516] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 953.399276] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-10ed8711-b83a-4139-9860-c08c9b9589c4 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.403955] env[62070]: INFO nova.compute.manager [None req-1c8069d9-861b-46c0-a241-ffab59e77207 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 519cad6a-ebe0-42db-a19e-27249b83436e] Took 36.11 seconds to build instance. 
[ 953.412044] env[62070]: DEBUG oslo_vmware.api [None req-10152dbe-3561-4ca4-810f-51cf10506bc8 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Task: {'id': task-1122125, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 953.413917] env[62070]: DEBUG oslo_vmware.api [None req-abee6904-6b65-479c-bc30-34040834c445 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Waiting for the task: (returnval){ [ 953.413917] env[62070]: value = "task-1122126" [ 953.413917] env[62070]: _type = "Task" [ 953.413917] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 953.422752] env[62070]: DEBUG oslo_vmware.api [None req-abee6904-6b65-479c-bc30-34040834c445 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Task: {'id': task-1122126, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 953.453349] env[62070]: DEBUG oslo_vmware.api [None req-d98f3ade-369e-4dac-bd29-f61163b09e57 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] Task: {'id': task-1122124, 'name': PowerOffVM_Task, 'duration_secs': 0.349456} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 953.454284] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-d98f3ade-369e-4dac-bd29-f61163b09e57 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] [instance: 1440361b-d3b4-4c1c-995c-fe7ff99ee297] Powered off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 953.454284] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-d98f3ade-369e-4dac-bd29-f61163b09e57 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] [instance: 1440361b-d3b4-4c1c-995c-fe7ff99ee297] Unregistering the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 953.454284] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-05dfae8d-ea99-4fb1-96e1-557400d2eaa3 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.485980] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-d98f3ade-369e-4dac-bd29-f61163b09e57 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] [instance: 1440361b-d3b4-4c1c-995c-fe7ff99ee297] Unregistered the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 953.486280] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-d98f3ade-369e-4dac-bd29-f61163b09e57 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] [instance: 1440361b-d3b4-4c1c-995c-fe7ff99ee297] Deleting contents of the VM from datastore datastore2 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 953.486689] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-d98f3ade-369e-4dac-bd29-f61163b09e57 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] Deleting the datastore file [datastore2] 
1440361b-d3b4-4c1c-995c-fe7ff99ee297 {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 953.486781] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9448bc4b-d038-4daa-9df2-71885ff29fa5 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.496053] env[62070]: DEBUG oslo_vmware.api [None req-d98f3ade-369e-4dac-bd29-f61163b09e57 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] Waiting for the task: (returnval){ [ 953.496053] env[62070]: value = "task-1122128" [ 953.496053] env[62070]: _type = "Task" [ 953.496053] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 953.506093] env[62070]: DEBUG oslo_vmware.api [None req-d98f3ade-369e-4dac-bd29-f61163b09e57 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] Task: {'id': task-1122128, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 953.598290] env[62070]: DEBUG nova.compute.resource_tracker [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Instance 1d595bc8-ab51-4443-bf32-079078f3133b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62070) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 953.601024] env[62070]: DEBUG nova.compute.resource_tracker [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Total usable vcpus: 48, total allocated vcpus: 15 {{(pid=62070) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 953.601024] env[62070]: DEBUG nova.compute.resource_tracker [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=3456MB phys_disk=200GB used_disk=15GB total_vcpus=48 used_vcpus=15 pci_stats=[] {{(pid=62070) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 953.777705] env[62070]: DEBUG nova.compute.manager [req-08d5cdef-12a3-4ec5-abe9-5505d3e9a4b3 req-64912488-3afe-4981-8492-8d9d673d8c68 service nova] [instance: e850734f-c49c-46d7-87ab-b0d6bed89d9b] Received event network-vif-deleted-7fbe6487-eae6-49f2-894f-82f8519f4232 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 953.777965] env[62070]: INFO nova.compute.manager [req-08d5cdef-12a3-4ec5-abe9-5505d3e9a4b3 req-64912488-3afe-4981-8492-8d9d673d8c68 service nova] [instance: e850734f-c49c-46d7-87ab-b0d6bed89d9b] Neutron deleted interface 7fbe6487-eae6-49f2-894f-82f8519f4232; detaching it from the instance and deleting it from the info cache [ 953.778316] env[62070]: DEBUG nova.network.neutron [req-08d5cdef-12a3-4ec5-abe9-5505d3e9a4b3 req-64912488-3afe-4981-8492-8d9d673d8c68 service nova] [instance: e850734f-c49c-46d7-87ab-b0d6bed89d9b] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 953.848259] env[62070]: DEBUG oslo_vmware.api [None req-c61ce357-31c8-431b-963a-c1a550dc6823 tempest-AttachInterfacesTestJSON-183791509 
tempest-AttachInterfacesTestJSON-183791509-project-member] Task: {'id': task-1122111, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 953.875685] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bec41767-6eaa-4836-9c53-aac8e73cf183 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.882874] env[62070]: DEBUG oslo_concurrency.lockutils [None req-8cb62e19-b163-4dbc-aa0f-4390ab9e95e3 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Acquiring lock "b101c79a-abfd-4104-aaed-096995fb2337" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 953.883921] env[62070]: DEBUG oslo_concurrency.lockutils [None req-8cb62e19-b163-4dbc-aa0f-4390ab9e95e3 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Lock "b101c79a-abfd-4104-aaed-096995fb2337" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 953.887749] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6b4d660-487c-46a2-b5b1-8d414e86cc98 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.922569] env[62070]: DEBUG oslo_concurrency.lockutils [None req-1c8069d9-861b-46c0-a241-ffab59e77207 tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Lock "519cad6a-ebe0-42db-a19e-27249b83436e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 37.634s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 953.929653] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b14da41-6ed6-4817-9d5c-0db198012942 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.943822] env[62070]: DEBUG oslo_vmware.api [None req-10152dbe-3561-4ca4-810f-51cf10506bc8 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Task: {'id': task-1122125, 'name': PowerOffVM_Task, 'duration_secs': 0.204164} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 953.948106] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1b84340-b9ff-4e09-b798-30fc506dcdd0 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.950863] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-10152dbe-3561-4ca4-810f-51cf10506bc8 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: fb054a32-c1aa-4884-a087-da5ad34cf3c4] Powered off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 953.951075] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-10152dbe-3561-4ca4-810f-51cf10506bc8 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: fb054a32-c1aa-4884-a087-da5ad34cf3c4] Unregistering the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 953.951367] env[62070]: DEBUG oslo_vmware.api [None req-abee6904-6b65-479c-bc30-34040834c445 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Task: {'id': task-1122126, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 953.951590] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-41a74610-62fe-444f-95ed-0b3941749931 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.964800] env[62070]: DEBUG nova.compute.provider_tree [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Updating inventory in ProviderTree for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 954.007807] env[62070]: DEBUG oslo_vmware.api [None req-d98f3ade-369e-4dac-bd29-f61163b09e57 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] Task: {'id': task-1122128, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.154145} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 954.008097] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-d98f3ade-369e-4dac-bd29-f61163b09e57 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] Deleted the datastore file {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 954.008287] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-d98f3ade-369e-4dac-bd29-f61163b09e57 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] [instance: 1440361b-d3b4-4c1c-995c-fe7ff99ee297] Deleted contents of the VM from datastore datastore2 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 954.008514] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-d98f3ade-369e-4dac-bd29-f61163b09e57 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] [instance: 1440361b-d3b4-4c1c-995c-fe7ff99ee297] Instance destroyed {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 954.008712] env[62070]: INFO nova.compute.manager [None req-d98f3ade-369e-4dac-bd29-f61163b09e57 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] [instance: 1440361b-d3b4-4c1c-995c-fe7ff99ee297] Took 1.09 seconds to destroy the instance on the hypervisor. [ 954.008961] env[62070]: DEBUG oslo.service.loopingcall [None req-d98f3ade-369e-4dac-bd29-f61163b09e57 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 954.009544] env[62070]: DEBUG nova.compute.manager [-] [instance: 1440361b-d3b4-4c1c-995c-fe7ff99ee297] Deallocating network for instance {{(pid=62070) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 954.009653] env[62070]: DEBUG nova.network.neutron [-] [instance: 1440361b-d3b4-4c1c-995c-fe7ff99ee297] deallocate_for_instance() {{(pid=62070) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 954.011545] env[62070]: INFO nova.compute.manager [None req-0e18b964-96a3-46d4-a9b9-4704ba1dc707 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 2c58db1d-405f-4489-85db-c74723be4a8d] Rebuilding instance [ 954.029896] env[62070]: DEBUG nova.network.neutron [-] [instance: 1440361b-d3b4-4c1c-995c-fe7ff99ee297] Instance cache missing network info. 
{{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 954.036391] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-10152dbe-3561-4ca4-810f-51cf10506bc8 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: fb054a32-c1aa-4884-a087-da5ad34cf3c4] Unregistered the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 954.036933] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-10152dbe-3561-4ca4-810f-51cf10506bc8 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: fb054a32-c1aa-4884-a087-da5ad34cf3c4] Deleting contents of the VM from datastore datastore2 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 954.036933] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-10152dbe-3561-4ca4-810f-51cf10506bc8 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Deleting the datastore file [datastore2] fb054a32-c1aa-4884-a087-da5ad34cf3c4 {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 954.037132] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1b34af7c-170b-498a-a9db-747c17369572 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.050039] env[62070]: DEBUG oslo_vmware.api [None req-10152dbe-3561-4ca4-810f-51cf10506bc8 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Waiting for the task: (returnval){ [ 954.050039] env[62070]: value = "task-1122130" [ 954.050039] env[62070]: _type = "Task" [ 954.050039] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 954.059052] env[62070]: DEBUG oslo_vmware.api [None req-10152dbe-3561-4ca4-810f-51cf10506bc8 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Task: {'id': task-1122130, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 954.060459] env[62070]: DEBUG nova.network.neutron [-] [instance: e850734f-c49c-46d7-87ab-b0d6bed89d9b] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 954.069504] env[62070]: DEBUG nova.compute.manager [None req-0e18b964-96a3-46d4-a9b9-4704ba1dc707 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 2c58db1d-405f-4489-85db-c74723be4a8d] Checking state {{(pid=62070) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 954.070427] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-033f3935-bb08-4691-8352-062372ed9f9c {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.283933] env[62070]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-04acbe71-8ba7-4320-88b3-a56cb1945ff7 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.296246] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9724cf5d-112c-4d7e-ba74-b118afaf5256 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.340228] env[62070]: DEBUG nova.compute.manager [req-08d5cdef-12a3-4ec5-abe9-5505d3e9a4b3 req-64912488-3afe-4981-8492-8d9d673d8c68 service nova] [instance: e850734f-c49c-46d7-87ab-b0d6bed89d9b] Detach interface failed, port_id=7fbe6487-eae6-49f2-894f-82f8519f4232, reason: Instance e850734f-c49c-46d7-87ab-b0d6bed89d9b could not be found. {{(pid=62070) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 954.353320] env[62070]: DEBUG oslo_vmware.api [None req-c61ce357-31c8-431b-963a-c1a550dc6823 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Task: {'id': task-1122111, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 954.387020] env[62070]: DEBUG nova.compute.manager [None req-8cb62e19-b163-4dbc-aa0f-4390ab9e95e3 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: b101c79a-abfd-4104-aaed-096995fb2337] Starting instance... {{(pid=62070) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 954.433651] env[62070]: DEBUG oslo_vmware.api [None req-abee6904-6b65-479c-bc30-34040834c445 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Task: {'id': task-1122126, 'name': PowerOnVM_Task, 'duration_secs': 0.556842} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 954.434015] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-abee6904-6b65-479c-bc30-34040834c445 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] [instance: 7dc27fe6-495f-498d-88fe-a99ddc19a21c] Powered on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 954.436923] env[62070]: DEBUG nova.compute.manager [None req-abee6904-6b65-479c-bc30-34040834c445 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] [instance: 7dc27fe6-495f-498d-88fe-a99ddc19a21c] Checking state {{(pid=62070) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 954.437863] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be14c499-ee1f-48a3-aafd-59328b4e99a1 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.485222] env[62070]: ERROR nova.scheduler.client.report [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] [req-fe693aa1-0537-415a-99db-4b48c31f2784] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 21c7c111-1b69-4468-b2c4-5dd96014fbd6. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-fe693aa1-0537-415a-99db-4b48c31f2784"}]} [ 954.501718] env[62070]: DEBUG nova.scheduler.client.report [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Refreshing inventories for resource provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 954.516035] env[62070]: DEBUG nova.scheduler.client.report [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Updating ProviderTree inventory for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 954.516230] env[62070]: DEBUG nova.compute.provider_tree [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Updating inventory in ProviderTree for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) 
update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 954.528550] env[62070]: DEBUG nova.scheduler.client.report [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Refreshing aggregate associations for resource provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6, aggregates: None {{(pid=62070) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 954.534990] env[62070]: DEBUG nova.network.neutron [-] [instance: 1440361b-d3b4-4c1c-995c-fe7ff99ee297] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 954.550093] env[62070]: DEBUG nova.scheduler.client.report [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Refreshing trait associations for resource provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6, traits: COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO {{(pid=62070) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 954.563752] env[62070]: INFO nova.compute.manager [-] [instance: e850734f-c49c-46d7-87ab-b0d6bed89d9b] Took 1.26 seconds to deallocate network for instance. [ 954.564202] env[62070]: DEBUG oslo_vmware.api [None req-10152dbe-3561-4ca4-810f-51cf10506bc8 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Task: {'id': task-1122130, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.311286} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 954.566027] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-10152dbe-3561-4ca4-810f-51cf10506bc8 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Deleted the datastore file {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 954.566224] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-10152dbe-3561-4ca4-810f-51cf10506bc8 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: fb054a32-c1aa-4884-a087-da5ad34cf3c4] Deleted contents of the VM from datastore datastore2 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 954.566408] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-10152dbe-3561-4ca4-810f-51cf10506bc8 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: fb054a32-c1aa-4884-a087-da5ad34cf3c4] Instance destroyed {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 954.566583] env[62070]: INFO nova.compute.manager [None req-10152dbe-3561-4ca4-810f-51cf10506bc8 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: fb054a32-c1aa-4884-a087-da5ad34cf3c4] Took 1.20 seconds to destroy the instance on the hypervisor. [ 954.566837] env[62070]: DEBUG oslo.service.loopingcall [None req-10152dbe-3561-4ca4-810f-51cf10506bc8 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 954.570069] env[62070]: DEBUG nova.compute.manager [-] [instance: fb054a32-c1aa-4884-a087-da5ad34cf3c4] Deallocating network for instance {{(pid=62070) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 954.570168] env[62070]: DEBUG nova.network.neutron [-] [instance: fb054a32-c1aa-4884-a087-da5ad34cf3c4] deallocate_for_instance() {{(pid=62070) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 954.582249] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-0e18b964-96a3-46d4-a9b9-4704ba1dc707 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 2c58db1d-405f-4489-85db-c74723be4a8d] Powering off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 954.582470] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f5d67b03-a604-4d07-9f4a-bbb2ab5050dd {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.596771] env[62070]: DEBUG oslo_vmware.api [None req-0e18b964-96a3-46d4-a9b9-4704ba1dc707 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Waiting for the task: (returnval){ [ 954.596771] env[62070]: value = "task-1122131" [ 954.596771] env[62070]: _type = "Task" [ 954.596771] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 954.617031] env[62070]: DEBUG oslo_vmware.api [None req-0e18b964-96a3-46d4-a9b9-4704ba1dc707 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Task: {'id': task-1122131, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 954.771019] env[62070]: DEBUG nova.compute.manager [None req-49a2a495-4105-4d6c-b6c7-f51ce433ccfa tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 519cad6a-ebe0-42db-a19e-27249b83436e] Checking state {{(pid=62070) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 954.771019] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-410db8c2-589a-4a9d-be95-4b619701a8ca {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.851497] env[62070]: DEBUG oslo_vmware.api [None req-c61ce357-31c8-431b-963a-c1a550dc6823 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Task: {'id': task-1122111, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 954.865518] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae9fa8c4-3af9-4793-82ab-c64115d7cd6e {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.874500] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49a630c3-3a8e-42fa-b863-7292deaa916a {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.912881] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3add24d7-7ff2-4e57-9b70-88430ab91541 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.922841] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e6989d4-3268-4868-9bdc-d290d510f13a {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.928061] env[62070]: DEBUG oslo_concurrency.lockutils [None req-8cb62e19-b163-4dbc-aa0f-4390ab9e95e3 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 954.939657] env[62070]: DEBUG nova.compute.provider_tree [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Updating inventory in ProviderTree for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 955.037814] env[62070]: INFO nova.compute.manager [-] [instance: 1440361b-d3b4-4c1c-995c-fe7ff99ee297] Took 1.03 seconds to deallocate network for instance. [ 955.073412] env[62070]: DEBUG oslo_concurrency.lockutils [None req-447f6c6a-b2b6-4357-bd55-081d5f3e8906 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 955.107008] env[62070]: DEBUG oslo_vmware.api [None req-0e18b964-96a3-46d4-a9b9-4704ba1dc707 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Task: {'id': task-1122131, 'name': PowerOffVM_Task, 'duration_secs': 0.388007} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 955.107314] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-0e18b964-96a3-46d4-a9b9-4704ba1dc707 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 2c58db1d-405f-4489-85db-c74723be4a8d] Powered off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 955.107601] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-0e18b964-96a3-46d4-a9b9-4704ba1dc707 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 2c58db1d-405f-4489-85db-c74723be4a8d] Destroying instance {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 955.108917] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b3c834b-cbd5-4be5-b982-a4f2c70626f8 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.116864] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-0e18b964-96a3-46d4-a9b9-4704ba1dc707 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 2c58db1d-405f-4489-85db-c74723be4a8d] Unregistering the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 955.117140] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-23024768-34a2-4082-adc4-6b37cb9c1352 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.287064] env[62070]: INFO nova.compute.manager [None req-49a2a495-4105-4d6c-b6c7-f51ce433ccfa tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 519cad6a-ebe0-42db-a19e-27249b83436e] instance snapshotting [ 955.290779] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53d21b43-f2fc-4793-b746-4fcc38d8f544 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.311558] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5d483ac-7c5d-41cd-bd4b-8641bc5c951d {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.337367] env[62070]: DEBUG nova.network.neutron [-] [instance: fb054a32-c1aa-4884-a087-da5ad34cf3c4] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 955.351484] env[62070]: DEBUG oslo_vmware.api [None req-c61ce357-31c8-431b-963a-c1a550dc6823 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Task: {'id': task-1122111, 'name': ReconfigVM_Task} progress is 18%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 955.474855] env[62070]: DEBUG nova.scheduler.client.report [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Updated inventory for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 with generation 120 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 955.475137] env[62070]: DEBUG nova.compute.provider_tree [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Updating resource provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 generation from 120 to 121 during operation: update_inventory {{(pid=62070) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 955.475343] env[62070]: DEBUG nova.compute.provider_tree [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Updating inventory in ProviderTree for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 955.544892] env[62070]: DEBUG oslo_concurrency.lockutils [None req-d98f3ade-369e-4dac-bd29-f61163b09e57 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 955.631902] env[62070]: INFO nova.compute.manager [None req-66ad677e-44ea-4425-98ea-51d271a8e44b tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] [instance: 7dc27fe6-495f-498d-88fe-a99ddc19a21c] Unrescuing [ 955.632200] env[62070]: DEBUG oslo_concurrency.lockutils [None req-66ad677e-44ea-4425-98ea-51d271a8e44b tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Acquiring lock "refresh_cache-7dc27fe6-495f-498d-88fe-a99ddc19a21c" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 955.632359] env[62070]: DEBUG oslo_concurrency.lockutils [None req-66ad677e-44ea-4425-98ea-51d271a8e44b tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Acquired lock "refresh_cache-7dc27fe6-495f-498d-88fe-a99ddc19a21c" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 955.632540] env[62070]: DEBUG nova.network.neutron [None req-66ad677e-44ea-4425-98ea-51d271a8e44b tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] [instance: 7dc27fe6-495f-498d-88fe-a99ddc19a21c] Building network info cache for instance {{(pid=62070) 
_get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 955.802743] env[62070]: DEBUG nova.compute.manager [req-95829b78-9135-4bc8-8f9a-085f858a89a0 req-83158204-c82e-42f7-a96d-6591ed5fcb89 service nova] [instance: fb054a32-c1aa-4884-a087-da5ad34cf3c4] Received event network-vif-deleted-32f47285-7ff0-405e-849d-27e73999e359 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 955.822483] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-49a2a495-4105-4d6c-b6c7-f51ce433ccfa tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 519cad6a-ebe0-42db-a19e-27249b83436e] Creating Snapshot of the VM instance {{(pid=62070) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 955.822793] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-314855a0-156f-43f5-9140-7fa20cf04f7d {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.831233] env[62070]: DEBUG oslo_vmware.api [None req-49a2a495-4105-4d6c-b6c7-f51ce433ccfa tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Waiting for the task: (returnval){ [ 955.831233] env[62070]: value = "task-1122133" [ 955.831233] env[62070]: _type = "Task" [ 955.831233] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 955.839545] env[62070]: INFO nova.compute.manager [-] [instance: fb054a32-c1aa-4884-a087-da5ad34cf3c4] Took 1.27 seconds to deallocate network for instance. [ 955.839812] env[62070]: DEBUG oslo_vmware.api [None req-49a2a495-4105-4d6c-b6c7-f51ce433ccfa tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Task: {'id': task-1122133, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 955.852846] env[62070]: DEBUG oslo_vmware.api [None req-c61ce357-31c8-431b-963a-c1a550dc6823 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Task: {'id': task-1122111, 'name': ReconfigVM_Task, 'duration_secs': 5.803685} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 955.853142] env[62070]: DEBUG oslo_concurrency.lockutils [None req-c61ce357-31c8-431b-963a-c1a550dc6823 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Releasing lock "21bcb1a6-833b-48f3-8ee2-0e49c64a104f" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 955.853142] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-c61ce357-31c8-431b-963a-c1a550dc6823 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] [instance: 21bcb1a6-833b-48f3-8ee2-0e49c64a104f] Reconfigured VM to detach interface {{(pid=62070) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1984}} [ 955.980293] env[62070]: DEBUG nova.compute.resource_tracker [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62070) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 955.980540] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 4.945s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 955.980821] env[62070]: DEBUG oslo_concurrency.lockutils [None req-242f2c71-01fd-4276-a650-74f4ae5a169d tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.766s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 955.982634] env[62070]: INFO nova.compute.claims [None req-242f2c71-01fd-4276-a650-74f4ae5a169d tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] [instance: 53a1791d-38fd-4721-b82c-2f0922348300] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 956.341357] env[62070]: DEBUG oslo_vmware.api [None req-49a2a495-4105-4d6c-b6c7-f51ce433ccfa tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Task: {'id': task-1122133, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 956.345873] env[62070]: DEBUG oslo_concurrency.lockutils [None req-10152dbe-3561-4ca4-810f-51cf10506bc8 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 956.409165] env[62070]: DEBUG nova.network.neutron [None req-66ad677e-44ea-4425-98ea-51d271a8e44b tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] [instance: 7dc27fe6-495f-498d-88fe-a99ddc19a21c] Updating instance_info_cache with network_info: [{"id": "cac26624-11c7-45a9-acb3-3e86b7232ab2", "address": "fa:16:3e:8c:68:96", "network": {"id": "754f4ec8-0bc6-4726-8b88-1a4e1a326699", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-293486644-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.209", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1a94db233e3a43dc9aa7cf887c6cb1f6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d2742ba-c3af-4412-877d-c2811dfeba46", "external-id": "nsx-vlan-transportzone-390", "segmentation_id": 390, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcac26624-11", "ovs_interfaceid": "cac26624-11c7-45a9-acb3-3e86b7232ab2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 956.841729] env[62070]: DEBUG oslo_vmware.api [None req-49a2a495-4105-4d6c-b6c7-f51ce433ccfa tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Task: {'id': task-1122133, 'name': CreateSnapshot_Task, 'duration_secs': 0.88072} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 956.841996] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-49a2a495-4105-4d6c-b6c7-f51ce433ccfa tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 519cad6a-ebe0-42db-a19e-27249b83436e] Created Snapshot of the VM instance {{(pid=62070) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 956.842814] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d09b3c5-935f-4af7-9aee-928e1513fc38 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.912173] env[62070]: DEBUG oslo_concurrency.lockutils [None req-66ad677e-44ea-4425-98ea-51d271a8e44b tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Releasing lock "refresh_cache-7dc27fe6-495f-498d-88fe-a99ddc19a21c" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 956.912802] env[62070]: DEBUG nova.objects.instance [None req-66ad677e-44ea-4425-98ea-51d271a8e44b tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Lazy-loading 'flavor' on Instance uuid 7dc27fe6-495f-498d-88fe-a99ddc19a21c {{(pid=62070) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 957.201032] env[62070]: DEBUG oslo_concurrency.lockutils [None req-c61ce357-31c8-431b-963a-c1a550dc6823 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Acquiring lock "refresh_cache-21bcb1a6-833b-48f3-8ee2-0e49c64a104f" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 957.201032] env[62070]: DEBUG oslo_concurrency.lockutils [None req-c61ce357-31c8-431b-963a-c1a550dc6823 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Acquired lock "refresh_cache-21bcb1a6-833b-48f3-8ee2-0e49c64a104f" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 957.201032] env[62070]: DEBUG nova.network.neutron [None req-c61ce357-31c8-431b-963a-c1a550dc6823 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] [instance: 21bcb1a6-833b-48f3-8ee2-0e49c64a104f] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 957.206864] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11665bde-da99-48b8-b3ec-41e63fb6d095 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.217149] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8b223f1-0c4b-415f-89a5-c20cf22427a7 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.247554] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49cb1495-955a-4f7c-9751-d20054fb25c3 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.255579] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-d3891ad0-09fb-46f9-9d25-b366035ed292 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.269467] env[62070]: DEBUG nova.compute.provider_tree [None req-242f2c71-01fd-4276-a650-74f4ae5a169d tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 957.359875] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-49a2a495-4105-4d6c-b6c7-f51ce433ccfa tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 519cad6a-ebe0-42db-a19e-27249b83436e] Creating linked-clone VM from snapshot {{(pid=62070) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 957.360210] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-3d5b9092-b3c1-4674-af41-bbb366d486a0 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.369949] env[62070]: DEBUG oslo_vmware.api [None req-49a2a495-4105-4d6c-b6c7-f51ce433ccfa tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Waiting for the task: (returnval){ [ 957.369949] env[62070]: value = "task-1122134" [ 957.369949] env[62070]: _type = "Task" [ 957.369949] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 957.378153] env[62070]: DEBUG oslo_vmware.api [None req-49a2a495-4105-4d6c-b6c7-f51ce433ccfa tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Task: {'id': task-1122134, 'name': CloneVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.422448] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79792991-df6a-41c4-a7cb-3e8bef8eff09 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.446129] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-66ad677e-44ea-4425-98ea-51d271a8e44b tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] [instance: 7dc27fe6-495f-498d-88fe-a99ddc19a21c] Powering off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 957.446458] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6b34ae62-f29e-408c-90b4-4e771f44840b {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.453985] env[62070]: DEBUG oslo_vmware.api [None req-66ad677e-44ea-4425-98ea-51d271a8e44b tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Waiting for the task: (returnval){ [ 957.453985] env[62070]: value = "task-1122135" [ 957.453985] env[62070]: _type = "Task" [ 957.453985] env[62070]: } to complete. 
{{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 957.464271] env[62070]: DEBUG oslo_vmware.api [None req-66ad677e-44ea-4425-98ea-51d271a8e44b tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Task: {'id': task-1122135, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.572500] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-0e18b964-96a3-46d4-a9b9-4704ba1dc707 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 2c58db1d-405f-4489-85db-c74723be4a8d] Unregistered the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 957.572718] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-0e18b964-96a3-46d4-a9b9-4704ba1dc707 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 2c58db1d-405f-4489-85db-c74723be4a8d] Deleting contents of the VM from datastore datastore1 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 957.572997] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-0e18b964-96a3-46d4-a9b9-4704ba1dc707 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Deleting the datastore file [datastore1] 2c58db1d-405f-4489-85db-c74723be4a8d {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 957.573351] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b1ed561d-183d-4a70-9c41-af254ea146c5 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.580915] env[62070]: DEBUG oslo_vmware.api [None req-0e18b964-96a3-46d4-a9b9-4704ba1dc707 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Waiting for the task: (returnval){ [ 957.580915] env[62070]: value = "task-1122136" [ 957.580915] env[62070]: _type = "Task" [ 957.580915] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 957.593458] env[62070]: DEBUG oslo_vmware.api [None req-0e18b964-96a3-46d4-a9b9-4704ba1dc707 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Task: {'id': task-1122136, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.772804] env[62070]: DEBUG nova.scheduler.client.report [None req-242f2c71-01fd-4276-a650-74f4ae5a169d tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 957.880982] env[62070]: DEBUG oslo_vmware.api [None req-49a2a495-4105-4d6c-b6c7-f51ce433ccfa tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Task: {'id': task-1122134, 'name': CloneVM_Task} progress is 94%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.918138] env[62070]: INFO nova.network.neutron [None req-c61ce357-31c8-431b-963a-c1a550dc6823 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] [instance: 21bcb1a6-833b-48f3-8ee2-0e49c64a104f] Port 60714fe7-f6bc-4f1f-b4d0-a0b45b5a8f70 from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. [ 957.918565] env[62070]: DEBUG nova.network.neutron [None req-c61ce357-31c8-431b-963a-c1a550dc6823 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] [instance: 21bcb1a6-833b-48f3-8ee2-0e49c64a104f] Updating instance_info_cache with network_info: [{"id": "45420f68-e309-4569-8dac-28e16d9417d7", "address": "fa:16:3e:08:c2:70", "network": {"id": "48dc51c7-cfa4-452e-9d72-2968d9a40dfa", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-274800531-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "925dff51764c4b56ae7ea05fbde2ecdd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2c7c1b46-cb81-45da-b5aa-7905d4da5854", "external-id": "nsx-vlan-transportzone-15", "segmentation_id": 15, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap45420f68-e3", "ovs_interfaceid": "45420f68-e309-4569-8dac-28e16d9417d7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 957.964894] env[62070]: DEBUG oslo_vmware.api [None req-66ad677e-44ea-4425-98ea-51d271a8e44b tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Task: {'id': task-1122135, 'name': PowerOffVM_Task, 'duration_secs': 0.423745} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 957.965375] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-66ad677e-44ea-4425-98ea-51d271a8e44b tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] [instance: 7dc27fe6-495f-498d-88fe-a99ddc19a21c] Powered off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 957.970810] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-66ad677e-44ea-4425-98ea-51d271a8e44b tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] [instance: 7dc27fe6-495f-498d-88fe-a99ddc19a21c] Reconfiguring VM instance instance-0000004e to detach disk 2002 {{(pid=62070) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 957.971187] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4075893d-0a2c-49e0-9fe2-b482db11832a {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.995406] env[62070]: DEBUG oslo_vmware.api [None req-66ad677e-44ea-4425-98ea-51d271a8e44b tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Waiting for the task: (returnval){ [ 957.995406] env[62070]: value = "task-1122137" [ 957.995406] env[62070]: _type = "Task" [ 957.995406] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 958.010060] env[62070]: DEBUG oslo_vmware.api [None req-66ad677e-44ea-4425-98ea-51d271a8e44b tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Task: {'id': task-1122137, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 958.091383] env[62070]: DEBUG oslo_vmware.api [None req-0e18b964-96a3-46d4-a9b9-4704ba1dc707 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Task: {'id': task-1122136, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.242868} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 958.091700] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-0e18b964-96a3-46d4-a9b9-4704ba1dc707 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Deleted the datastore file {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 958.091928] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-0e18b964-96a3-46d4-a9b9-4704ba1dc707 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 2c58db1d-405f-4489-85db-c74723be4a8d] Deleted contents of the VM from datastore datastore1 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 958.093241] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-0e18b964-96a3-46d4-a9b9-4704ba1dc707 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 2c58db1d-405f-4489-85db-c74723be4a8d] Instance destroyed {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 958.278804] env[62070]: DEBUG oslo_concurrency.lockutils [None req-242f2c71-01fd-4276-a650-74f4ae5a169d tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.298s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 958.279546] env[62070]: DEBUG nova.compute.manager [None req-242f2c71-01fd-4276-a650-74f4ae5a169d tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] [instance: 53a1791d-38fd-4721-b82c-2f0922348300] Start building networks asynchronously for instance. {{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 958.284703] env[62070]: DEBUG nova.compute.manager [req-9aea1cc4-de20-42d4-9be9-ca48de7e93a3 req-0d2d6c48-32d0-4927-9b3c-48ab9020fddd service nova] [instance: 21bcb1a6-833b-48f3-8ee2-0e49c64a104f] Received event network-changed-45420f68-e309-4569-8dac-28e16d9417d7 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 958.284703] env[62070]: DEBUG nova.compute.manager [req-9aea1cc4-de20-42d4-9be9-ca48de7e93a3 req-0d2d6c48-32d0-4927-9b3c-48ab9020fddd service nova] [instance: 21bcb1a6-833b-48f3-8ee2-0e49c64a104f] Refreshing instance network info cache due to event network-changed-45420f68-e309-4569-8dac-28e16d9417d7. 
{{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 958.284957] env[62070]: DEBUG oslo_concurrency.lockutils [req-9aea1cc4-de20-42d4-9be9-ca48de7e93a3 req-0d2d6c48-32d0-4927-9b3c-48ab9020fddd service nova] Acquiring lock "refresh_cache-21bcb1a6-833b-48f3-8ee2-0e49c64a104f" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 958.285506] env[62070]: DEBUG oslo_concurrency.lockutils [None req-26f7fbc8-03f4-42ad-82c9-20e385f600f8 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 23.619s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 958.287700] env[62070]: INFO nova.compute.claims [None req-26f7fbc8-03f4-42ad-82c9-20e385f600f8 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] [instance: a5cba512-9b50-4ca3-93eb-345be12dc588] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 958.381247] env[62070]: DEBUG oslo_vmware.api [None req-49a2a495-4105-4d6c-b6c7-f51ce433ccfa tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Task: {'id': task-1122134, 'name': CloneVM_Task} progress is 95%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 958.422031] env[62070]: DEBUG oslo_concurrency.lockutils [None req-c61ce357-31c8-431b-963a-c1a550dc6823 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Releasing lock "refresh_cache-21bcb1a6-833b-48f3-8ee2-0e49c64a104f" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 958.424331] env[62070]: DEBUG oslo_concurrency.lockutils [req-9aea1cc4-de20-42d4-9be9-ca48de7e93a3 req-0d2d6c48-32d0-4927-9b3c-48ab9020fddd service nova] Acquired lock "refresh_cache-21bcb1a6-833b-48f3-8ee2-0e49c64a104f" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 958.424543] env[62070]: DEBUG nova.network.neutron [req-9aea1cc4-de20-42d4-9be9-ca48de7e93a3 req-0d2d6c48-32d0-4927-9b3c-48ab9020fddd service nova] [instance: 21bcb1a6-833b-48f3-8ee2-0e49c64a104f] Refreshing network info cache for port 45420f68-e309-4569-8dac-28e16d9417d7 {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 958.507128] env[62070]: DEBUG oslo_vmware.api [None req-66ad677e-44ea-4425-98ea-51d271a8e44b tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Task: {'id': task-1122137, 'name': ReconfigVM_Task, 'duration_secs': 0.284114} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 958.507981] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-66ad677e-44ea-4425-98ea-51d271a8e44b tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] [instance: 7dc27fe6-495f-498d-88fe-a99ddc19a21c] Reconfigured VM instance instance-0000004e to detach disk 2002 {{(pid=62070) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 958.508199] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-66ad677e-44ea-4425-98ea-51d271a8e44b tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] [instance: 7dc27fe6-495f-498d-88fe-a99ddc19a21c] Powering on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 958.508464] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5c4aac16-e0b8-464b-85bf-2e4cf5c5ce6d {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.516668] env[62070]: DEBUG oslo_vmware.api [None req-66ad677e-44ea-4425-98ea-51d271a8e44b tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Waiting for the task: (returnval){ [ 958.516668] env[62070]: value = "task-1122138" [ 958.516668] env[62070]: _type = "Task" [ 958.516668] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 958.529799] env[62070]: DEBUG oslo_vmware.api [None req-66ad677e-44ea-4425-98ea-51d271a8e44b tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Task: {'id': task-1122138, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 958.787140] env[62070]: DEBUG nova.compute.utils [None req-242f2c71-01fd-4276-a650-74f4ae5a169d tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Using /dev/sd instead of None {{(pid=62070) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 958.788427] env[62070]: DEBUG nova.compute.manager [None req-242f2c71-01fd-4276-a650-74f4ae5a169d tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] [instance: 53a1791d-38fd-4721-b82c-2f0922348300] Allocating IP information in the background. 
{{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 958.788604] env[62070]: DEBUG nova.network.neutron [None req-242f2c71-01fd-4276-a650-74f4ae5a169d tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] [instance: 53a1791d-38fd-4721-b82c-2f0922348300] allocate_for_instance() {{(pid=62070) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 958.827979] env[62070]: DEBUG nova.policy [None req-242f2c71-01fd-4276-a650-74f4ae5a169d tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6568006b61514963b9c4c4d1b1330c65', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '772f7fcee5f44b899b6df797e1ed5ddd', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62070) authorize /opt/stack/nova/nova/policy.py:201}} [ 958.884770] env[62070]: DEBUG oslo_vmware.api [None req-49a2a495-4105-4d6c-b6c7-f51ce433ccfa tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Task: {'id': task-1122134, 'name': CloneVM_Task, 'duration_secs': 1.125844} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 958.884770] env[62070]: INFO nova.virt.vmwareapi.vmops [None req-49a2a495-4105-4d6c-b6c7-f51ce433ccfa tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 519cad6a-ebe0-42db-a19e-27249b83436e] Created linked-clone VM from snapshot [ 958.884770] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a00f5455-9c07-40e8-ace0-55e9af9be88d {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.893074] env[62070]: DEBUG nova.virt.vmwareapi.images [None req-49a2a495-4105-4d6c-b6c7-f51ce433ccfa tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 519cad6a-ebe0-42db-a19e-27249b83436e] Uploading image 26ae3f8a-c05b-4bd4-8dc9-ea50dab18ec1 {{(pid=62070) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:442}} [ 958.907034] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-49a2a495-4105-4d6c-b6c7-f51ce433ccfa tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 519cad6a-ebe0-42db-a19e-27249b83436e] Destroying the VM {{(pid=62070) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1358}} [ 958.907034] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-1c4b1a60-cd46-4fd9-bd0b-dae72c0bb63f {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.915382] env[62070]: DEBUG oslo_vmware.api [None req-49a2a495-4105-4d6c-b6c7-f51ce433ccfa tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Waiting for the task: (returnval){ [ 958.915382] env[62070]: value = "task-1122139" [ 958.915382] env[62070]: _type = "Task" [ 958.915382] env[62070]: } to complete. 
{{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 958.928086] env[62070]: DEBUG oslo_concurrency.lockutils [None req-c61ce357-31c8-431b-963a-c1a550dc6823 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Lock "interface-21bcb1a6-833b-48f3-8ee2-0e49c64a104f-60714fe7-f6bc-4f1f-b4d0-a0b45b5a8f70" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 9.684s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 958.931185] env[62070]: DEBUG oslo_vmware.api [None req-49a2a495-4105-4d6c-b6c7-f51ce433ccfa tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Task: {'id': task-1122139, 'name': Destroy_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 958.996601] env[62070]: DEBUG oslo_concurrency.lockutils [None req-5f812c03-0b47-438f-9e1c-ee619b6efa11 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Acquiring lock "interface-cf52cee8-874e-44e8-a36e-49ac20f3e312-60714fe7-f6bc-4f1f-b4d0-a0b45b5a8f70" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 958.996921] env[62070]: DEBUG oslo_concurrency.lockutils [None req-5f812c03-0b47-438f-9e1c-ee619b6efa11 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Lock "interface-cf52cee8-874e-44e8-a36e-49ac20f3e312-60714fe7-f6bc-4f1f-b4d0-a0b45b5a8f70" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.001s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 958.997309] env[62070]: DEBUG nova.objects.instance [None req-5f812c03-0b47-438f-9e1c-ee619b6efa11 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Lazy-loading 'flavor' on Instance uuid cf52cee8-874e-44e8-a36e-49ac20f3e312 {{(pid=62070) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 959.028694] env[62070]: DEBUG oslo_vmware.api [None req-66ad677e-44ea-4425-98ea-51d271a8e44b tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Task: {'id': task-1122138, 'name': PowerOnVM_Task, 'duration_secs': 0.448013} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 959.028952] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-66ad677e-44ea-4425-98ea-51d271a8e44b tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] [instance: 7dc27fe6-495f-498d-88fe-a99ddc19a21c] Powered on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 959.029210] env[62070]: DEBUG nova.compute.manager [None req-66ad677e-44ea-4425-98ea-51d271a8e44b tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] [instance: 7dc27fe6-495f-498d-88fe-a99ddc19a21c] Checking state {{(pid=62070) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 959.032357] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4034681-eac7-40c2-9f87-18ebaf668c62 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.133924] env[62070]: DEBUG nova.virt.hardware [None req-0e18b964-96a3-46d4-a9b9-4704ba1dc707 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T09:21:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T09:21:17Z,direct_url=,disk_format='vmdk',id=43ea607c-7ece-4601-9b11-75c6a16aa7dd,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9d42cb2bbadf40d6b35f237f71234611',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T09:21:18Z,virtual_size=,visibility=), allow threads: False {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 959.134210] env[62070]: DEBUG nova.virt.hardware [None req-0e18b964-96a3-46d4-a9b9-4704ba1dc707 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Flavor limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 959.134378] env[62070]: DEBUG nova.virt.hardware [None req-0e18b964-96a3-46d4-a9b9-4704ba1dc707 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Image limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 959.134571] env[62070]: DEBUG nova.virt.hardware [None req-0e18b964-96a3-46d4-a9b9-4704ba1dc707 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Flavor pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 959.134885] env[62070]: DEBUG nova.virt.hardware [None req-0e18b964-96a3-46d4-a9b9-4704ba1dc707 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Image pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 959.134885] env[62070]: DEBUG nova.virt.hardware [None req-0e18b964-96a3-46d4-a9b9-4704ba1dc707 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] 
Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 959.139190] env[62070]: DEBUG nova.virt.hardware [None req-0e18b964-96a3-46d4-a9b9-4704ba1dc707 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 959.139556] env[62070]: DEBUG nova.virt.hardware [None req-0e18b964-96a3-46d4-a9b9-4704ba1dc707 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 959.139871] env[62070]: DEBUG nova.virt.hardware [None req-0e18b964-96a3-46d4-a9b9-4704ba1dc707 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Got 1 possible topologies {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 959.140190] env[62070]: DEBUG nova.virt.hardware [None req-0e18b964-96a3-46d4-a9b9-4704ba1dc707 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 959.140506] env[62070]: DEBUG nova.virt.hardware [None req-0e18b964-96a3-46d4-a9b9-4704ba1dc707 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 959.141866] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37e4764c-b998-4f4a-88ce-c02c28022dc3 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.146983] env[62070]: DEBUG nova.network.neutron [None req-242f2c71-01fd-4276-a650-74f4ae5a169d tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] [instance: 53a1791d-38fd-4721-b82c-2f0922348300] Successfully created port: 2c6759e4-b6e7-4b67-b06d-d38d6043d3b2 {{(pid=62070) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 959.158745] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee546b7d-410d-4bc3-9062-145ef6fed647 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.167197] env[62070]: DEBUG nova.network.neutron [req-9aea1cc4-de20-42d4-9be9-ca48de7e93a3 req-0d2d6c48-32d0-4927-9b3c-48ab9020fddd service nova] [instance: 21bcb1a6-833b-48f3-8ee2-0e49c64a104f] Updated VIF entry in instance network info cache for port 45420f68-e309-4569-8dac-28e16d9417d7. 
{{(pid=62070) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 959.170020] env[62070]: DEBUG nova.network.neutron [req-9aea1cc4-de20-42d4-9be9-ca48de7e93a3 req-0d2d6c48-32d0-4927-9b3c-48ab9020fddd service nova] [instance: 21bcb1a6-833b-48f3-8ee2-0e49c64a104f] Updating instance_info_cache with network_info: [{"id": "45420f68-e309-4569-8dac-28e16d9417d7", "address": "fa:16:3e:08:c2:70", "network": {"id": "48dc51c7-cfa4-452e-9d72-2968d9a40dfa", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-274800531-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "925dff51764c4b56ae7ea05fbde2ecdd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2c7c1b46-cb81-45da-b5aa-7905d4da5854", "external-id": "nsx-vlan-transportzone-15", "segmentation_id": 15, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap45420f68-e3", "ovs_interfaceid": "45420f68-e309-4569-8dac-28e16d9417d7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 959.182694] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-0e18b964-96a3-46d4-a9b9-4704ba1dc707 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 2c58db1d-405f-4489-85db-c74723be4a8d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e2:57:14', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1559ce49-7345-443f-bf02-4bfeb88356ef', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8876137b-4c95-4f50-8bf9-ad7d44ac5052', 'vif_model': 'vmxnet3'}] {{(pid=62070) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 959.195700] env[62070]: DEBUG oslo.service.loopingcall [None req-0e18b964-96a3-46d4-a9b9-4704ba1dc707 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 959.197012] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2c58db1d-405f-4489-85db-c74723be4a8d] Creating VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 959.197335] env[62070]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4b02cc85-4fa8-4129-b738-bbef18e1d59e {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.227689] env[62070]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 959.227689] env[62070]: value = "task-1122140" [ 959.227689] env[62070]: _type = "Task" [ 959.227689] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 959.242298] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1122140, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.292292] env[62070]: DEBUG nova.compute.manager [None req-242f2c71-01fd-4276-a650-74f4ae5a169d tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] [instance: 53a1791d-38fd-4721-b82c-2f0922348300] Start building block device mappings for instance. {{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 959.428197] env[62070]: DEBUG oslo_vmware.api [None req-49a2a495-4105-4d6c-b6c7-f51ce433ccfa tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Task: {'id': task-1122139, 'name': Destroy_Task} progress is 33%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.574025] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d1ef493-467f-479a-8c16-b40dd688f608 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.583590] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ffa079a-ea39-4dfc-9919-51ec556166da {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.619969] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41c0bedf-f907-4161-a284-5adf0f66e989 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.626144] env[62070]: DEBUG nova.objects.instance [None req-5f812c03-0b47-438f-9e1c-ee619b6efa11 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Lazy-loading 'pci_requests' on Instance uuid cf52cee8-874e-44e8-a36e-49ac20f3e312 {{(pid=62070) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 959.631030] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ae64964-9401-4045-b24f-48c036707000 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.642192] env[62070]: DEBUG nova.compute.provider_tree [None req-26f7fbc8-03f4-42ad-82c9-20e385f600f8 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 959.697086] env[62070]: DEBUG oslo_concurrency.lockutils [req-9aea1cc4-de20-42d4-9be9-ca48de7e93a3 req-0d2d6c48-32d0-4927-9b3c-48ab9020fddd service nova] Releasing lock "refresh_cache-21bcb1a6-833b-48f3-8ee2-0e49c64a104f" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 959.738731] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1122140, 'name': CreateVM_Task} progress is 99%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.927926] env[62070]: DEBUG oslo_vmware.api [None req-49a2a495-4105-4d6c-b6c7-f51ce433ccfa tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Task: {'id': task-1122139, 'name': Destroy_Task, 'duration_secs': 0.674072} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 959.928212] env[62070]: INFO nova.virt.vmwareapi.vm_util [None req-49a2a495-4105-4d6c-b6c7-f51ce433ccfa tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 519cad6a-ebe0-42db-a19e-27249b83436e] Destroyed the VM [ 959.928494] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-49a2a495-4105-4d6c-b6c7-f51ce433ccfa tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 519cad6a-ebe0-42db-a19e-27249b83436e] Deleting Snapshot of the VM instance {{(pid=62070) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 959.928680] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-81945634-30d7-4632-bd15-19f752b53695 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.936402] env[62070]: DEBUG oslo_vmware.api [None req-49a2a495-4105-4d6c-b6c7-f51ce433ccfa tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Waiting for the task: (returnval){ [ 959.936402] env[62070]: value = "task-1122141" [ 959.936402] env[62070]: _type = "Task" [ 959.936402] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 959.945617] env[62070]: DEBUG oslo_vmware.api [None req-49a2a495-4105-4d6c-b6c7-f51ce433ccfa tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Task: {'id': task-1122141, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.133428] env[62070]: DEBUG nova.objects.base [None req-5f812c03-0b47-438f-9e1c-ee619b6efa11 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Object Instance lazy-loaded attributes: flavor,pci_requests {{(pid=62070) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 960.133612] env[62070]: DEBUG nova.network.neutron [None req-5f812c03-0b47-438f-9e1c-ee619b6efa11 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] [instance: cf52cee8-874e-44e8-a36e-49ac20f3e312] allocate_for_instance() {{(pid=62070) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 960.145065] env[62070]: DEBUG nova.scheduler.client.report [None req-26f7fbc8-03f4-42ad-82c9-20e385f600f8 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 960.238931] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1122140, 'name': CreateVM_Task, 'duration_secs': 0.55852} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 960.239126] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2c58db1d-405f-4489-85db-c74723be4a8d] Created VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 960.239848] env[62070]: DEBUG oslo_concurrency.lockutils [None req-0e18b964-96a3-46d4-a9b9-4704ba1dc707 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 960.240052] env[62070]: DEBUG oslo_concurrency.lockutils [None req-0e18b964-96a3-46d4-a9b9-4704ba1dc707 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Acquired lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 960.240429] env[62070]: DEBUG oslo_concurrency.lockutils [None req-0e18b964-96a3-46d4-a9b9-4704ba1dc707 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 960.240694] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dbaacda5-0288-4a17-96be-d78724ce8e3c {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.246870] env[62070]: DEBUG nova.policy [None req-5f812c03-0b47-438f-9e1c-ee619b6efa11 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7534320dee8f486e90f5174aa94d00bd', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '925dff51764c4b56ae7ea05fbde2ecdd', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62070) authorize /opt/stack/nova/nova/policy.py:201}} [ 960.249287] env[62070]: DEBUG oslo_vmware.api [None req-0e18b964-96a3-46d4-a9b9-4704ba1dc707 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Waiting for the task: (returnval){ [ 960.249287] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]529d682a-68c1-7445-0f65-45840e572cb4" [ 960.249287] env[62070]: _type = "Task" [ 960.249287] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 960.259726] env[62070]: DEBUG oslo_vmware.api [None req-0e18b964-96a3-46d4-a9b9-4704ba1dc707 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]529d682a-68c1-7445-0f65-45840e572cb4, 'name': SearchDatastore_Task, 'duration_secs': 0.011213} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 960.260015] env[62070]: DEBUG oslo_concurrency.lockutils [None req-0e18b964-96a3-46d4-a9b9-4704ba1dc707 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Releasing lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 960.260252] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-0e18b964-96a3-46d4-a9b9-4704ba1dc707 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 2c58db1d-405f-4489-85db-c74723be4a8d] Processing image 43ea607c-7ece-4601-9b11-75c6a16aa7dd {{(pid=62070) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 960.260490] env[62070]: DEBUG oslo_concurrency.lockutils [None req-0e18b964-96a3-46d4-a9b9-4704ba1dc707 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 960.260643] env[62070]: DEBUG oslo_concurrency.lockutils [None req-0e18b964-96a3-46d4-a9b9-4704ba1dc707 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Acquired lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 960.260826] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-0e18b964-96a3-46d4-a9b9-4704ba1dc707 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 960.261119] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c0381507-9b16-4290-8f81-deef9e31326e {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.270266] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-0e18b964-96a3-46d4-a9b9-4704ba1dc707 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 960.270473] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-0e18b964-96a3-46d4-a9b9-4704ba1dc707 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62070) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 960.271199] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9dd7c8b3-2992-4c83-a29f-c7e39eb146e0 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.281928] env[62070]: DEBUG oslo_vmware.api [None req-0e18b964-96a3-46d4-a9b9-4704ba1dc707 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Waiting for the task: (returnval){ [ 960.281928] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]52df963f-5354-c730-0b54-2f91e4c42eea" [ 960.281928] env[62070]: _type = "Task" [ 960.281928] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 960.290460] env[62070]: DEBUG oslo_vmware.api [None req-0e18b964-96a3-46d4-a9b9-4704ba1dc707 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52df963f-5354-c730-0b54-2f91e4c42eea, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.301702] env[62070]: DEBUG nova.compute.manager [None req-242f2c71-01fd-4276-a650-74f4ae5a169d tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] [instance: 53a1791d-38fd-4721-b82c-2f0922348300] Start spawning the instance on the hypervisor. {{(pid=62070) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 960.329363] env[62070]: DEBUG nova.compute.manager [req-6bb2931c-16cb-497b-8dbf-e2b7b6e50196 req-ecbe8d7b-42b3-4640-8ea7-07075ac6b11e service nova] [instance: cf52cee8-874e-44e8-a36e-49ac20f3e312] Received event network-changed-c06feb60-bfb1-47ea-8764-52391d9b0b78 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 960.329621] env[62070]: DEBUG nova.compute.manager [req-6bb2931c-16cb-497b-8dbf-e2b7b6e50196 req-ecbe8d7b-42b3-4640-8ea7-07075ac6b11e service nova] [instance: cf52cee8-874e-44e8-a36e-49ac20f3e312] Refreshing instance network info cache due to event network-changed-c06feb60-bfb1-47ea-8764-52391d9b0b78. 
{{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 960.329851] env[62070]: DEBUG oslo_concurrency.lockutils [req-6bb2931c-16cb-497b-8dbf-e2b7b6e50196 req-ecbe8d7b-42b3-4640-8ea7-07075ac6b11e service nova] Acquiring lock "refresh_cache-cf52cee8-874e-44e8-a36e-49ac20f3e312" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 960.330009] env[62070]: DEBUG oslo_concurrency.lockutils [req-6bb2931c-16cb-497b-8dbf-e2b7b6e50196 req-ecbe8d7b-42b3-4640-8ea7-07075ac6b11e service nova] Acquired lock "refresh_cache-cf52cee8-874e-44e8-a36e-49ac20f3e312" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 960.330177] env[62070]: DEBUG nova.network.neutron [req-6bb2931c-16cb-497b-8dbf-e2b7b6e50196 req-ecbe8d7b-42b3-4640-8ea7-07075ac6b11e service nova] [instance: cf52cee8-874e-44e8-a36e-49ac20f3e312] Refreshing network info cache for port c06feb60-bfb1-47ea-8764-52391d9b0b78 {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 960.348922] env[62070]: DEBUG nova.virt.hardware [None req-242f2c71-01fd-4276-a650-74f4ae5a169d tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T09:21:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T09:21:17Z,direct_url=,disk_format='vmdk',id=43ea607c-7ece-4601-9b11-75c6a16aa7dd,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9d42cb2bbadf40d6b35f237f71234611',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T09:21:18Z,virtual_size=,visibility=), allow threads: False {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 960.349193] env[62070]: DEBUG nova.virt.hardware [None req-242f2c71-01fd-4276-a650-74f4ae5a169d tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Flavor limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 960.349357] env[62070]: DEBUG nova.virt.hardware [None req-242f2c71-01fd-4276-a650-74f4ae5a169d tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Image limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 960.349546] env[62070]: DEBUG nova.virt.hardware [None req-242f2c71-01fd-4276-a650-74f4ae5a169d tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Flavor pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 960.349697] env[62070]: DEBUG nova.virt.hardware [None req-242f2c71-01fd-4276-a650-74f4ae5a169d tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Image pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 960.349845] env[62070]: DEBUG nova.virt.hardware [None req-242f2c71-01fd-4276-a650-74f4ae5a169d tempest-ServersNegativeTestJSON-2028058905 
tempest-ServersNegativeTestJSON-2028058905-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 960.350064] env[62070]: DEBUG nova.virt.hardware [None req-242f2c71-01fd-4276-a650-74f4ae5a169d tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 960.350236] env[62070]: DEBUG nova.virt.hardware [None req-242f2c71-01fd-4276-a650-74f4ae5a169d tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 960.350406] env[62070]: DEBUG nova.virt.hardware [None req-242f2c71-01fd-4276-a650-74f4ae5a169d tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Got 1 possible topologies {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 960.350573] env[62070]: DEBUG nova.virt.hardware [None req-242f2c71-01fd-4276-a650-74f4ae5a169d tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 960.350753] env[62070]: DEBUG nova.virt.hardware [None req-242f2c71-01fd-4276-a650-74f4ae5a169d tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 960.352010] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7ff1865-44c2-41c5-abb5-9bd4ab8ae517 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.360878] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e69d35dc-b148-467f-af0f-f57af48a7b45 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.446891] env[62070]: DEBUG oslo_vmware.api [None req-49a2a495-4105-4d6c-b6c7-f51ce433ccfa tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Task: {'id': task-1122141, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.650663] env[62070]: DEBUG oslo_concurrency.lockutils [None req-26f7fbc8-03f4-42ad-82c9-20e385f600f8 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.365s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 960.650946] env[62070]: DEBUG nova.compute.manager [None req-26f7fbc8-03f4-42ad-82c9-20e385f600f8 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] [instance: a5cba512-9b50-4ca3-93eb-345be12dc588] Start building networks asynchronously for instance. {{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 960.654354] env[62070]: DEBUG oslo_concurrency.lockutils [None req-f98075f3-04a2-432f-a2ed-d24c86458581 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 10.221s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 960.655843] env[62070]: INFO nova.compute.claims [None req-f98075f3-04a2-432f-a2ed-d24c86458581 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: 1d595bc8-ab51-4443-bf32-079078f3133b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 960.738869] env[62070]: DEBUG nova.compute.manager [req-38290f0d-d579-46df-a93d-e910196a5202 req-db243019-6bab-4206-8e98-bf563eff53f4 service nova] [instance: 53a1791d-38fd-4721-b82c-2f0922348300] Received event network-vif-plugged-2c6759e4-b6e7-4b67-b06d-d38d6043d3b2 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 960.739204] env[62070]: DEBUG oslo_concurrency.lockutils [req-38290f0d-d579-46df-a93d-e910196a5202 req-db243019-6bab-4206-8e98-bf563eff53f4 service nova] Acquiring lock "53a1791d-38fd-4721-b82c-2f0922348300-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 960.739495] env[62070]: DEBUG oslo_concurrency.lockutils [req-38290f0d-d579-46df-a93d-e910196a5202 req-db243019-6bab-4206-8e98-bf563eff53f4 service nova] Lock "53a1791d-38fd-4721-b82c-2f0922348300-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 960.739610] env[62070]: DEBUG oslo_concurrency.lockutils [req-38290f0d-d579-46df-a93d-e910196a5202 req-db243019-6bab-4206-8e98-bf563eff53f4 service nova] Lock "53a1791d-38fd-4721-b82c-2f0922348300-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 960.739786] env[62070]: DEBUG nova.compute.manager [req-38290f0d-d579-46df-a93d-e910196a5202 req-db243019-6bab-4206-8e98-bf563eff53f4 service nova] [instance: 53a1791d-38fd-4721-b82c-2f0922348300] No waiting events found dispatching network-vif-plugged-2c6759e4-b6e7-4b67-b06d-d38d6043d3b2 {{(pid=62070) pop_instance_event 
/opt/stack/nova/nova/compute/manager.py:320}} [ 960.740489] env[62070]: WARNING nova.compute.manager [req-38290f0d-d579-46df-a93d-e910196a5202 req-db243019-6bab-4206-8e98-bf563eff53f4 service nova] [instance: 53a1791d-38fd-4721-b82c-2f0922348300] Received unexpected event network-vif-plugged-2c6759e4-b6e7-4b67-b06d-d38d6043d3b2 for instance with vm_state building and task_state spawning. [ 960.795994] env[62070]: DEBUG oslo_vmware.api [None req-0e18b964-96a3-46d4-a9b9-4704ba1dc707 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52df963f-5354-c730-0b54-2f91e4c42eea, 'name': SearchDatastore_Task, 'duration_secs': 0.008669} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 960.797027] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7e0529cd-e83d-42e4-936b-483939a3e9d1 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.803174] env[62070]: DEBUG oslo_vmware.api [None req-0e18b964-96a3-46d4-a9b9-4704ba1dc707 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Waiting for the task: (returnval){ [ 960.803174] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]524f517f-6cbf-f88c-cd5f-5a8e3743e6b9" [ 960.803174] env[62070]: _type = "Task" [ 960.803174] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 960.813373] env[62070]: DEBUG oslo_vmware.api [None req-0e18b964-96a3-46d4-a9b9-4704ba1dc707 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]524f517f-6cbf-f88c-cd5f-5a8e3743e6b9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.891575] env[62070]: DEBUG nova.network.neutron [None req-242f2c71-01fd-4276-a650-74f4ae5a169d tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] [instance: 53a1791d-38fd-4721-b82c-2f0922348300] Successfully updated port: 2c6759e4-b6e7-4b67-b06d-d38d6043d3b2 {{(pid=62070) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 960.950732] env[62070]: DEBUG oslo_vmware.api [None req-49a2a495-4105-4d6c-b6c7-f51ce433ccfa tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Task: {'id': task-1122141, 'name': RemoveSnapshot_Task, 'duration_secs': 0.567156} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 960.951145] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-49a2a495-4105-4d6c-b6c7-f51ce433ccfa tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 519cad6a-ebe0-42db-a19e-27249b83436e] Deleted Snapshot of the VM instance {{(pid=62070) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 961.083743] env[62070]: DEBUG nova.network.neutron [req-6bb2931c-16cb-497b-8dbf-e2b7b6e50196 req-ecbe8d7b-42b3-4640-8ea7-07075ac6b11e service nova] [instance: cf52cee8-874e-44e8-a36e-49ac20f3e312] Updated VIF entry in instance network info cache for port c06feb60-bfb1-47ea-8764-52391d9b0b78. 
{{(pid=62070) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 961.083743] env[62070]: DEBUG nova.network.neutron [req-6bb2931c-16cb-497b-8dbf-e2b7b6e50196 req-ecbe8d7b-42b3-4640-8ea7-07075ac6b11e service nova] [instance: cf52cee8-874e-44e8-a36e-49ac20f3e312] Updating instance_info_cache with network_info: [{"id": "c06feb60-bfb1-47ea-8764-52391d9b0b78", "address": "fa:16:3e:66:3c:b1", "network": {"id": "48dc51c7-cfa4-452e-9d72-2968d9a40dfa", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-274800531-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.203", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "925dff51764c4b56ae7ea05fbde2ecdd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2c7c1b46-cb81-45da-b5aa-7905d4da5854", "external-id": "nsx-vlan-transportzone-15", "segmentation_id": 15, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc06feb60-bf", "ovs_interfaceid": "c06feb60-bfb1-47ea-8764-52391d9b0b78", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 961.162258] env[62070]: DEBUG nova.compute.utils [None req-26f7fbc8-03f4-42ad-82c9-20e385f600f8 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Using /dev/sd instead of None {{(pid=62070) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 961.165083] env[62070]: DEBUG nova.compute.manager [None req-26f7fbc8-03f4-42ad-82c9-20e385f600f8 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] [instance: a5cba512-9b50-4ca3-93eb-345be12dc588] Allocating IP information in the background. 
{{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 961.165633] env[62070]: DEBUG nova.network.neutron [None req-26f7fbc8-03f4-42ad-82c9-20e385f600f8 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] [instance: a5cba512-9b50-4ca3-93eb-345be12dc588] allocate_for_instance() {{(pid=62070) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 961.209834] env[62070]: DEBUG nova.policy [None req-26f7fbc8-03f4-42ad-82c9-20e385f600f8 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a91eca948b964f1885f1effb82ea35dc', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '735d24ccc5614660a5b34d77af648f94', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62070) authorize /opt/stack/nova/nova/policy.py:201}} [ 961.314885] env[62070]: DEBUG oslo_vmware.api [None req-0e18b964-96a3-46d4-a9b9-4704ba1dc707 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]524f517f-6cbf-f88c-cd5f-5a8e3743e6b9, 'name': SearchDatastore_Task, 'duration_secs': 0.010401} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 961.318442] env[62070]: DEBUG oslo_concurrency.lockutils [None req-0e18b964-96a3-46d4-a9b9-4704ba1dc707 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Releasing lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 961.318442] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-0e18b964-96a3-46d4-a9b9-4704ba1dc707 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore1] 2c58db1d-405f-4489-85db-c74723be4a8d/2c58db1d-405f-4489-85db-c74723be4a8d.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 961.318442] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a3059c3c-5814-4543-b752-c25ebfa89a98 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.325058] env[62070]: DEBUG oslo_vmware.api [None req-0e18b964-96a3-46d4-a9b9-4704ba1dc707 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Waiting for the task: (returnval){ [ 961.325058] env[62070]: value = "task-1122142" [ 961.325058] env[62070]: _type = "Task" [ 961.325058] env[62070]: } to complete. 
{{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 961.334157] env[62070]: DEBUG oslo_vmware.api [None req-0e18b964-96a3-46d4-a9b9-4704ba1dc707 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Task: {'id': task-1122142, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 961.396241] env[62070]: DEBUG oslo_concurrency.lockutils [None req-242f2c71-01fd-4276-a650-74f4ae5a169d tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Acquiring lock "refresh_cache-53a1791d-38fd-4721-b82c-2f0922348300" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 961.396241] env[62070]: DEBUG oslo_concurrency.lockutils [None req-242f2c71-01fd-4276-a650-74f4ae5a169d tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Acquired lock "refresh_cache-53a1791d-38fd-4721-b82c-2f0922348300" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 961.396241] env[62070]: DEBUG nova.network.neutron [None req-242f2c71-01fd-4276-a650-74f4ae5a169d tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] [instance: 53a1791d-38fd-4721-b82c-2f0922348300] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 961.461458] env[62070]: WARNING nova.compute.manager [None req-49a2a495-4105-4d6c-b6c7-f51ce433ccfa tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 519cad6a-ebe0-42db-a19e-27249b83436e] Image not found during snapshot: nova.exception.ImageNotFound: Image 26ae3f8a-c05b-4bd4-8dc9-ea50dab18ec1 could not be found. [ 961.580430] env[62070]: DEBUG nova.network.neutron [None req-26f7fbc8-03f4-42ad-82c9-20e385f600f8 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] [instance: a5cba512-9b50-4ca3-93eb-345be12dc588] Successfully created port: 79f2a280-e16e-4dcd-9a80-21c1fc225a8c {{(pid=62070) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 961.589639] env[62070]: DEBUG oslo_concurrency.lockutils [req-6bb2931c-16cb-497b-8dbf-e2b7b6e50196 req-ecbe8d7b-42b3-4640-8ea7-07075ac6b11e service nova] Releasing lock "refresh_cache-cf52cee8-874e-44e8-a36e-49ac20f3e312" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 961.668036] env[62070]: DEBUG nova.compute.manager [None req-26f7fbc8-03f4-42ad-82c9-20e385f600f8 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] [instance: a5cba512-9b50-4ca3-93eb-345be12dc588] Start building block device mappings for instance. {{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 961.843018] env[62070]: DEBUG oslo_vmware.api [None req-0e18b964-96a3-46d4-a9b9-4704ba1dc707 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Task: {'id': task-1122142, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.463048} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 961.843018] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-0e18b964-96a3-46d4-a9b9-4704ba1dc707 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore1] 2c58db1d-405f-4489-85db-c74723be4a8d/2c58db1d-405f-4489-85db-c74723be4a8d.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 961.843018] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-0e18b964-96a3-46d4-a9b9-4704ba1dc707 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 2c58db1d-405f-4489-85db-c74723be4a8d] Extending root virtual disk to 1048576 {{(pid=62070) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 961.843018] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-955e4d86-8d71-4eaa-ae46-87ba9a03729d {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.851494] env[62070]: DEBUG oslo_vmware.api [None req-0e18b964-96a3-46d4-a9b9-4704ba1dc707 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Waiting for the task: (returnval){ [ 961.851494] env[62070]: value = "task-1122143" [ 961.851494] env[62070]: _type = "Task" [ 961.851494] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 961.860798] env[62070]: DEBUG oslo_vmware.api [None req-0e18b964-96a3-46d4-a9b9-4704ba1dc707 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Task: {'id': task-1122143, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 961.933071] env[62070]: DEBUG nova.network.neutron [None req-242f2c71-01fd-4276-a650-74f4ae5a169d tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] [instance: 53a1791d-38fd-4721-b82c-2f0922348300] Instance cache missing network info. 
{{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 961.982540] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ea7e7cb-c61c-493b-98d5-7f77023b072b {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.994449] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b029862-d60d-405d-ba28-6ac8eff30657 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.035911] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bafde8e-59a0-4fa3-b9c8-49684d8170cf {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.044527] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b635ed9a-56d1-4534-9f65-54b0965518eb {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.062374] env[62070]: DEBUG nova.compute.provider_tree [None req-f98075f3-04a2-432f-a2ed-d24c86458581 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Updating inventory in ProviderTree for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 962.065652] env[62070]: DEBUG nova.network.neutron [None req-5f812c03-0b47-438f-9e1c-ee619b6efa11 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] [instance: cf52cee8-874e-44e8-a36e-49ac20f3e312] Successfully updated port: 60714fe7-f6bc-4f1f-b4d0-a0b45b5a8f70 {{(pid=62070) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 962.164822] env[62070]: DEBUG nova.network.neutron [None req-242f2c71-01fd-4276-a650-74f4ae5a169d tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] [instance: 53a1791d-38fd-4721-b82c-2f0922348300] Updating instance_info_cache with network_info: [{"id": "2c6759e4-b6e7-4b67-b06d-d38d6043d3b2", "address": "fa:16:3e:11:96:b8", "network": {"id": "6a62b79f-a98b-4518-86cb-facc7b77da1d", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-2107556336-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "772f7fcee5f44b899b6df797e1ed5ddd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78e1ebb0-0130-446b-bf73-a0e59bbb95cc", "external-id": "nsx-vlan-transportzone-414", "segmentation_id": 414, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2c6759e4-b6", 
"ovs_interfaceid": "2c6759e4-b6e7-4b67-b06d-d38d6043d3b2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 962.361462] env[62070]: DEBUG oslo_vmware.api [None req-0e18b964-96a3-46d4-a9b9-4704ba1dc707 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Task: {'id': task-1122143, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.093709} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 962.361742] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-0e18b964-96a3-46d4-a9b9-4704ba1dc707 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 2c58db1d-405f-4489-85db-c74723be4a8d] Extended root virtual disk {{(pid=62070) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 962.363106] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5dca33ef-da75-4758-9e4f-5a4725e93600 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.385609] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-0e18b964-96a3-46d4-a9b9-4704ba1dc707 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 2c58db1d-405f-4489-85db-c74723be4a8d] Reconfiguring VM instance instance-00000055 to attach disk [datastore1] 2c58db1d-405f-4489-85db-c74723be4a8d/2c58db1d-405f-4489-85db-c74723be4a8d.vmdk or device None with type sparse {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 962.386155] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-806a4b93-496c-429e-acd3-b922711bc10b {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.409938] env[62070]: DEBUG oslo_vmware.api [None req-0e18b964-96a3-46d4-a9b9-4704ba1dc707 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Waiting for the task: (returnval){ [ 962.409938] env[62070]: value = "task-1122144" [ 962.409938] env[62070]: _type = "Task" [ 962.409938] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 962.419025] env[62070]: DEBUG oslo_vmware.api [None req-0e18b964-96a3-46d4-a9b9-4704ba1dc707 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Task: {'id': task-1122144, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 962.464324] env[62070]: DEBUG nova.compute.manager [req-5a38d61d-37c2-4302-8694-5f28cc5b6aa1 req-906e0a95-e70e-4276-9c97-f7a72541521f service nova] [instance: 7dc27fe6-495f-498d-88fe-a99ddc19a21c] Received event network-changed-cac26624-11c7-45a9-acb3-3e86b7232ab2 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 962.464484] env[62070]: DEBUG nova.compute.manager [req-5a38d61d-37c2-4302-8694-5f28cc5b6aa1 req-906e0a95-e70e-4276-9c97-f7a72541521f service nova] [instance: 7dc27fe6-495f-498d-88fe-a99ddc19a21c] Refreshing instance network info cache due to event network-changed-cac26624-11c7-45a9-acb3-3e86b7232ab2. {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 962.464701] env[62070]: DEBUG oslo_concurrency.lockutils [req-5a38d61d-37c2-4302-8694-5f28cc5b6aa1 req-906e0a95-e70e-4276-9c97-f7a72541521f service nova] Acquiring lock "refresh_cache-7dc27fe6-495f-498d-88fe-a99ddc19a21c" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 962.464853] env[62070]: DEBUG oslo_concurrency.lockutils [req-5a38d61d-37c2-4302-8694-5f28cc5b6aa1 req-906e0a95-e70e-4276-9c97-f7a72541521f service nova] Acquired lock "refresh_cache-7dc27fe6-495f-498d-88fe-a99ddc19a21c" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 962.465149] env[62070]: DEBUG nova.network.neutron [req-5a38d61d-37c2-4302-8694-5f28cc5b6aa1 req-906e0a95-e70e-4276-9c97-f7a72541521f service nova] [instance: 7dc27fe6-495f-498d-88fe-a99ddc19a21c] Refreshing network info cache for port cac26624-11c7-45a9-acb3-3e86b7232ab2 {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 962.568882] env[62070]: DEBUG oslo_concurrency.lockutils [None req-5f812c03-0b47-438f-9e1c-ee619b6efa11 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Acquiring lock "refresh_cache-cf52cee8-874e-44e8-a36e-49ac20f3e312" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 962.569199] env[62070]: DEBUG oslo_concurrency.lockutils [None req-5f812c03-0b47-438f-9e1c-ee619b6efa11 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Acquired lock "refresh_cache-cf52cee8-874e-44e8-a36e-49ac20f3e312" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 962.569448] env[62070]: DEBUG nova.network.neutron [None req-5f812c03-0b47-438f-9e1c-ee619b6efa11 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] [instance: cf52cee8-874e-44e8-a36e-49ac20f3e312] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 962.600048] env[62070]: DEBUG nova.scheduler.client.report [None req-f98075f3-04a2-432f-a2ed-d24c86458581 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Updated inventory for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 with generation 121 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 
'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 962.600475] env[62070]: DEBUG nova.compute.provider_tree [None req-f98075f3-04a2-432f-a2ed-d24c86458581 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Updating resource provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 generation from 121 to 122 during operation: update_inventory {{(pid=62070) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 962.600816] env[62070]: DEBUG nova.compute.provider_tree [None req-f98075f3-04a2-432f-a2ed-d24c86458581 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Updating inventory in ProviderTree for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 962.668025] env[62070]: DEBUG oslo_concurrency.lockutils [None req-242f2c71-01fd-4276-a650-74f4ae5a169d tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Releasing lock "refresh_cache-53a1791d-38fd-4721-b82c-2f0922348300" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 962.668277] env[62070]: DEBUG nova.compute.manager [None req-242f2c71-01fd-4276-a650-74f4ae5a169d tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] [instance: 53a1791d-38fd-4721-b82c-2f0922348300] Instance network_info: |[{"id": "2c6759e4-b6e7-4b67-b06d-d38d6043d3b2", "address": "fa:16:3e:11:96:b8", "network": {"id": "6a62b79f-a98b-4518-86cb-facc7b77da1d", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-2107556336-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "772f7fcee5f44b899b6df797e1ed5ddd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78e1ebb0-0130-446b-bf73-a0e59bbb95cc", "external-id": "nsx-vlan-transportzone-414", "segmentation_id": 414, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2c6759e4-b6", "ovs_interfaceid": "2c6759e4-b6e7-4b67-b06d-d38d6043d3b2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 962.668898] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-242f2c71-01fd-4276-a650-74f4ae5a169d tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] [instance: 53a1791d-38fd-4721-b82c-2f0922348300] Instance 
VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:11:96:b8', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '78e1ebb0-0130-446b-bf73-a0e59bbb95cc', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2c6759e4-b6e7-4b67-b06d-d38d6043d3b2', 'vif_model': 'vmxnet3'}] {{(pid=62070) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 962.677302] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-242f2c71-01fd-4276-a650-74f4ae5a169d tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Creating folder: Project (772f7fcee5f44b899b6df797e1ed5ddd). Parent ref: group-v245319. {{(pid=62070) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 962.678931] env[62070]: DEBUG nova.compute.manager [None req-26f7fbc8-03f4-42ad-82c9-20e385f600f8 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] [instance: a5cba512-9b50-4ca3-93eb-345be12dc588] Start spawning the instance on the hypervisor. {{(pid=62070) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 962.680990] env[62070]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-067aacc5-f471-48b2-9634-aa9adefd94ea {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.694420] env[62070]: INFO nova.virt.vmwareapi.vm_util [None req-242f2c71-01fd-4276-a650-74f4ae5a169d tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Created folder: Project (772f7fcee5f44b899b6df797e1ed5ddd) in parent group-v245319. [ 962.694652] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-242f2c71-01fd-4276-a650-74f4ae5a169d tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Creating folder: Instances. Parent ref: group-v245470. 
{{(pid=62070) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 962.695091] env[62070]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-819cefb0-0fe4-44b1-9ef5-a56782f72ac1 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.705536] env[62070]: DEBUG nova.virt.hardware [None req-26f7fbc8-03f4-42ad-82c9-20e385f600f8 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T09:21:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T09:21:17Z,direct_url=,disk_format='vmdk',id=43ea607c-7ece-4601-9b11-75c6a16aa7dd,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9d42cb2bbadf40d6b35f237f71234611',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T09:21:18Z,virtual_size=,visibility=), allow threads: False {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 962.705794] env[62070]: DEBUG nova.virt.hardware [None req-26f7fbc8-03f4-42ad-82c9-20e385f600f8 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Flavor limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 962.705954] env[62070]: DEBUG nova.virt.hardware [None req-26f7fbc8-03f4-42ad-82c9-20e385f600f8 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Image limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 962.706156] env[62070]: DEBUG nova.virt.hardware [None req-26f7fbc8-03f4-42ad-82c9-20e385f600f8 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Flavor pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 962.706478] env[62070]: DEBUG nova.virt.hardware [None req-26f7fbc8-03f4-42ad-82c9-20e385f600f8 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Image pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 962.706650] env[62070]: DEBUG nova.virt.hardware [None req-26f7fbc8-03f4-42ad-82c9-20e385f600f8 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 962.706869] env[62070]: DEBUG nova.virt.hardware [None req-26f7fbc8-03f4-42ad-82c9-20e385f600f8 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 962.707045] env[62070]: DEBUG nova.virt.hardware [None req-26f7fbc8-03f4-42ad-82c9-20e385f600f8 
tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 962.707222] env[62070]: DEBUG nova.virt.hardware [None req-26f7fbc8-03f4-42ad-82c9-20e385f600f8 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Got 1 possible topologies {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 962.707392] env[62070]: DEBUG nova.virt.hardware [None req-26f7fbc8-03f4-42ad-82c9-20e385f600f8 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 962.707626] env[62070]: DEBUG nova.virt.hardware [None req-26f7fbc8-03f4-42ad-82c9-20e385f600f8 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 962.708566] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6caaa577-9cfe-4270-a0a7-d8d81d83a6d2 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.713281] env[62070]: INFO nova.virt.vmwareapi.vm_util [None req-242f2c71-01fd-4276-a650-74f4ae5a169d tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Created folder: Instances in parent group-v245470. [ 962.713536] env[62070]: DEBUG oslo.service.loopingcall [None req-242f2c71-01fd-4276-a650-74f4ae5a169d tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 962.714107] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 53a1791d-38fd-4721-b82c-2f0922348300] Creating VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 962.714337] env[62070]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-cecde5ee-0fe7-4ccb-98cb-f2d161c44ca6 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.732909] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a4e74ae-1149-4e6c-ac9b-06270043d11a {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.738406] env[62070]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 962.738406] env[62070]: value = "task-1122147" [ 962.738406] env[62070]: _type = "Task" [ 962.738406] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 962.755184] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1122147, 'name': CreateVM_Task} progress is 6%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 962.828904] env[62070]: DEBUG nova.compute.manager [req-b4df7a76-8a65-4ccb-ab2c-5d6748c86b5f req-2962b559-0184-400b-8edc-10db336d6026 service nova] [instance: 53a1791d-38fd-4721-b82c-2f0922348300] Received event network-changed-2c6759e4-b6e7-4b67-b06d-d38d6043d3b2 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 962.829217] env[62070]: DEBUG nova.compute.manager [req-b4df7a76-8a65-4ccb-ab2c-5d6748c86b5f req-2962b559-0184-400b-8edc-10db336d6026 service nova] [instance: 53a1791d-38fd-4721-b82c-2f0922348300] Refreshing instance network info cache due to event network-changed-2c6759e4-b6e7-4b67-b06d-d38d6043d3b2. {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 962.829453] env[62070]: DEBUG oslo_concurrency.lockutils [req-b4df7a76-8a65-4ccb-ab2c-5d6748c86b5f req-2962b559-0184-400b-8edc-10db336d6026 service nova] Acquiring lock "refresh_cache-53a1791d-38fd-4721-b82c-2f0922348300" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 962.829605] env[62070]: DEBUG oslo_concurrency.lockutils [req-b4df7a76-8a65-4ccb-ab2c-5d6748c86b5f req-2962b559-0184-400b-8edc-10db336d6026 service nova] Acquired lock "refresh_cache-53a1791d-38fd-4721-b82c-2f0922348300" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 962.829778] env[62070]: DEBUG nova.network.neutron [req-b4df7a76-8a65-4ccb-ab2c-5d6748c86b5f req-2962b559-0184-400b-8edc-10db336d6026 service nova] [instance: 53a1791d-38fd-4721-b82c-2f0922348300] Refreshing network info cache for port 2c6759e4-b6e7-4b67-b06d-d38d6043d3b2 {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 962.838749] env[62070]: DEBUG oslo_concurrency.lockutils [None req-f5253e8c-bb7c-4acc-90f3-1d1ce621f83f tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Acquiring lock "519cad6a-ebe0-42db-a19e-27249b83436e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 962.838987] env[62070]: DEBUG oslo_concurrency.lockutils [None req-f5253e8c-bb7c-4acc-90f3-1d1ce621f83f tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Lock "519cad6a-ebe0-42db-a19e-27249b83436e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 962.839181] env[62070]: DEBUG oslo_concurrency.lockutils [None req-f5253e8c-bb7c-4acc-90f3-1d1ce621f83f tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Acquiring lock "519cad6a-ebe0-42db-a19e-27249b83436e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 962.839369] env[62070]: DEBUG oslo_concurrency.lockutils [None req-f5253e8c-bb7c-4acc-90f3-1d1ce621f83f tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Lock "519cad6a-ebe0-42db-a19e-27249b83436e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62070) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 962.839535] env[62070]: DEBUG oslo_concurrency.lockutils [None req-f5253e8c-bb7c-4acc-90f3-1d1ce621f83f tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Lock "519cad6a-ebe0-42db-a19e-27249b83436e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 962.841769] env[62070]: INFO nova.compute.manager [None req-f5253e8c-bb7c-4acc-90f3-1d1ce621f83f tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 519cad6a-ebe0-42db-a19e-27249b83436e] Terminating instance [ 962.844032] env[62070]: DEBUG nova.compute.manager [None req-f5253e8c-bb7c-4acc-90f3-1d1ce621f83f tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 519cad6a-ebe0-42db-a19e-27249b83436e] Start destroying the instance on the hypervisor. {{(pid=62070) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 962.844032] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-f5253e8c-bb7c-4acc-90f3-1d1ce621f83f tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 519cad6a-ebe0-42db-a19e-27249b83436e] Destroying instance {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 962.845015] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4db7b5e6-1e75-484a-9d60-6e12a66a71a4 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.853595] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-f5253e8c-bb7c-4acc-90f3-1d1ce621f83f tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 519cad6a-ebe0-42db-a19e-27249b83436e] Powering off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 962.854034] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9bf858f8-8074-4bd8-9045-fa009d58dcbd {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.861222] env[62070]: DEBUG oslo_vmware.api [None req-f5253e8c-bb7c-4acc-90f3-1d1ce621f83f tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Waiting for the task: (returnval){ [ 962.861222] env[62070]: value = "task-1122148" [ 962.861222] env[62070]: _type = "Task" [ 962.861222] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 962.870037] env[62070]: DEBUG oslo_vmware.api [None req-f5253e8c-bb7c-4acc-90f3-1d1ce621f83f tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Task: {'id': task-1122148, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 962.922074] env[62070]: DEBUG oslo_vmware.api [None req-0e18b964-96a3-46d4-a9b9-4704ba1dc707 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Task: {'id': task-1122144, 'name': ReconfigVM_Task, 'duration_secs': 0.312587} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 962.922242] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-0e18b964-96a3-46d4-a9b9-4704ba1dc707 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 2c58db1d-405f-4489-85db-c74723be4a8d] Reconfigured VM instance instance-00000055 to attach disk [datastore1] 2c58db1d-405f-4489-85db-c74723be4a8d/2c58db1d-405f-4489-85db-c74723be4a8d.vmdk or device None with type sparse {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 962.922835] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-42a031e8-eaee-40d2-b86e-773fe3c4e445 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.930615] env[62070]: DEBUG oslo_vmware.api [None req-0e18b964-96a3-46d4-a9b9-4704ba1dc707 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Waiting for the task: (returnval){ [ 962.930615] env[62070]: value = "task-1122149" [ 962.930615] env[62070]: _type = "Task" [ 962.930615] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 962.939742] env[62070]: DEBUG oslo_vmware.api [None req-0e18b964-96a3-46d4-a9b9-4704ba1dc707 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Task: {'id': task-1122149, 'name': Rename_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 963.108017] env[62070]: DEBUG oslo_concurrency.lockutils [None req-f98075f3-04a2-432f-a2ed-d24c86458581 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.453s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 963.108536] env[62070]: DEBUG nova.compute.manager [None req-f98075f3-04a2-432f-a2ed-d24c86458581 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: 1d595bc8-ab51-4443-bf32-079078f3133b] Start building networks asynchronously for instance. 
{{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 963.111351] env[62070]: DEBUG oslo_concurrency.lockutils [None req-2cdd6dc5-b80a-4fa8-acc2-8040ca3495d3 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 12.229s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 963.111573] env[62070]: DEBUG nova.objects.instance [None req-2cdd6dc5-b80a-4fa8-acc2-8040ca3495d3 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] [instance: 1440361b-d3b4-4c1c-995c-fe7ff99ee297] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62070) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 963.120030] env[62070]: WARNING nova.network.neutron [None req-5f812c03-0b47-438f-9e1c-ee619b6efa11 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] [instance: cf52cee8-874e-44e8-a36e-49ac20f3e312] 48dc51c7-cfa4-452e-9d72-2968d9a40dfa already exists in list: networks containing: ['48dc51c7-cfa4-452e-9d72-2968d9a40dfa']. ignoring it [ 963.205286] env[62070]: DEBUG nova.network.neutron [req-5a38d61d-37c2-4302-8694-5f28cc5b6aa1 req-906e0a95-e70e-4276-9c97-f7a72541521f service nova] [instance: 7dc27fe6-495f-498d-88fe-a99ddc19a21c] Updated VIF entry in instance network info cache for port cac26624-11c7-45a9-acb3-3e86b7232ab2. {{(pid=62070) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 963.205798] env[62070]: DEBUG nova.network.neutron [req-5a38d61d-37c2-4302-8694-5f28cc5b6aa1 req-906e0a95-e70e-4276-9c97-f7a72541521f service nova] [instance: 7dc27fe6-495f-498d-88fe-a99ddc19a21c] Updating instance_info_cache with network_info: [{"id": "cac26624-11c7-45a9-acb3-3e86b7232ab2", "address": "fa:16:3e:8c:68:96", "network": {"id": "754f4ec8-0bc6-4726-8b88-1a4e1a326699", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-293486644-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.209", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1a94db233e3a43dc9aa7cf887c6cb1f6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d2742ba-c3af-4412-877d-c2811dfeba46", "external-id": "nsx-vlan-transportzone-390", "segmentation_id": 390, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcac26624-11", "ovs_interfaceid": "cac26624-11c7-45a9-acb3-3e86b7232ab2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 963.250404] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1122147, 'name': CreateVM_Task, 'duration_secs': 0.402111} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 963.252968] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 53a1791d-38fd-4721-b82c-2f0922348300] Created VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 963.253622] env[62070]: DEBUG oslo_concurrency.lockutils [None req-242f2c71-01fd-4276-a650-74f4ae5a169d tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 963.253795] env[62070]: DEBUG oslo_concurrency.lockutils [None req-242f2c71-01fd-4276-a650-74f4ae5a169d tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Acquired lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 963.254134] env[62070]: DEBUG oslo_concurrency.lockutils [None req-242f2c71-01fd-4276-a650-74f4ae5a169d tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 963.254689] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ab20fe55-b6a2-4708-bc16-6b35a391d1bd {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.261330] env[62070]: DEBUG oslo_vmware.api [None req-242f2c71-01fd-4276-a650-74f4ae5a169d tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Waiting for the task: (returnval){ [ 963.261330] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]52d73130-4e90-5f4a-1b78-3cffc742a684" [ 963.261330] env[62070]: _type = "Task" [ 963.261330] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 963.269616] env[62070]: DEBUG oslo_vmware.api [None req-242f2c71-01fd-4276-a650-74f4ae5a169d tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52d73130-4e90-5f4a-1b78-3cffc742a684, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 963.337634] env[62070]: DEBUG nova.network.neutron [None req-26f7fbc8-03f4-42ad-82c9-20e385f600f8 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] [instance: a5cba512-9b50-4ca3-93eb-345be12dc588] Successfully updated port: 79f2a280-e16e-4dcd-9a80-21c1fc225a8c {{(pid=62070) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 963.376205] env[62070]: DEBUG oslo_vmware.api [None req-f5253e8c-bb7c-4acc-90f3-1d1ce621f83f tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Task: {'id': task-1122148, 'name': PowerOffVM_Task, 'duration_secs': 0.261909} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 963.379899] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-f5253e8c-bb7c-4acc-90f3-1d1ce621f83f tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 519cad6a-ebe0-42db-a19e-27249b83436e] Powered off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 963.380355] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-f5253e8c-bb7c-4acc-90f3-1d1ce621f83f tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 519cad6a-ebe0-42db-a19e-27249b83436e] Unregistering the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 963.380734] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6ec8b92d-c35e-4381-a635-c070c47079e9 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.444580] env[62070]: DEBUG oslo_vmware.api [None req-0e18b964-96a3-46d4-a9b9-4704ba1dc707 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Task: {'id': task-1122149, 'name': Rename_Task, 'duration_secs': 0.20231} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 963.444896] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-0e18b964-96a3-46d4-a9b9-4704ba1dc707 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 2c58db1d-405f-4489-85db-c74723be4a8d] Powering on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 963.445122] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-37c004bd-daed-478f-9dfb-1c813fba3269 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.454817] env[62070]: DEBUG oslo_vmware.api [None req-0e18b964-96a3-46d4-a9b9-4704ba1dc707 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Waiting for the task: (returnval){ [ 963.454817] env[62070]: value = "task-1122151" [ 963.454817] env[62070]: _type = "Task" [ 963.454817] env[62070]: } to complete. 
{{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 963.464671] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-f5253e8c-bb7c-4acc-90f3-1d1ce621f83f tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 519cad6a-ebe0-42db-a19e-27249b83436e] Unregistered the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 963.464983] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-f5253e8c-bb7c-4acc-90f3-1d1ce621f83f tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 519cad6a-ebe0-42db-a19e-27249b83436e] Deleting contents of the VM from datastore datastore1 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 963.465197] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-f5253e8c-bb7c-4acc-90f3-1d1ce621f83f tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Deleting the datastore file [datastore1] 519cad6a-ebe0-42db-a19e-27249b83436e {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 963.468620] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7e78eb06-93dc-4079-8bfe-98e7d148314d {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.470464] env[62070]: DEBUG oslo_vmware.api [None req-0e18b964-96a3-46d4-a9b9-4704ba1dc707 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Task: {'id': task-1122151, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 963.476932] env[62070]: DEBUG oslo_vmware.api [None req-f5253e8c-bb7c-4acc-90f3-1d1ce621f83f tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Waiting for the task: (returnval){ [ 963.476932] env[62070]: value = "task-1122152" [ 963.476932] env[62070]: _type = "Task" [ 963.476932] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 963.486755] env[62070]: DEBUG oslo_vmware.api [None req-f5253e8c-bb7c-4acc-90f3-1d1ce621f83f tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Task: {'id': task-1122152, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 963.510695] env[62070]: DEBUG nova.network.neutron [None req-5f812c03-0b47-438f-9e1c-ee619b6efa11 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] [instance: cf52cee8-874e-44e8-a36e-49ac20f3e312] Updating instance_info_cache with network_info: [{"id": "c06feb60-bfb1-47ea-8764-52391d9b0b78", "address": "fa:16:3e:66:3c:b1", "network": {"id": "48dc51c7-cfa4-452e-9d72-2968d9a40dfa", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-274800531-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.203", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "925dff51764c4b56ae7ea05fbde2ecdd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2c7c1b46-cb81-45da-b5aa-7905d4da5854", "external-id": "nsx-vlan-transportzone-15", "segmentation_id": 15, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc06feb60-bf", "ovs_interfaceid": "c06feb60-bfb1-47ea-8764-52391d9b0b78", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "60714fe7-f6bc-4f1f-b4d0-a0b45b5a8f70", "address": "fa:16:3e:7c:c4:2b", "network": {"id": "48dc51c7-cfa4-452e-9d72-2968d9a40dfa", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-274800531-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "925dff51764c4b56ae7ea05fbde2ecdd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2c7c1b46-cb81-45da-b5aa-7905d4da5854", "external-id": "nsx-vlan-transportzone-15", "segmentation_id": 15, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap60714fe7-f6", "ovs_interfaceid": "60714fe7-f6bc-4f1f-b4d0-a0b45b5a8f70", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 963.608234] env[62070]: DEBUG nova.network.neutron [req-b4df7a76-8a65-4ccb-ab2c-5d6748c86b5f req-2962b559-0184-400b-8edc-10db336d6026 service nova] [instance: 53a1791d-38fd-4721-b82c-2f0922348300] Updated VIF entry in instance network info cache for port 2c6759e4-b6e7-4b67-b06d-d38d6043d3b2. 
{{(pid=62070) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 963.608385] env[62070]: DEBUG nova.network.neutron [req-b4df7a76-8a65-4ccb-ab2c-5d6748c86b5f req-2962b559-0184-400b-8edc-10db336d6026 service nova] [instance: 53a1791d-38fd-4721-b82c-2f0922348300] Updating instance_info_cache with network_info: [{"id": "2c6759e4-b6e7-4b67-b06d-d38d6043d3b2", "address": "fa:16:3e:11:96:b8", "network": {"id": "6a62b79f-a98b-4518-86cb-facc7b77da1d", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-2107556336-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "772f7fcee5f44b899b6df797e1ed5ddd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78e1ebb0-0130-446b-bf73-a0e59bbb95cc", "external-id": "nsx-vlan-transportzone-414", "segmentation_id": 414, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2c6759e4-b6", "ovs_interfaceid": "2c6759e4-b6e7-4b67-b06d-d38d6043d3b2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 963.616237] env[62070]: DEBUG nova.compute.utils [None req-f98075f3-04a2-432f-a2ed-d24c86458581 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Using /dev/sd instead of None {{(pid=62070) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 963.620134] env[62070]: DEBUG nova.compute.manager [None req-f98075f3-04a2-432f-a2ed-d24c86458581 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: 1d595bc8-ab51-4443-bf32-079078f3133b] Allocating IP information in the background. 
{{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 963.620307] env[62070]: DEBUG nova.network.neutron [None req-f98075f3-04a2-432f-a2ed-d24c86458581 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: 1d595bc8-ab51-4443-bf32-079078f3133b] allocate_for_instance() {{(pid=62070) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 963.660085] env[62070]: DEBUG nova.policy [None req-f98075f3-04a2-432f-a2ed-d24c86458581 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'db9baf29d0b5489da2657286bfd695c0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '91e246e32f29422e90fae974cfee9d8f', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62070) authorize /opt/stack/nova/nova/policy.py:201}} [ 963.708857] env[62070]: DEBUG oslo_concurrency.lockutils [req-5a38d61d-37c2-4302-8694-5f28cc5b6aa1 req-906e0a95-e70e-4276-9c97-f7a72541521f service nova] Releasing lock "refresh_cache-7dc27fe6-495f-498d-88fe-a99ddc19a21c" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 963.709175] env[62070]: DEBUG nova.compute.manager [req-5a38d61d-37c2-4302-8694-5f28cc5b6aa1 req-906e0a95-e70e-4276-9c97-f7a72541521f service nova] [instance: 7dc27fe6-495f-498d-88fe-a99ddc19a21c] Received event network-changed-cac26624-11c7-45a9-acb3-3e86b7232ab2 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 963.709406] env[62070]: DEBUG nova.compute.manager [req-5a38d61d-37c2-4302-8694-5f28cc5b6aa1 req-906e0a95-e70e-4276-9c97-f7a72541521f service nova] [instance: 7dc27fe6-495f-498d-88fe-a99ddc19a21c] Refreshing instance network info cache due to event network-changed-cac26624-11c7-45a9-acb3-3e86b7232ab2. 
{{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 963.709711] env[62070]: DEBUG oslo_concurrency.lockutils [req-5a38d61d-37c2-4302-8694-5f28cc5b6aa1 req-906e0a95-e70e-4276-9c97-f7a72541521f service nova] Acquiring lock "refresh_cache-7dc27fe6-495f-498d-88fe-a99ddc19a21c" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 963.709916] env[62070]: DEBUG oslo_concurrency.lockutils [req-5a38d61d-37c2-4302-8694-5f28cc5b6aa1 req-906e0a95-e70e-4276-9c97-f7a72541521f service nova] Acquired lock "refresh_cache-7dc27fe6-495f-498d-88fe-a99ddc19a21c" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 963.710156] env[62070]: DEBUG nova.network.neutron [req-5a38d61d-37c2-4302-8694-5f28cc5b6aa1 req-906e0a95-e70e-4276-9c97-f7a72541521f service nova] [instance: 7dc27fe6-495f-498d-88fe-a99ddc19a21c] Refreshing network info cache for port cac26624-11c7-45a9-acb3-3e86b7232ab2 {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 963.772766] env[62070]: DEBUG oslo_vmware.api [None req-242f2c71-01fd-4276-a650-74f4ae5a169d tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52d73130-4e90-5f4a-1b78-3cffc742a684, 'name': SearchDatastore_Task, 'duration_secs': 0.041499} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 963.773158] env[62070]: DEBUG oslo_concurrency.lockutils [None req-242f2c71-01fd-4276-a650-74f4ae5a169d tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Releasing lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 963.773448] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-242f2c71-01fd-4276-a650-74f4ae5a169d tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] [instance: 53a1791d-38fd-4721-b82c-2f0922348300] Processing image 43ea607c-7ece-4601-9b11-75c6a16aa7dd {{(pid=62070) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 963.773929] env[62070]: DEBUG oslo_concurrency.lockutils [None req-242f2c71-01fd-4276-a650-74f4ae5a169d tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 963.774153] env[62070]: DEBUG oslo_concurrency.lockutils [None req-242f2c71-01fd-4276-a650-74f4ae5a169d tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Acquired lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 963.774387] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-242f2c71-01fd-4276-a650-74f4ae5a169d tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Creating directory with path [datastore1] devstack-image-cache_base 
{{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 963.774703] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f065bdc4-c59a-4461-a146-af1e6007d10c {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.784176] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-242f2c71-01fd-4276-a650-74f4ae5a169d tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 963.784402] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-242f2c71-01fd-4276-a650-74f4ae5a169d tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62070) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 963.785230] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7a5cbc4e-65e7-4ac7-a3d8-6ea169811012 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.791264] env[62070]: DEBUG oslo_vmware.api [None req-242f2c71-01fd-4276-a650-74f4ae5a169d tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Waiting for the task: (returnval){ [ 963.791264] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]52b2f0d4-e30d-ee65-8b72-d92b9e961f8d" [ 963.791264] env[62070]: _type = "Task" [ 963.791264] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 963.800582] env[62070]: DEBUG oslo_vmware.api [None req-242f2c71-01fd-4276-a650-74f4ae5a169d tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52b2f0d4-e30d-ee65-8b72-d92b9e961f8d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 963.842522] env[62070]: DEBUG oslo_concurrency.lockutils [None req-26f7fbc8-03f4-42ad-82c9-20e385f600f8 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Acquiring lock "refresh_cache-a5cba512-9b50-4ca3-93eb-345be12dc588" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 963.842749] env[62070]: DEBUG oslo_concurrency.lockutils [None req-26f7fbc8-03f4-42ad-82c9-20e385f600f8 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Acquired lock "refresh_cache-a5cba512-9b50-4ca3-93eb-345be12dc588" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 963.842982] env[62070]: DEBUG nova.network.neutron [None req-26f7fbc8-03f4-42ad-82c9-20e385f600f8 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] [instance: a5cba512-9b50-4ca3-93eb-345be12dc588] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 963.965462] env[62070]: DEBUG oslo_vmware.api [None req-0e18b964-96a3-46d4-a9b9-4704ba1dc707 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Task: {'id': task-1122151, 'name': PowerOnVM_Task, 'duration_secs': 0.488864} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 963.965728] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-0e18b964-96a3-46d4-a9b9-4704ba1dc707 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 2c58db1d-405f-4489-85db-c74723be4a8d] Powered on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 963.965965] env[62070]: DEBUG nova.compute.manager [None req-0e18b964-96a3-46d4-a9b9-4704ba1dc707 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 2c58db1d-405f-4489-85db-c74723be4a8d] Checking state {{(pid=62070) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 963.966742] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bd952cc-7d82-4541-8ade-49d82e71db2a {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.987126] env[62070]: DEBUG oslo_vmware.api [None req-f5253e8c-bb7c-4acc-90f3-1d1ce621f83f tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Task: {'id': task-1122152, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.270241} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 963.987423] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-f5253e8c-bb7c-4acc-90f3-1d1ce621f83f tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Deleted the datastore file {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 963.987594] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-f5253e8c-bb7c-4acc-90f3-1d1ce621f83f tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 519cad6a-ebe0-42db-a19e-27249b83436e] Deleted contents of the VM from datastore datastore1 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 963.987850] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-f5253e8c-bb7c-4acc-90f3-1d1ce621f83f tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 519cad6a-ebe0-42db-a19e-27249b83436e] Instance destroyed {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 963.987962] env[62070]: INFO nova.compute.manager [None req-f5253e8c-bb7c-4acc-90f3-1d1ce621f83f tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] [instance: 519cad6a-ebe0-42db-a19e-27249b83436e] Took 1.14 seconds to destroy the instance on the hypervisor. [ 963.988222] env[62070]: DEBUG oslo.service.loopingcall [None req-f5253e8c-bb7c-4acc-90f3-1d1ce621f83f tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 963.988417] env[62070]: DEBUG nova.compute.manager [-] [instance: 519cad6a-ebe0-42db-a19e-27249b83436e] Deallocating network for instance {{(pid=62070) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 963.988511] env[62070]: DEBUG nova.network.neutron [-] [instance: 519cad6a-ebe0-42db-a19e-27249b83436e] deallocate_for_instance() {{(pid=62070) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 964.014314] env[62070]: DEBUG oslo_concurrency.lockutils [None req-5f812c03-0b47-438f-9e1c-ee619b6efa11 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Releasing lock "refresh_cache-cf52cee8-874e-44e8-a36e-49ac20f3e312" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 964.014416] env[62070]: DEBUG oslo_concurrency.lockutils [None req-5f812c03-0b47-438f-9e1c-ee619b6efa11 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Acquiring lock "cf52cee8-874e-44e8-a36e-49ac20f3e312" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 964.014621] env[62070]: DEBUG oslo_concurrency.lockutils [None req-5f812c03-0b47-438f-9e1c-ee619b6efa11 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Acquired lock "cf52cee8-874e-44e8-a36e-49ac20f3e312" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 964.015606] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-365e99de-9d70-4c0c-b0dc-dde4c4bb8f65 {{(pid=62070) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.035831] env[62070]: DEBUG nova.virt.hardware [None req-5f812c03-0b47-438f-9e1c-ee619b6efa11 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T09:21:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 964.036100] env[62070]: DEBUG nova.virt.hardware [None req-5f812c03-0b47-438f-9e1c-ee619b6efa11 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Flavor limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 964.036314] env[62070]: DEBUG nova.virt.hardware [None req-5f812c03-0b47-438f-9e1c-ee619b6efa11 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Image limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 964.036517] env[62070]: DEBUG nova.virt.hardware [None req-5f812c03-0b47-438f-9e1c-ee619b6efa11 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Flavor pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 964.036718] env[62070]: DEBUG nova.virt.hardware [None req-5f812c03-0b47-438f-9e1c-ee619b6efa11 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Image pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 964.036894] env[62070]: DEBUG nova.virt.hardware [None req-5f812c03-0b47-438f-9e1c-ee619b6efa11 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 964.037215] env[62070]: DEBUG nova.virt.hardware [None req-5f812c03-0b47-438f-9e1c-ee619b6efa11 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 964.037671] env[62070]: DEBUG nova.virt.hardware [None req-5f812c03-0b47-438f-9e1c-ee619b6efa11 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 964.038026] env[62070]: DEBUG nova.virt.hardware [None req-5f812c03-0b47-438f-9e1c-ee619b6efa11 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Got 1 possible topologies {{(pid=62070) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 964.038171] env[62070]: DEBUG nova.virt.hardware [None req-5f812c03-0b47-438f-9e1c-ee619b6efa11 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 964.038366] env[62070]: DEBUG nova.virt.hardware [None req-5f812c03-0b47-438f-9e1c-ee619b6efa11 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 964.046512] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-5f812c03-0b47-438f-9e1c-ee619b6efa11 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] [instance: cf52cee8-874e-44e8-a36e-49ac20f3e312] Reconfiguring VM to attach interface {{(pid=62070) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1929}} [ 964.048875] env[62070]: DEBUG nova.network.neutron [None req-f98075f3-04a2-432f-a2ed-d24c86458581 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: 1d595bc8-ab51-4443-bf32-079078f3133b] Successfully created port: 20687f56-b863-4fa4-8f5d-df461da5054b {{(pid=62070) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 964.049435] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-301dc841-76a3-49aa-ab28-eb2e498944e1 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.073597] env[62070]: DEBUG oslo_vmware.api [None req-5f812c03-0b47-438f-9e1c-ee619b6efa11 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Waiting for the task: (returnval){ [ 964.073597] env[62070]: value = "task-1122153" [ 964.073597] env[62070]: _type = "Task" [ 964.073597] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 964.083902] env[62070]: DEBUG oslo_vmware.api [None req-5f812c03-0b47-438f-9e1c-ee619b6efa11 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Task: {'id': task-1122153, 'name': ReconfigVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 964.110853] env[62070]: DEBUG oslo_concurrency.lockutils [req-b4df7a76-8a65-4ccb-ab2c-5d6748c86b5f req-2962b559-0184-400b-8edc-10db336d6026 service nova] Releasing lock "refresh_cache-53a1791d-38fd-4721-b82c-2f0922348300" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 964.124207] env[62070]: DEBUG nova.compute.manager [None req-f98075f3-04a2-432f-a2ed-d24c86458581 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: 1d595bc8-ab51-4443-bf32-079078f3133b] Start building block device mappings for instance. 
{{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 964.128323] env[62070]: DEBUG oslo_concurrency.lockutils [None req-2cdd6dc5-b80a-4fa8-acc2-8040ca3495d3 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.017s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 964.131360] env[62070]: DEBUG oslo_concurrency.lockutils [None req-8cb62e19-b163-4dbc-aa0f-4390ab9e95e3 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 9.202s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 964.131576] env[62070]: INFO nova.compute.claims [None req-8cb62e19-b163-4dbc-aa0f-4390ab9e95e3 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: b101c79a-abfd-4104-aaed-096995fb2337] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 964.303234] env[62070]: DEBUG oslo_vmware.api [None req-242f2c71-01fd-4276-a650-74f4ae5a169d tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52b2f0d4-e30d-ee65-8b72-d92b9e961f8d, 'name': SearchDatastore_Task, 'duration_secs': 0.014865} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 964.304095] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7b27de5e-9c2d-4801-8a3b-0e6b32e3c62a {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.310889] env[62070]: DEBUG oslo_vmware.api [None req-242f2c71-01fd-4276-a650-74f4ae5a169d tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Waiting for the task: (returnval){ [ 964.310889] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]52915ee8-778c-6c3f-a2e0-2fabdafe5d79" [ 964.310889] env[62070]: _type = "Task" [ 964.310889] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 964.324419] env[62070]: DEBUG oslo_vmware.api [None req-242f2c71-01fd-4276-a650-74f4ae5a169d tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52915ee8-778c-6c3f-a2e0-2fabdafe5d79, 'name': SearchDatastore_Task, 'duration_secs': 0.01119} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 964.324721] env[62070]: DEBUG oslo_concurrency.lockutils [None req-242f2c71-01fd-4276-a650-74f4ae5a169d tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Releasing lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 964.325010] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-242f2c71-01fd-4276-a650-74f4ae5a169d tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore1] 53a1791d-38fd-4721-b82c-2f0922348300/53a1791d-38fd-4721-b82c-2f0922348300.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 964.325288] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e0086ed7-20e4-4aa6-a6fa-82ae4b356df0 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.332748] env[62070]: DEBUG oslo_vmware.api [None req-242f2c71-01fd-4276-a650-74f4ae5a169d tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Waiting for the task: (returnval){ [ 964.332748] env[62070]: value = "task-1122154" [ 964.332748] env[62070]: _type = "Task" [ 964.332748] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 964.343645] env[62070]: DEBUG oslo_vmware.api [None req-242f2c71-01fd-4276-a650-74f4ae5a169d tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Task: {'id': task-1122154, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 964.405690] env[62070]: DEBUG nova.network.neutron [None req-26f7fbc8-03f4-42ad-82c9-20e385f600f8 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] [instance: a5cba512-9b50-4ca3-93eb-345be12dc588] Instance cache missing network info. {{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 964.485068] env[62070]: DEBUG oslo_concurrency.lockutils [None req-0e18b964-96a3-46d4-a9b9-4704ba1dc707 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 964.585625] env[62070]: DEBUG oslo_vmware.api [None req-5f812c03-0b47-438f-9e1c-ee619b6efa11 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Task: {'id': task-1122153, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 964.610315] env[62070]: DEBUG nova.network.neutron [req-5a38d61d-37c2-4302-8694-5f28cc5b6aa1 req-906e0a95-e70e-4276-9c97-f7a72541521f service nova] [instance: 7dc27fe6-495f-498d-88fe-a99ddc19a21c] Updated VIF entry in instance network info cache for port cac26624-11c7-45a9-acb3-3e86b7232ab2. {{(pid=62070) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 964.610700] env[62070]: DEBUG nova.network.neutron [req-5a38d61d-37c2-4302-8694-5f28cc5b6aa1 req-906e0a95-e70e-4276-9c97-f7a72541521f service nova] [instance: 7dc27fe6-495f-498d-88fe-a99ddc19a21c] Updating instance_info_cache with network_info: [{"id": "cac26624-11c7-45a9-acb3-3e86b7232ab2", "address": "fa:16:3e:8c:68:96", "network": {"id": "754f4ec8-0bc6-4726-8b88-1a4e1a326699", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-293486644-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.209", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1a94db233e3a43dc9aa7cf887c6cb1f6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d2742ba-c3af-4412-877d-c2811dfeba46", "external-id": "nsx-vlan-transportzone-390", "segmentation_id": 390, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcac26624-11", "ovs_interfaceid": "cac26624-11c7-45a9-acb3-3e86b7232ab2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 964.642166] env[62070]: DEBUG nova.compute.manager [req-ea028e61-2811-4f7c-a0cf-7d7db11d5305 req-54ae33af-074d-493e-8235-ed7626bdab0d service nova] [instance: a5cba512-9b50-4ca3-93eb-345be12dc588] Received event network-vif-plugged-79f2a280-e16e-4dcd-9a80-21c1fc225a8c {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 964.642389] env[62070]: DEBUG oslo_concurrency.lockutils [req-ea028e61-2811-4f7c-a0cf-7d7db11d5305 req-54ae33af-074d-493e-8235-ed7626bdab0d service nova] Acquiring lock "a5cba512-9b50-4ca3-93eb-345be12dc588-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 964.642604] env[62070]: DEBUG oslo_concurrency.lockutils [req-ea028e61-2811-4f7c-a0cf-7d7db11d5305 req-54ae33af-074d-493e-8235-ed7626bdab0d service nova] Lock "a5cba512-9b50-4ca3-93eb-345be12dc588-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 964.642772] env[62070]: DEBUG oslo_concurrency.lockutils [req-ea028e61-2811-4f7c-a0cf-7d7db11d5305 req-54ae33af-074d-493e-8235-ed7626bdab0d service nova] Lock "a5cba512-9b50-4ca3-93eb-345be12dc588-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s 
{{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 964.642939] env[62070]: DEBUG nova.compute.manager [req-ea028e61-2811-4f7c-a0cf-7d7db11d5305 req-54ae33af-074d-493e-8235-ed7626bdab0d service nova] [instance: a5cba512-9b50-4ca3-93eb-345be12dc588] No waiting events found dispatching network-vif-plugged-79f2a280-e16e-4dcd-9a80-21c1fc225a8c {{(pid=62070) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 964.643118] env[62070]: WARNING nova.compute.manager [req-ea028e61-2811-4f7c-a0cf-7d7db11d5305 req-54ae33af-074d-493e-8235-ed7626bdab0d service nova] [instance: a5cba512-9b50-4ca3-93eb-345be12dc588] Received unexpected event network-vif-plugged-79f2a280-e16e-4dcd-9a80-21c1fc225a8c for instance with vm_state building and task_state spawning. [ 964.643689] env[62070]: DEBUG nova.compute.manager [req-ea028e61-2811-4f7c-a0cf-7d7db11d5305 req-54ae33af-074d-493e-8235-ed7626bdab0d service nova] [instance: a5cba512-9b50-4ca3-93eb-345be12dc588] Received event network-changed-79f2a280-e16e-4dcd-9a80-21c1fc225a8c {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 964.643689] env[62070]: DEBUG nova.compute.manager [req-ea028e61-2811-4f7c-a0cf-7d7db11d5305 req-54ae33af-074d-493e-8235-ed7626bdab0d service nova] [instance: a5cba512-9b50-4ca3-93eb-345be12dc588] Refreshing instance network info cache due to event network-changed-79f2a280-e16e-4dcd-9a80-21c1fc225a8c. {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 964.643689] env[62070]: DEBUG oslo_concurrency.lockutils [req-ea028e61-2811-4f7c-a0cf-7d7db11d5305 req-54ae33af-074d-493e-8235-ed7626bdab0d service nova] Acquiring lock "refresh_cache-a5cba512-9b50-4ca3-93eb-345be12dc588" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 964.645191] env[62070]: DEBUG nova.network.neutron [None req-26f7fbc8-03f4-42ad-82c9-20e385f600f8 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] [instance: a5cba512-9b50-4ca3-93eb-345be12dc588] Updating instance_info_cache with network_info: [{"id": "79f2a280-e16e-4dcd-9a80-21c1fc225a8c", "address": "fa:16:3e:c0:7a:98", "network": {"id": "6ea9aade-1b40-4ce8-a502-14ff09a4ab40", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1617295069-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "735d24ccc5614660a5b34d77af648f94", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5b8af79a-31d5-4d78-93d7-3919aa1d9186", "external-id": "nsx-vlan-transportzone-324", "segmentation_id": 324, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap79f2a280-e1", "ovs_interfaceid": "79f2a280-e16e-4dcd-9a80-21c1fc225a8c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 964.782221] env[62070]: DEBUG nova.network.neutron [-] [instance: 
519cad6a-ebe0-42db-a19e-27249b83436e] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 964.846695] env[62070]: DEBUG oslo_vmware.api [None req-242f2c71-01fd-4276-a650-74f4ae5a169d tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Task: {'id': task-1122154, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 965.084191] env[62070]: DEBUG oslo_vmware.api [None req-5f812c03-0b47-438f-9e1c-ee619b6efa11 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Task: {'id': task-1122153, 'name': ReconfigVM_Task, 'duration_secs': 0.63004} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 965.084757] env[62070]: DEBUG oslo_concurrency.lockutils [None req-5f812c03-0b47-438f-9e1c-ee619b6efa11 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Releasing lock "cf52cee8-874e-44e8-a36e-49ac20f3e312" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 965.085028] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-5f812c03-0b47-438f-9e1c-ee619b6efa11 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] [instance: cf52cee8-874e-44e8-a36e-49ac20f3e312] Reconfigured VM to attach interface {{(pid=62070) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1943}} [ 965.113739] env[62070]: DEBUG oslo_concurrency.lockutils [req-5a38d61d-37c2-4302-8694-5f28cc5b6aa1 req-906e0a95-e70e-4276-9c97-f7a72541521f service nova] Releasing lock "refresh_cache-7dc27fe6-495f-498d-88fe-a99ddc19a21c" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 965.114093] env[62070]: DEBUG nova.compute.manager [req-5a38d61d-37c2-4302-8694-5f28cc5b6aa1 req-906e0a95-e70e-4276-9c97-f7a72541521f service nova] [instance: cf52cee8-874e-44e8-a36e-49ac20f3e312] Received event network-vif-plugged-60714fe7-f6bc-4f1f-b4d0-a0b45b5a8f70 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 965.114272] env[62070]: DEBUG oslo_concurrency.lockutils [req-5a38d61d-37c2-4302-8694-5f28cc5b6aa1 req-906e0a95-e70e-4276-9c97-f7a72541521f service nova] Acquiring lock "cf52cee8-874e-44e8-a36e-49ac20f3e312-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 965.114490] env[62070]: DEBUG oslo_concurrency.lockutils [req-5a38d61d-37c2-4302-8694-5f28cc5b6aa1 req-906e0a95-e70e-4276-9c97-f7a72541521f service nova] Lock "cf52cee8-874e-44e8-a36e-49ac20f3e312-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 965.114665] env[62070]: DEBUG oslo_concurrency.lockutils [req-5a38d61d-37c2-4302-8694-5f28cc5b6aa1 req-906e0a95-e70e-4276-9c97-f7a72541521f service nova] Lock "cf52cee8-874e-44e8-a36e-49ac20f3e312-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62070) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 965.114830] env[62070]: DEBUG nova.compute.manager [req-5a38d61d-37c2-4302-8694-5f28cc5b6aa1 req-906e0a95-e70e-4276-9c97-f7a72541521f service nova] [instance: cf52cee8-874e-44e8-a36e-49ac20f3e312] No waiting events found dispatching network-vif-plugged-60714fe7-f6bc-4f1f-b4d0-a0b45b5a8f70 {{(pid=62070) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 965.115000] env[62070]: WARNING nova.compute.manager [req-5a38d61d-37c2-4302-8694-5f28cc5b6aa1 req-906e0a95-e70e-4276-9c97-f7a72541521f service nova] [instance: cf52cee8-874e-44e8-a36e-49ac20f3e312] Received unexpected event network-vif-plugged-60714fe7-f6bc-4f1f-b4d0-a0b45b5a8f70 for instance with vm_state active and task_state None. [ 965.115183] env[62070]: DEBUG nova.compute.manager [req-5a38d61d-37c2-4302-8694-5f28cc5b6aa1 req-906e0a95-e70e-4276-9c97-f7a72541521f service nova] [instance: cf52cee8-874e-44e8-a36e-49ac20f3e312] Received event network-changed-60714fe7-f6bc-4f1f-b4d0-a0b45b5a8f70 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 965.115340] env[62070]: DEBUG nova.compute.manager [req-5a38d61d-37c2-4302-8694-5f28cc5b6aa1 req-906e0a95-e70e-4276-9c97-f7a72541521f service nova] [instance: cf52cee8-874e-44e8-a36e-49ac20f3e312] Refreshing instance network info cache due to event network-changed-60714fe7-f6bc-4f1f-b4d0-a0b45b5a8f70. {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 965.115529] env[62070]: DEBUG oslo_concurrency.lockutils [req-5a38d61d-37c2-4302-8694-5f28cc5b6aa1 req-906e0a95-e70e-4276-9c97-f7a72541521f service nova] Acquiring lock "refresh_cache-cf52cee8-874e-44e8-a36e-49ac20f3e312" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 965.115669] env[62070]: DEBUG oslo_concurrency.lockutils [req-5a38d61d-37c2-4302-8694-5f28cc5b6aa1 req-906e0a95-e70e-4276-9c97-f7a72541521f service nova] Acquired lock "refresh_cache-cf52cee8-874e-44e8-a36e-49ac20f3e312" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 965.115828] env[62070]: DEBUG nova.network.neutron [req-5a38d61d-37c2-4302-8694-5f28cc5b6aa1 req-906e0a95-e70e-4276-9c97-f7a72541521f service nova] [instance: cf52cee8-874e-44e8-a36e-49ac20f3e312] Refreshing network info cache for port 60714fe7-f6bc-4f1f-b4d0-a0b45b5a8f70 {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 965.137159] env[62070]: DEBUG nova.compute.manager [None req-f98075f3-04a2-432f-a2ed-d24c86458581 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: 1d595bc8-ab51-4443-bf32-079078f3133b] Start spawning the instance on the hypervisor. 
{{(pid=62070) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 965.148112] env[62070]: DEBUG oslo_concurrency.lockutils [None req-26f7fbc8-03f4-42ad-82c9-20e385f600f8 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Releasing lock "refresh_cache-a5cba512-9b50-4ca3-93eb-345be12dc588" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 965.148112] env[62070]: DEBUG nova.compute.manager [None req-26f7fbc8-03f4-42ad-82c9-20e385f600f8 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] [instance: a5cba512-9b50-4ca3-93eb-345be12dc588] Instance network_info: |[{"id": "79f2a280-e16e-4dcd-9a80-21c1fc225a8c", "address": "fa:16:3e:c0:7a:98", "network": {"id": "6ea9aade-1b40-4ce8-a502-14ff09a4ab40", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1617295069-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "735d24ccc5614660a5b34d77af648f94", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5b8af79a-31d5-4d78-93d7-3919aa1d9186", "external-id": "nsx-vlan-transportzone-324", "segmentation_id": 324, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap79f2a280-e1", "ovs_interfaceid": "79f2a280-e16e-4dcd-9a80-21c1fc225a8c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 965.148348] env[62070]: DEBUG oslo_concurrency.lockutils [req-ea028e61-2811-4f7c-a0cf-7d7db11d5305 req-54ae33af-074d-493e-8235-ed7626bdab0d service nova] Acquired lock "refresh_cache-a5cba512-9b50-4ca3-93eb-345be12dc588" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 965.148411] env[62070]: DEBUG nova.network.neutron [req-ea028e61-2811-4f7c-a0cf-7d7db11d5305 req-54ae33af-074d-493e-8235-ed7626bdab0d service nova] [instance: a5cba512-9b50-4ca3-93eb-345be12dc588] Refreshing network info cache for port 79f2a280-e16e-4dcd-9a80-21c1fc225a8c {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 965.149519] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-26f7fbc8-03f4-42ad-82c9-20e385f600f8 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] [instance: a5cba512-9b50-4ca3-93eb-345be12dc588] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c0:7a:98', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '5b8af79a-31d5-4d78-93d7-3919aa1d9186', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '79f2a280-e16e-4dcd-9a80-21c1fc225a8c', 'vif_model': 'vmxnet3'}] {{(pid=62070) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 965.157109] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-26f7fbc8-03f4-42ad-82c9-20e385f600f8 tempest-AttachVolumeNegativeTest-303800157 
tempest-AttachVolumeNegativeTest-303800157-project-member] Creating folder: Project (735d24ccc5614660a5b34d77af648f94). Parent ref: group-v245319. {{(pid=62070) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 965.161520] env[62070]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-35c6232e-010f-4397-8a52-95e6f13895a3 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.177085] env[62070]: DEBUG nova.virt.hardware [None req-f98075f3-04a2-432f-a2ed-d24c86458581 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T09:21:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T09:21:17Z,direct_url=,disk_format='vmdk',id=43ea607c-7ece-4601-9b11-75c6a16aa7dd,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9d42cb2bbadf40d6b35f237f71234611',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T09:21:18Z,virtual_size=,visibility=), allow threads: False {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 965.177363] env[62070]: DEBUG nova.virt.hardware [None req-f98075f3-04a2-432f-a2ed-d24c86458581 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Flavor limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 965.177552] env[62070]: DEBUG nova.virt.hardware [None req-f98075f3-04a2-432f-a2ed-d24c86458581 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Image limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 965.177756] env[62070]: DEBUG nova.virt.hardware [None req-f98075f3-04a2-432f-a2ed-d24c86458581 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Flavor pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 965.177904] env[62070]: DEBUG nova.virt.hardware [None req-f98075f3-04a2-432f-a2ed-d24c86458581 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Image pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 965.181127] env[62070]: DEBUG nova.virt.hardware [None req-f98075f3-04a2-432f-a2ed-d24c86458581 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 965.181127] env[62070]: DEBUG nova.virt.hardware [None req-f98075f3-04a2-432f-a2ed-d24c86458581 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 965.181311] env[62070]: DEBUG nova.virt.hardware [None 
req-f98075f3-04a2-432f-a2ed-d24c86458581 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 965.181351] env[62070]: DEBUG nova.virt.hardware [None req-f98075f3-04a2-432f-a2ed-d24c86458581 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Got 1 possible topologies {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 965.181483] env[62070]: DEBUG nova.virt.hardware [None req-f98075f3-04a2-432f-a2ed-d24c86458581 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 965.181654] env[62070]: DEBUG nova.virt.hardware [None req-f98075f3-04a2-432f-a2ed-d24c86458581 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 965.182891] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffd84302-e3d5-4dd7-86c6-9abc309c29f4 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.186792] env[62070]: INFO nova.virt.vmwareapi.vm_util [None req-26f7fbc8-03f4-42ad-82c9-20e385f600f8 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Created folder: Project (735d24ccc5614660a5b34d77af648f94) in parent group-v245319. [ 965.186979] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-26f7fbc8-03f4-42ad-82c9-20e385f600f8 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Creating folder: Instances. Parent ref: group-v245473. {{(pid=62070) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 965.189612] env[62070]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a1a0402d-f35e-4586-a0d9-50520cc37d15 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.195081] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3cd3036f-052d-4d58-8c10-1459ecfce0a7 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.202473] env[62070]: INFO nova.virt.vmwareapi.vm_util [None req-26f7fbc8-03f4-42ad-82c9-20e385f600f8 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Created folder: Instances in parent group-v245473. [ 965.202733] env[62070]: DEBUG oslo.service.loopingcall [None req-26f7fbc8-03f4-42ad-82c9-20e385f600f8 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 965.205532] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a5cba512-9b50-4ca3-93eb-345be12dc588] Creating VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 965.206699] env[62070]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f8674577-3da0-471e-8f01-a000b2edbec4 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.235857] env[62070]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 965.235857] env[62070]: value = "task-1122157" [ 965.235857] env[62070]: _type = "Task" [ 965.235857] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 965.247666] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1122157, 'name': CreateVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 965.284966] env[62070]: INFO nova.compute.manager [-] [instance: 519cad6a-ebe0-42db-a19e-27249b83436e] Took 1.30 seconds to deallocate network for instance. [ 965.344201] env[62070]: DEBUG oslo_vmware.api [None req-242f2c71-01fd-4276-a650-74f4ae5a169d tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Task: {'id': task-1122154, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.715787} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 965.346797] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-242f2c71-01fd-4276-a650-74f4ae5a169d tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore1] 53a1791d-38fd-4721-b82c-2f0922348300/53a1791d-38fd-4721-b82c-2f0922348300.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 965.347063] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-242f2c71-01fd-4276-a650-74f4ae5a169d tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] [instance: 53a1791d-38fd-4721-b82c-2f0922348300] Extending root virtual disk to 1048576 {{(pid=62070) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 965.348200] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2e971e47-4c38-4e00-a832-4126a9f5f35f {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.360348] env[62070]: DEBUG oslo_vmware.api [None req-242f2c71-01fd-4276-a650-74f4ae5a169d tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Waiting for the task: (returnval){ [ 965.360348] env[62070]: value = "task-1122158" [ 965.360348] env[62070]: _type = "Task" [ 965.360348] env[62070]: } to complete. 
{{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 965.370569] env[62070]: DEBUG oslo_vmware.api [None req-242f2c71-01fd-4276-a650-74f4ae5a169d tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Task: {'id': task-1122158, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 965.390378] env[62070]: DEBUG nova.network.neutron [req-ea028e61-2811-4f7c-a0cf-7d7db11d5305 req-54ae33af-074d-493e-8235-ed7626bdab0d service nova] [instance: a5cba512-9b50-4ca3-93eb-345be12dc588] Updated VIF entry in instance network info cache for port 79f2a280-e16e-4dcd-9a80-21c1fc225a8c. {{(pid=62070) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 965.390732] env[62070]: DEBUG nova.network.neutron [req-ea028e61-2811-4f7c-a0cf-7d7db11d5305 req-54ae33af-074d-493e-8235-ed7626bdab0d service nova] [instance: a5cba512-9b50-4ca3-93eb-345be12dc588] Updating instance_info_cache with network_info: [{"id": "79f2a280-e16e-4dcd-9a80-21c1fc225a8c", "address": "fa:16:3e:c0:7a:98", "network": {"id": "6ea9aade-1b40-4ce8-a502-14ff09a4ab40", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1617295069-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "735d24ccc5614660a5b34d77af648f94", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5b8af79a-31d5-4d78-93d7-3919aa1d9186", "external-id": "nsx-vlan-transportzone-324", "segmentation_id": 324, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap79f2a280-e1", "ovs_interfaceid": "79f2a280-e16e-4dcd-9a80-21c1fc225a8c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 965.437158] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53b5c73f-af6a-4e18-a829-560f0f3dde10 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.445362] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a0ff749-c032-494f-8bf1-c2ddd2f92837 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.476599] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b78158ac-5731-4b5d-8c2f-30b28b2496c1 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.484741] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81405202-ab14-4c05-af7d-ab3ba7c1d542 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.499135] env[62070]: DEBUG nova.compute.provider_tree [None 
req-8cb62e19-b163-4dbc-aa0f-4390ab9e95e3 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 965.589343] env[62070]: DEBUG oslo_concurrency.lockutils [None req-5f812c03-0b47-438f-9e1c-ee619b6efa11 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Lock "interface-cf52cee8-874e-44e8-a36e-49ac20f3e312-60714fe7-f6bc-4f1f-b4d0-a0b45b5a8f70" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 6.592s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 965.749709] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1122157, 'name': CreateVM_Task} progress is 25%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 965.791651] env[62070]: DEBUG oslo_concurrency.lockutils [None req-f5253e8c-bb7c-4acc-90f3-1d1ce621f83f tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 965.836133] env[62070]: DEBUG nova.network.neutron [req-5a38d61d-37c2-4302-8694-5f28cc5b6aa1 req-906e0a95-e70e-4276-9c97-f7a72541521f service nova] [instance: cf52cee8-874e-44e8-a36e-49ac20f3e312] Updated VIF entry in instance network info cache for port 60714fe7-f6bc-4f1f-b4d0-a0b45b5a8f70. {{(pid=62070) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 965.836560] env[62070]: DEBUG nova.network.neutron [req-5a38d61d-37c2-4302-8694-5f28cc5b6aa1 req-906e0a95-e70e-4276-9c97-f7a72541521f service nova] [instance: cf52cee8-874e-44e8-a36e-49ac20f3e312] Updating instance_info_cache with network_info: [{"id": "c06feb60-bfb1-47ea-8764-52391d9b0b78", "address": "fa:16:3e:66:3c:b1", "network": {"id": "48dc51c7-cfa4-452e-9d72-2968d9a40dfa", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-274800531-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.203", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "925dff51764c4b56ae7ea05fbde2ecdd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2c7c1b46-cb81-45da-b5aa-7905d4da5854", "external-id": "nsx-vlan-transportzone-15", "segmentation_id": 15, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc06feb60-bf", "ovs_interfaceid": "c06feb60-bfb1-47ea-8764-52391d9b0b78", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "60714fe7-f6bc-4f1f-b4d0-a0b45b5a8f70", "address": "fa:16:3e:7c:c4:2b", "network": {"id": "48dc51c7-cfa4-452e-9d72-2968d9a40dfa", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-274800531-network", 
"subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "925dff51764c4b56ae7ea05fbde2ecdd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2c7c1b46-cb81-45da-b5aa-7905d4da5854", "external-id": "nsx-vlan-transportzone-15", "segmentation_id": 15, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap60714fe7-f6", "ovs_interfaceid": "60714fe7-f6bc-4f1f-b4d0-a0b45b5a8f70", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 965.871623] env[62070]: DEBUG oslo_vmware.api [None req-242f2c71-01fd-4276-a650-74f4ae5a169d tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Task: {'id': task-1122158, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.08336} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 965.871936] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-242f2c71-01fd-4276-a650-74f4ae5a169d tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] [instance: 53a1791d-38fd-4721-b82c-2f0922348300] Extended root virtual disk {{(pid=62070) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 965.872721] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7ae2a0c-7e97-4572-8994-fceb0e42fa3f {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.896727] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-242f2c71-01fd-4276-a650-74f4ae5a169d tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] [instance: 53a1791d-38fd-4721-b82c-2f0922348300] Reconfiguring VM instance instance-00000057 to attach disk [datastore1] 53a1791d-38fd-4721-b82c-2f0922348300/53a1791d-38fd-4721-b82c-2f0922348300.vmdk or device None with type sparse {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 965.897238] env[62070]: DEBUG oslo_concurrency.lockutils [req-ea028e61-2811-4f7c-a0cf-7d7db11d5305 req-54ae33af-074d-493e-8235-ed7626bdab0d service nova] Releasing lock "refresh_cache-a5cba512-9b50-4ca3-93eb-345be12dc588" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 965.897514] env[62070]: DEBUG nova.compute.manager [req-ea028e61-2811-4f7c-a0cf-7d7db11d5305 req-54ae33af-074d-493e-8235-ed7626bdab0d service nova] [instance: 519cad6a-ebe0-42db-a19e-27249b83436e] Received event network-vif-deleted-52474984-690f-441c-9477-d3d0a3ab1bb7 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 965.897673] env[62070]: INFO nova.compute.manager [req-ea028e61-2811-4f7c-a0cf-7d7db11d5305 req-54ae33af-074d-493e-8235-ed7626bdab0d service nova] [instance: 519cad6a-ebe0-42db-a19e-27249b83436e] Neutron deleted interface 
52474984-690f-441c-9477-d3d0a3ab1bb7; detaching it from the instance and deleting it from the info cache [ 965.897852] env[62070]: DEBUG nova.network.neutron [req-ea028e61-2811-4f7c-a0cf-7d7db11d5305 req-54ae33af-074d-493e-8235-ed7626bdab0d service nova] [instance: 519cad6a-ebe0-42db-a19e-27249b83436e] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 965.898957] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-357432e1-de49-4567-a586-51d74395a408 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.916140] env[62070]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-7b3333ce-6402-43ac-9bd0-92d983549187 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.926407] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c240b886-7ee7-4dd0-b801-aeb2eaebf307 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.940167] env[62070]: DEBUG oslo_vmware.api [None req-242f2c71-01fd-4276-a650-74f4ae5a169d tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Waiting for the task: (returnval){ [ 965.940167] env[62070]: value = "task-1122159" [ 965.940167] env[62070]: _type = "Task" [ 965.940167] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 965.950536] env[62070]: DEBUG oslo_vmware.api [None req-242f2c71-01fd-4276-a650-74f4ae5a169d tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Task: {'id': task-1122159, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 965.965464] env[62070]: DEBUG nova.compute.manager [req-ea028e61-2811-4f7c-a0cf-7d7db11d5305 req-54ae33af-074d-493e-8235-ed7626bdab0d service nova] [instance: 519cad6a-ebe0-42db-a19e-27249b83436e] Detach interface failed, port_id=52474984-690f-441c-9477-d3d0a3ab1bb7, reason: Instance 519cad6a-ebe0-42db-a19e-27249b83436e could not be found. 
{{(pid=62070) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 966.000213] env[62070]: DEBUG nova.network.neutron [None req-f98075f3-04a2-432f-a2ed-d24c86458581 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: 1d595bc8-ab51-4443-bf32-079078f3133b] Successfully updated port: 20687f56-b863-4fa4-8f5d-df461da5054b {{(pid=62070) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 966.002285] env[62070]: DEBUG nova.scheduler.client.report [None req-8cb62e19-b163-4dbc-aa0f-4390ab9e95e3 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 966.202673] env[62070]: DEBUG nova.compute.manager [req-bf18efd6-be23-4f08-97c1-48332eddd4e2 req-64d72be8-bd34-4d07-8176-d090ffe6c30e service nova] [instance: 1d595bc8-ab51-4443-bf32-079078f3133b] Received event network-vif-plugged-20687f56-b863-4fa4-8f5d-df461da5054b {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 966.202673] env[62070]: DEBUG oslo_concurrency.lockutils [req-bf18efd6-be23-4f08-97c1-48332eddd4e2 req-64d72be8-bd34-4d07-8176-d090ffe6c30e service nova] Acquiring lock "1d595bc8-ab51-4443-bf32-079078f3133b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 966.202673] env[62070]: DEBUG oslo_concurrency.lockutils [req-bf18efd6-be23-4f08-97c1-48332eddd4e2 req-64d72be8-bd34-4d07-8176-d090ffe6c30e service nova] Lock "1d595bc8-ab51-4443-bf32-079078f3133b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 966.202673] env[62070]: DEBUG oslo_concurrency.lockutils [req-bf18efd6-be23-4f08-97c1-48332eddd4e2 req-64d72be8-bd34-4d07-8176-d090ffe6c30e service nova] Lock "1d595bc8-ab51-4443-bf32-079078f3133b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 966.202673] env[62070]: DEBUG nova.compute.manager [req-bf18efd6-be23-4f08-97c1-48332eddd4e2 req-64d72be8-bd34-4d07-8176-d090ffe6c30e service nova] [instance: 1d595bc8-ab51-4443-bf32-079078f3133b] No waiting events found dispatching network-vif-plugged-20687f56-b863-4fa4-8f5d-df461da5054b {{(pid=62070) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 966.203047] env[62070]: WARNING nova.compute.manager [req-bf18efd6-be23-4f08-97c1-48332eddd4e2 req-64d72be8-bd34-4d07-8176-d090ffe6c30e service nova] [instance: 1d595bc8-ab51-4443-bf32-079078f3133b] Received unexpected event network-vif-plugged-20687f56-b863-4fa4-8f5d-df461da5054b for instance with vm_state building and task_state spawning. 
[ 966.248493] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1122157, 'name': CreateVM_Task, 'duration_secs': 0.91115} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 966.248675] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a5cba512-9b50-4ca3-93eb-345be12dc588] Created VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 966.250030] env[62070]: DEBUG oslo_concurrency.lockutils [None req-26f7fbc8-03f4-42ad-82c9-20e385f600f8 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 966.250030] env[62070]: DEBUG oslo_concurrency.lockutils [None req-26f7fbc8-03f4-42ad-82c9-20e385f600f8 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Acquired lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 966.250030] env[62070]: DEBUG oslo_concurrency.lockutils [None req-26f7fbc8-03f4-42ad-82c9-20e385f600f8 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 966.250193] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a6929ed4-6c82-496d-acb7-37f882268f26 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.255512] env[62070]: DEBUG oslo_vmware.api [None req-26f7fbc8-03f4-42ad-82c9-20e385f600f8 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Waiting for the task: (returnval){ [ 966.255512] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]526d4dbd-1b34-65ab-2aa3-d16ab899709e" [ 966.255512] env[62070]: _type = "Task" [ 966.255512] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 966.264067] env[62070]: DEBUG oslo_vmware.api [None req-26f7fbc8-03f4-42ad-82c9-20e385f600f8 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]526d4dbd-1b34-65ab-2aa3-d16ab899709e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 966.339341] env[62070]: DEBUG oslo_concurrency.lockutils [req-5a38d61d-37c2-4302-8694-5f28cc5b6aa1 req-906e0a95-e70e-4276-9c97-f7a72541521f service nova] Releasing lock "refresh_cache-cf52cee8-874e-44e8-a36e-49ac20f3e312" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 966.452039] env[62070]: DEBUG oslo_vmware.api [None req-242f2c71-01fd-4276-a650-74f4ae5a169d tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Task: {'id': task-1122159, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 966.507418] env[62070]: DEBUG oslo_concurrency.lockutils [None req-f98075f3-04a2-432f-a2ed-d24c86458581 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Acquiring lock "refresh_cache-1d595bc8-ab51-4443-bf32-079078f3133b" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 966.507679] env[62070]: DEBUG oslo_concurrency.lockutils [None req-f98075f3-04a2-432f-a2ed-d24c86458581 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Acquired lock "refresh_cache-1d595bc8-ab51-4443-bf32-079078f3133b" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 966.508150] env[62070]: DEBUG nova.network.neutron [None req-f98075f3-04a2-432f-a2ed-d24c86458581 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: 1d595bc8-ab51-4443-bf32-079078f3133b] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 966.509925] env[62070]: DEBUG oslo_concurrency.lockutils [None req-8cb62e19-b163-4dbc-aa0f-4390ab9e95e3 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.380s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 966.510505] env[62070]: DEBUG nova.compute.manager [None req-8cb62e19-b163-4dbc-aa0f-4390ab9e95e3 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: b101c79a-abfd-4104-aaed-096995fb2337] Start building networks asynchronously for instance. {{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 966.514054] env[62070]: DEBUG oslo_concurrency.lockutils [None req-447f6c6a-b2b6-4357-bd55-081d5f3e8906 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 11.441s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 966.514710] env[62070]: DEBUG nova.objects.instance [None req-447f6c6a-b2b6-4357-bd55-081d5f3e8906 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Lazy-loading 'resources' on Instance uuid e850734f-c49c-46d7-87ab-b0d6bed89d9b {{(pid=62070) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 966.766771] env[62070]: DEBUG oslo_vmware.api [None req-26f7fbc8-03f4-42ad-82c9-20e385f600f8 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]526d4dbd-1b34-65ab-2aa3-d16ab899709e, 'name': SearchDatastore_Task, 'duration_secs': 0.017418} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 966.767190] env[62070]: DEBUG oslo_concurrency.lockutils [None req-26f7fbc8-03f4-42ad-82c9-20e385f600f8 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Releasing lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 966.767639] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-26f7fbc8-03f4-42ad-82c9-20e385f600f8 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] [instance: a5cba512-9b50-4ca3-93eb-345be12dc588] Processing image 43ea607c-7ece-4601-9b11-75c6a16aa7dd {{(pid=62070) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 966.767916] env[62070]: DEBUG oslo_concurrency.lockutils [None req-26f7fbc8-03f4-42ad-82c9-20e385f600f8 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 966.768094] env[62070]: DEBUG oslo_concurrency.lockutils [None req-26f7fbc8-03f4-42ad-82c9-20e385f600f8 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Acquired lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 966.768291] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-26f7fbc8-03f4-42ad-82c9-20e385f600f8 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 966.768577] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f864a56f-6c1f-4179-a49b-20e938004ed6 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.779557] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-26f7fbc8-03f4-42ad-82c9-20e385f600f8 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 966.779753] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-26f7fbc8-03f4-42ad-82c9-20e385f600f8 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62070) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 966.780619] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9d20542e-e0e5-4ae6-b338-469dc7f62c9e {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.786645] env[62070]: DEBUG oslo_vmware.api [None req-26f7fbc8-03f4-42ad-82c9-20e385f600f8 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Waiting for the task: (returnval){ [ 966.786645] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]529d5847-bfaa-31d9-af30-250aa970eabe" [ 966.786645] env[62070]: _type = "Task" [ 966.786645] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 966.794908] env[62070]: DEBUG oslo_vmware.api [None req-26f7fbc8-03f4-42ad-82c9-20e385f600f8 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]529d5847-bfaa-31d9-af30-250aa970eabe, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 966.952077] env[62070]: DEBUG oslo_vmware.api [None req-242f2c71-01fd-4276-a650-74f4ae5a169d tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Task: {'id': task-1122159, 'name': ReconfigVM_Task, 'duration_secs': 0.626734} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 966.952329] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-242f2c71-01fd-4276-a650-74f4ae5a169d tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] [instance: 53a1791d-38fd-4721-b82c-2f0922348300] Reconfigured VM instance instance-00000057 to attach disk [datastore1] 53a1791d-38fd-4721-b82c-2f0922348300/53a1791d-38fd-4721-b82c-2f0922348300.vmdk or device None with type sparse {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 966.952935] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8a51eda0-e09e-4b05-8fe6-48c7863dfce2 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.960697] env[62070]: DEBUG oslo_vmware.api [None req-242f2c71-01fd-4276-a650-74f4ae5a169d tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Waiting for the task: (returnval){ [ 966.960697] env[62070]: value = "task-1122160" [ 966.960697] env[62070]: _type = "Task" [ 966.960697] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 966.969799] env[62070]: DEBUG oslo_vmware.api [None req-242f2c71-01fd-4276-a650-74f4ae5a169d tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Task: {'id': task-1122160, 'name': Rename_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 967.017233] env[62070]: DEBUG nova.compute.utils [None req-8cb62e19-b163-4dbc-aa0f-4390ab9e95e3 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Using /dev/sd instead of None {{(pid=62070) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 967.018986] env[62070]: DEBUG nova.compute.manager [None req-8cb62e19-b163-4dbc-aa0f-4390ab9e95e3 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: b101c79a-abfd-4104-aaed-096995fb2337] Allocating IP information in the background. {{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 967.019192] env[62070]: DEBUG nova.network.neutron [None req-8cb62e19-b163-4dbc-aa0f-4390ab9e95e3 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: b101c79a-abfd-4104-aaed-096995fb2337] allocate_for_instance() {{(pid=62070) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 967.041134] env[62070]: DEBUG oslo_concurrency.lockutils [None req-de08a2f2-ef2e-4d38-9071-72e4c13cea82 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Acquiring lock "2c58db1d-405f-4489-85db-c74723be4a8d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 967.041324] env[62070]: DEBUG oslo_concurrency.lockutils [None req-de08a2f2-ef2e-4d38-9071-72e4c13cea82 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Lock "2c58db1d-405f-4489-85db-c74723be4a8d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 967.041998] env[62070]: DEBUG oslo_concurrency.lockutils [None req-de08a2f2-ef2e-4d38-9071-72e4c13cea82 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Acquiring lock "2c58db1d-405f-4489-85db-c74723be4a8d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 967.042311] env[62070]: DEBUG oslo_concurrency.lockutils [None req-de08a2f2-ef2e-4d38-9071-72e4c13cea82 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Lock "2c58db1d-405f-4489-85db-c74723be4a8d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 967.042476] env[62070]: DEBUG oslo_concurrency.lockutils [None req-de08a2f2-ef2e-4d38-9071-72e4c13cea82 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Lock "2c58db1d-405f-4489-85db-c74723be4a8d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 967.049214] env[62070]: INFO nova.compute.manager [None req-de08a2f2-ef2e-4d38-9071-72e4c13cea82 
tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 2c58db1d-405f-4489-85db-c74723be4a8d] Terminating instance [ 967.051879] env[62070]: DEBUG nova.compute.manager [None req-de08a2f2-ef2e-4d38-9071-72e4c13cea82 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 2c58db1d-405f-4489-85db-c74723be4a8d] Start destroying the instance on the hypervisor. {{(pid=62070) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 967.051931] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-de08a2f2-ef2e-4d38-9071-72e4c13cea82 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 2c58db1d-405f-4489-85db-c74723be4a8d] Destroying instance {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 967.052981] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a978ed92-2f6d-4d2d-ada8-2b811997934f {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.062789] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-de08a2f2-ef2e-4d38-9071-72e4c13cea82 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 2c58db1d-405f-4489-85db-c74723be4a8d] Powering off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 967.065780] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5f6dfc72-3789-4100-aae8-c17b15d89123 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.069054] env[62070]: DEBUG nova.policy [None req-8cb62e19-b163-4dbc-aa0f-4390ab9e95e3 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '85707399ddf04d03bfb487560df1212e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd079c0ef3ed745fcaf69dc728dca4466', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62070) authorize /opt/stack/nova/nova/policy.py:201}} [ 967.070980] env[62070]: DEBUG nova.network.neutron [None req-f98075f3-04a2-432f-a2ed-d24c86458581 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: 1d595bc8-ab51-4443-bf32-079078f3133b] Instance cache missing network info. {{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 967.082719] env[62070]: DEBUG oslo_vmware.api [None req-de08a2f2-ef2e-4d38-9071-72e4c13cea82 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Waiting for the task: (returnval){ [ 967.082719] env[62070]: value = "task-1122161" [ 967.082719] env[62070]: _type = "Task" [ 967.082719] env[62070]: } to complete. 
{{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 967.093564] env[62070]: DEBUG oslo_vmware.api [None req-de08a2f2-ef2e-4d38-9071-72e4c13cea82 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Task: {'id': task-1122161, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 967.197954] env[62070]: DEBUG oslo_concurrency.lockutils [None req-821abf2e-1097-4d53-905f-a295581ee983 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Acquiring lock "interface-cf52cee8-874e-44e8-a36e-49ac20f3e312-60714fe7-f6bc-4f1f-b4d0-a0b45b5a8f70" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 967.198325] env[62070]: DEBUG oslo_concurrency.lockutils [None req-821abf2e-1097-4d53-905f-a295581ee983 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Lock "interface-cf52cee8-874e-44e8-a36e-49ac20f3e312-60714fe7-f6bc-4f1f-b4d0-a0b45b5a8f70" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 967.283331] env[62070]: DEBUG nova.network.neutron [None req-f98075f3-04a2-432f-a2ed-d24c86458581 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: 1d595bc8-ab51-4443-bf32-079078f3133b] Updating instance_info_cache with network_info: [{"id": "20687f56-b863-4fa4-8f5d-df461da5054b", "address": "fa:16:3e:51:98:ba", "network": {"id": "516790be-56b8-409d-b1c0-a8683a45a9ec", "bridge": "br-int", "label": "tempest-ServersTestJSON-693737631-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "91e246e32f29422e90fae974cfee9d8f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "359850cc-b061-4c9c-a61c-eb42e0f7c359", "external-id": "nsx-vlan-transportzone-113", "segmentation_id": 113, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap20687f56-b8", "ovs_interfaceid": "20687f56-b863-4fa4-8f5d-df461da5054b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 967.299365] env[62070]: DEBUG oslo_vmware.api [None req-26f7fbc8-03f4-42ad-82c9-20e385f600f8 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]529d5847-bfaa-31d9-af30-250aa970eabe, 'name': SearchDatastore_Task, 'duration_secs': 0.026114} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 967.304016] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b6a45e8d-f44c-4969-8efd-3c2a4800f6cb {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.314841] env[62070]: DEBUG oslo_vmware.api [None req-26f7fbc8-03f4-42ad-82c9-20e385f600f8 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Waiting for the task: (returnval){ [ 967.314841] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]52ccd500-1496-f709-5c67-6fc513ed485f" [ 967.314841] env[62070]: _type = "Task" [ 967.314841] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 967.326746] env[62070]: DEBUG oslo_vmware.api [None req-26f7fbc8-03f4-42ad-82c9-20e385f600f8 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52ccd500-1496-f709-5c67-6fc513ed485f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 967.341338] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0dc159d-3b76-46ea-9dd1-6ddd2b867e5f {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.352488] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a18f598-60ee-47e0-89cd-912d77de68bb {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.385268] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb714248-0896-4de1-882a-70ba4f11ba21 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.387998] env[62070]: DEBUG nova.network.neutron [None req-8cb62e19-b163-4dbc-aa0f-4390ab9e95e3 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: b101c79a-abfd-4104-aaed-096995fb2337] Successfully created port: 5b226cbf-df38-4b34-b591-7afc6de0a88c {{(pid=62070) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 967.395822] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c3bfec5-c38f-4d88-84bf-c3640363c911 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.410042] env[62070]: DEBUG nova.compute.provider_tree [None req-447f6c6a-b2b6-4357-bd55-081d5f3e8906 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 967.473690] env[62070]: DEBUG oslo_vmware.api [None req-242f2c71-01fd-4276-a650-74f4ae5a169d tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Task: {'id': task-1122160, 'name': Rename_Task, 'duration_secs': 0.233113} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 967.473690] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-242f2c71-01fd-4276-a650-74f4ae5a169d tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] [instance: 53a1791d-38fd-4721-b82c-2f0922348300] Powering on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 967.473690] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-11c98843-9d4f-4ea9-a380-1b79aaf9e733 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.479156] env[62070]: DEBUG oslo_vmware.api [None req-242f2c71-01fd-4276-a650-74f4ae5a169d tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Waiting for the task: (returnval){ [ 967.479156] env[62070]: value = "task-1122162" [ 967.479156] env[62070]: _type = "Task" [ 967.479156] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 967.487257] env[62070]: DEBUG oslo_vmware.api [None req-242f2c71-01fd-4276-a650-74f4ae5a169d tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Task: {'id': task-1122162, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 967.528510] env[62070]: DEBUG nova.compute.manager [None req-8cb62e19-b163-4dbc-aa0f-4390ab9e95e3 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: b101c79a-abfd-4104-aaed-096995fb2337] Start building block device mappings for instance. {{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 967.596281] env[62070]: DEBUG oslo_vmware.api [None req-de08a2f2-ef2e-4d38-9071-72e4c13cea82 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Task: {'id': task-1122161, 'name': PowerOffVM_Task, 'duration_secs': 0.204729} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 967.596281] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-de08a2f2-ef2e-4d38-9071-72e4c13cea82 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 2c58db1d-405f-4489-85db-c74723be4a8d] Powered off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 967.596281] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-de08a2f2-ef2e-4d38-9071-72e4c13cea82 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 2c58db1d-405f-4489-85db-c74723be4a8d] Unregistering the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 967.596281] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d420005d-28b6-4d91-acb3-7761931f3f0b {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.682608] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-de08a2f2-ef2e-4d38-9071-72e4c13cea82 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 2c58db1d-405f-4489-85db-c74723be4a8d] Unregistered the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 967.682968] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-de08a2f2-ef2e-4d38-9071-72e4c13cea82 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 2c58db1d-405f-4489-85db-c74723be4a8d] Deleting contents of the VM from datastore datastore1 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 967.683112] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-de08a2f2-ef2e-4d38-9071-72e4c13cea82 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Deleting the datastore file [datastore1] 2c58db1d-405f-4489-85db-c74723be4a8d {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 967.683426] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3344e320-7d16-40e4-93da-bc4110f7ecdc {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.692989] env[62070]: DEBUG oslo_vmware.api [None req-de08a2f2-ef2e-4d38-9071-72e4c13cea82 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Waiting for the task: (returnval){ [ 967.692989] env[62070]: value = "task-1122164" [ 967.692989] env[62070]: _type = "Task" [ 967.692989] env[62070]: } to complete. 
{{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 967.701936] env[62070]: DEBUG oslo_concurrency.lockutils [None req-821abf2e-1097-4d53-905f-a295581ee983 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Acquiring lock "cf52cee8-874e-44e8-a36e-49ac20f3e312" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 967.702179] env[62070]: DEBUG oslo_concurrency.lockutils [None req-821abf2e-1097-4d53-905f-a295581ee983 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Acquired lock "cf52cee8-874e-44e8-a36e-49ac20f3e312" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 967.702561] env[62070]: DEBUG oslo_vmware.api [None req-de08a2f2-ef2e-4d38-9071-72e4c13cea82 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Task: {'id': task-1122164, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 967.703151] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39df7f63-96f1-4756-8069-755a08147e43 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.726081] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf58cf46-f3fd-41b8-ba6c-e73d2b98d65a {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.756393] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-821abf2e-1097-4d53-905f-a295581ee983 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] [instance: cf52cee8-874e-44e8-a36e-49ac20f3e312] Reconfiguring VM to detach interface {{(pid=62070) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1974}} [ 967.756471] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cb44ae21-472e-4950-a922-a9cb0e7f165e {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.784038] env[62070]: DEBUG oslo_vmware.api [None req-821abf2e-1097-4d53-905f-a295581ee983 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Waiting for the task: (returnval){ [ 967.784038] env[62070]: value = "task-1122165" [ 967.784038] env[62070]: _type = "Task" [ 967.784038] env[62070]: } to complete. 
{{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 967.787130] env[62070]: DEBUG oslo_concurrency.lockutils [None req-f98075f3-04a2-432f-a2ed-d24c86458581 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Releasing lock "refresh_cache-1d595bc8-ab51-4443-bf32-079078f3133b" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 967.787130] env[62070]: DEBUG nova.compute.manager [None req-f98075f3-04a2-432f-a2ed-d24c86458581 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: 1d595bc8-ab51-4443-bf32-079078f3133b] Instance network_info: |[{"id": "20687f56-b863-4fa4-8f5d-df461da5054b", "address": "fa:16:3e:51:98:ba", "network": {"id": "516790be-56b8-409d-b1c0-a8683a45a9ec", "bridge": "br-int", "label": "tempest-ServersTestJSON-693737631-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "91e246e32f29422e90fae974cfee9d8f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "359850cc-b061-4c9c-a61c-eb42e0f7c359", "external-id": "nsx-vlan-transportzone-113", "segmentation_id": 113, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap20687f56-b8", "ovs_interfaceid": "20687f56-b863-4fa4-8f5d-df461da5054b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 967.787654] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-f98075f3-04a2-432f-a2ed-d24c86458581 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: 1d595bc8-ab51-4443-bf32-079078f3133b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:51:98:ba', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '359850cc-b061-4c9c-a61c-eb42e0f7c359', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '20687f56-b863-4fa4-8f5d-df461da5054b', 'vif_model': 'vmxnet3'}] {{(pid=62070) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 967.795415] env[62070]: DEBUG oslo.service.loopingcall [None req-f98075f3-04a2-432f-a2ed-d24c86458581 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 967.796068] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1d595bc8-ab51-4443-bf32-079078f3133b] Creating VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 967.796325] env[62070]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a8e60662-dad5-4c30-a3a4-d509eaae2a4f {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.814784] env[62070]: DEBUG oslo_concurrency.lockutils [None req-e4aeb392-3326-4009-b0d7-1e0b8b5e5799 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Acquiring lock "4a5f644a-1670-4c6b-a762-f87f1ee4cce5" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 967.815071] env[62070]: DEBUG oslo_concurrency.lockutils [None req-e4aeb392-3326-4009-b0d7-1e0b8b5e5799 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Lock "4a5f644a-1670-4c6b-a762-f87f1ee4cce5" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 967.815286] env[62070]: DEBUG oslo_concurrency.lockutils [None req-e4aeb392-3326-4009-b0d7-1e0b8b5e5799 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Acquiring lock "4a5f644a-1670-4c6b-a762-f87f1ee4cce5-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 967.815486] env[62070]: DEBUG oslo_concurrency.lockutils [None req-e4aeb392-3326-4009-b0d7-1e0b8b5e5799 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Lock "4a5f644a-1670-4c6b-a762-f87f1ee4cce5-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 967.815699] env[62070]: DEBUG oslo_concurrency.lockutils [None req-e4aeb392-3326-4009-b0d7-1e0b8b5e5799 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Lock "4a5f644a-1670-4c6b-a762-f87f1ee4cce5-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 967.817445] env[62070]: DEBUG oslo_vmware.api [None req-821abf2e-1097-4d53-905f-a295581ee983 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Task: {'id': task-1122165, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 967.818494] env[62070]: INFO nova.compute.manager [None req-e4aeb392-3326-4009-b0d7-1e0b8b5e5799 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] [instance: 4a5f644a-1670-4c6b-a762-f87f1ee4cce5] Terminating instance [ 967.823747] env[62070]: DEBUG nova.compute.manager [None req-e4aeb392-3326-4009-b0d7-1e0b8b5e5799 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] [instance: 4a5f644a-1670-4c6b-a762-f87f1ee4cce5] Start destroying the instance on the hypervisor. {{(pid=62070) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 967.823951] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-e4aeb392-3326-4009-b0d7-1e0b8b5e5799 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] [instance: 4a5f644a-1670-4c6b-a762-f87f1ee4cce5] Destroying instance {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 967.824760] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdd6480b-761d-47c6-8982-e3e1ef6c4a85 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.829263] env[62070]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 967.829263] env[62070]: value = "task-1122166" [ 967.829263] env[62070]: _type = "Task" [ 967.829263] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 967.838271] env[62070]: DEBUG oslo_vmware.api [None req-26f7fbc8-03f4-42ad-82c9-20e385f600f8 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52ccd500-1496-f709-5c67-6fc513ed485f, 'name': SearchDatastore_Task, 'duration_secs': 0.013402} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 967.838551] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-e4aeb392-3326-4009-b0d7-1e0b8b5e5799 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] [instance: 4a5f644a-1670-4c6b-a762-f87f1ee4cce5] Powering off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 967.839371] env[62070]: DEBUG oslo_concurrency.lockutils [None req-26f7fbc8-03f4-42ad-82c9-20e385f600f8 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Releasing lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 967.839644] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-26f7fbc8-03f4-42ad-82c9-20e385f600f8 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore2] a5cba512-9b50-4ca3-93eb-345be12dc588/a5cba512-9b50-4ca3-93eb-345be12dc588.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 967.839953] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7211a67f-0fb6-4f13-801c-eb00742ec39e {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.844437] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f1173fbd-e60a-4259-a167-078b67dc404e {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.846713] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1122166, 'name': CreateVM_Task} progress is 5%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 967.851747] env[62070]: DEBUG oslo_vmware.api [None req-26f7fbc8-03f4-42ad-82c9-20e385f600f8 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Waiting for the task: (returnval){ [ 967.851747] env[62070]: value = "task-1122168" [ 967.851747] env[62070]: _type = "Task" [ 967.851747] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 967.856253] env[62070]: DEBUG oslo_vmware.api [None req-e4aeb392-3326-4009-b0d7-1e0b8b5e5799 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Waiting for the task: (returnval){ [ 967.856253] env[62070]: value = "task-1122167" [ 967.856253] env[62070]: _type = "Task" [ 967.856253] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 967.863031] env[62070]: DEBUG oslo_vmware.api [None req-26f7fbc8-03f4-42ad-82c9-20e385f600f8 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Task: {'id': task-1122168, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 967.868777] env[62070]: DEBUG oslo_vmware.api [None req-e4aeb392-3326-4009-b0d7-1e0b8b5e5799 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Task: {'id': task-1122167, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 967.913431] env[62070]: DEBUG nova.scheduler.client.report [None req-447f6c6a-b2b6-4357-bd55-081d5f3e8906 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 967.990703] env[62070]: DEBUG oslo_vmware.api [None req-242f2c71-01fd-4276-a650-74f4ae5a169d tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Task: {'id': task-1122162, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 968.203214] env[62070]: DEBUG oslo_vmware.api [None req-de08a2f2-ef2e-4d38-9071-72e4c13cea82 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Task: {'id': task-1122164, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.241597} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 968.203544] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-de08a2f2-ef2e-4d38-9071-72e4c13cea82 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Deleted the datastore file {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 968.203821] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-de08a2f2-ef2e-4d38-9071-72e4c13cea82 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 2c58db1d-405f-4489-85db-c74723be4a8d] Deleted contents of the VM from datastore datastore1 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 968.204097] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-de08a2f2-ef2e-4d38-9071-72e4c13cea82 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 2c58db1d-405f-4489-85db-c74723be4a8d] Instance destroyed {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 968.204253] env[62070]: INFO nova.compute.manager [None req-de08a2f2-ef2e-4d38-9071-72e4c13cea82 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 2c58db1d-405f-4489-85db-c74723be4a8d] Took 1.15 seconds to destroy the instance on the hypervisor. 
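The repeated "Waiting for the task ... to complete", "progress is N%" and "completed successfully" entries above come from oslo.vmware polling vCenter task objects on a fixed interval. Below is a minimal sketch of that polling pattern using oslo.service's FixedIntervalLoopingCall (the same mechanism the loopingcall.py entries refer to); `fetch_task_info` is a hypothetical stand-in for the PropertyCollector call that reads the task state on each tick, not the driver's real helper.

```python
# Minimal sketch of the task-polling pattern behind the
# "Waiting for the task ... / progress is N% / completed successfully" entries.
from oslo_service import loopingcall


def wait_for_task_sketch(fetch_task_info, task_ref, interval=0.5):
    """Poll task_ref until it leaves the running state and return its final info."""

    def _poll():
        # fetch_task_info is a hypothetical callable, e.g. returning
        # {'state': 'running', 'progress': 77} for an in-flight task.
        info = fetch_task_info(task_ref)
        if info['state'] in ('success', 'error'):
            # Stop the loop; .wait() below returns this value.
            raise loopingcall.LoopingCallDone(info)
        # Still running: this is the point where the log prints "progress is N%".

    timer = loopingcall.FixedIntervalLoopingCall(_poll)
    return timer.start(interval=interval).wait()
```

On success the caller gets the final task info back, which is what shows up in the log as the completed task dict with its `duration_secs`.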
[ 968.204543] env[62070]: DEBUG oslo.service.loopingcall [None req-de08a2f2-ef2e-4d38-9071-72e4c13cea82 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 968.204765] env[62070]: DEBUG nova.compute.manager [-] [instance: 2c58db1d-405f-4489-85db-c74723be4a8d] Deallocating network for instance {{(pid=62070) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 968.204860] env[62070]: DEBUG nova.network.neutron [-] [instance: 2c58db1d-405f-4489-85db-c74723be4a8d] deallocate_for_instance() {{(pid=62070) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 968.296425] env[62070]: DEBUG oslo_vmware.api [None req-821abf2e-1097-4d53-905f-a295581ee983 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Task: {'id': task-1122165, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 968.338083] env[62070]: DEBUG nova.compute.manager [req-bf8c6259-5d82-4954-a0ae-0f19c99b2e85 req-d9590c73-1587-4eb0-862a-46c3c065b55f service nova] [instance: 1d595bc8-ab51-4443-bf32-079078f3133b] Received event network-changed-20687f56-b863-4fa4-8f5d-df461da5054b {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 968.338323] env[62070]: DEBUG nova.compute.manager [req-bf8c6259-5d82-4954-a0ae-0f19c99b2e85 req-d9590c73-1587-4eb0-862a-46c3c065b55f service nova] [instance: 1d595bc8-ab51-4443-bf32-079078f3133b] Refreshing instance network info cache due to event network-changed-20687f56-b863-4fa4-8f5d-df461da5054b. {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 968.338626] env[62070]: DEBUG oslo_concurrency.lockutils [req-bf8c6259-5d82-4954-a0ae-0f19c99b2e85 req-d9590c73-1587-4eb0-862a-46c3c065b55f service nova] Acquiring lock "refresh_cache-1d595bc8-ab51-4443-bf32-079078f3133b" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 968.338743] env[62070]: DEBUG oslo_concurrency.lockutils [req-bf8c6259-5d82-4954-a0ae-0f19c99b2e85 req-d9590c73-1587-4eb0-862a-46c3c065b55f service nova] Acquired lock "refresh_cache-1d595bc8-ab51-4443-bf32-079078f3133b" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 968.338909] env[62070]: DEBUG nova.network.neutron [req-bf8c6259-5d82-4954-a0ae-0f19c99b2e85 req-d9590c73-1587-4eb0-862a-46c3c065b55f service nova] [instance: 1d595bc8-ab51-4443-bf32-079078f3133b] Refreshing network info cache for port 20687f56-b863-4fa4-8f5d-df461da5054b {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 968.345223] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1122166, 'name': CreateVM_Task} progress is 99%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 968.364724] env[62070]: DEBUG oslo_vmware.api [None req-26f7fbc8-03f4-42ad-82c9-20e385f600f8 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Task: {'id': task-1122168, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 968.371331] env[62070]: DEBUG oslo_vmware.api [None req-e4aeb392-3326-4009-b0d7-1e0b8b5e5799 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Task: {'id': task-1122167, 'name': PowerOffVM_Task} progress is 100%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 968.420020] env[62070]: DEBUG oslo_concurrency.lockutils [None req-447f6c6a-b2b6-4357-bd55-081d5f3e8906 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.906s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 968.422409] env[62070]: DEBUG oslo_concurrency.lockutils [None req-d98f3ade-369e-4dac-bd29-f61163b09e57 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 12.878s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 968.422657] env[62070]: DEBUG nova.objects.instance [None req-d98f3ade-369e-4dac-bd29-f61163b09e57 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] Lazy-loading 'resources' on Instance uuid 1440361b-d3b4-4c1c-995c-fe7ff99ee297 {{(pid=62070) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 968.443211] env[62070]: INFO nova.scheduler.client.report [None req-447f6c6a-b2b6-4357-bd55-081d5f3e8906 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Deleted allocations for instance e850734f-c49c-46d7-87ab-b0d6bed89d9b [ 968.490654] env[62070]: DEBUG oslo_vmware.api [None req-242f2c71-01fd-4276-a650-74f4ae5a169d tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Task: {'id': task-1122162, 'name': PowerOnVM_Task, 'duration_secs': 0.552715} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 968.491010] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-242f2c71-01fd-4276-a650-74f4ae5a169d tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] [instance: 53a1791d-38fd-4721-b82c-2f0922348300] Powered on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 968.491263] env[62070]: INFO nova.compute.manager [None req-242f2c71-01fd-4276-a650-74f4ae5a169d tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] [instance: 53a1791d-38fd-4721-b82c-2f0922348300] Took 8.19 seconds to spawn the instance on the hypervisor. 
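The "Acquiring lock ... / acquired ... waited Ns / released ... held Ns" triples above (for example the "compute_resources" lock held 1.906s and then acquired after waiting 12.878s) are oslo.concurrency named locks. A small sketch of the two usual ways such a lock is taken; the lock name mirrors the log, the bodies are placeholders rather than Nova's resource-tracker code.

```python
# Sketch of the oslo.concurrency pattern behind the
# "Acquiring lock / acquired ... waited / released ... held" entries.
from oslo_concurrency import lockutils


def update_usage_sketch(tracker_state, delta):
    # Context-manager form: the "waited" time in the log is spent blocking
    # on this call, the "held" time is how long the with-block runs.
    with lockutils.lock("compute_resources"):
        tracker_state["usage"] = tracker_state.get("usage", 0) + delta


# Decorator form: equivalent to wrapping the whole function body in the lock.
@lockutils.synchronized("compute_resources")
def update_usage_decorated(tracker_state, delta):
    tracker_state["usage"] = tracker_state.get("usage", 0) + delta
```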
[ 968.491453] env[62070]: DEBUG nova.compute.manager [None req-242f2c71-01fd-4276-a650-74f4ae5a169d tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] [instance: 53a1791d-38fd-4721-b82c-2f0922348300] Checking state {{(pid=62070) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 968.494133] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b344eb8-48ee-4094-ba5b-4447f94f3662 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.536102] env[62070]: DEBUG nova.compute.manager [None req-8cb62e19-b163-4dbc-aa0f-4390ab9e95e3 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: b101c79a-abfd-4104-aaed-096995fb2337] Start spawning the instance on the hypervisor. {{(pid=62070) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 968.560925] env[62070]: DEBUG nova.virt.hardware [None req-8cb62e19-b163-4dbc-aa0f-4390ab9e95e3 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T09:21:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T09:21:17Z,direct_url=,disk_format='vmdk',id=43ea607c-7ece-4601-9b11-75c6a16aa7dd,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9d42cb2bbadf40d6b35f237f71234611',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T09:21:18Z,virtual_size=,visibility=), allow threads: False {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 968.561200] env[62070]: DEBUG nova.virt.hardware [None req-8cb62e19-b163-4dbc-aa0f-4390ab9e95e3 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Flavor limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 968.561385] env[62070]: DEBUG nova.virt.hardware [None req-8cb62e19-b163-4dbc-aa0f-4390ab9e95e3 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Image limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 968.561546] env[62070]: DEBUG nova.virt.hardware [None req-8cb62e19-b163-4dbc-aa0f-4390ab9e95e3 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Flavor pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 968.561700] env[62070]: DEBUG nova.virt.hardware [None req-8cb62e19-b163-4dbc-aa0f-4390ab9e95e3 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Image pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 968.561853] env[62070]: DEBUG nova.virt.hardware [None req-8cb62e19-b163-4dbc-aa0f-4390ab9e95e3 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Chose sockets=0, cores=0, 
threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 968.562072] env[62070]: DEBUG nova.virt.hardware [None req-8cb62e19-b163-4dbc-aa0f-4390ab9e95e3 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 968.562254] env[62070]: DEBUG nova.virt.hardware [None req-8cb62e19-b163-4dbc-aa0f-4390ab9e95e3 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 968.562431] env[62070]: DEBUG nova.virt.hardware [None req-8cb62e19-b163-4dbc-aa0f-4390ab9e95e3 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Got 1 possible topologies {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 968.562598] env[62070]: DEBUG nova.virt.hardware [None req-8cb62e19-b163-4dbc-aa0f-4390ab9e95e3 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 968.562774] env[62070]: DEBUG nova.virt.hardware [None req-8cb62e19-b163-4dbc-aa0f-4390ab9e95e3 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 968.563614] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa687c9e-aab2-42b0-84ec-642b8f4df76e {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.572065] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01752535-bfcc-4a12-8dc6-3408dce6af25 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.794765] env[62070]: DEBUG oslo_vmware.api [None req-821abf2e-1097-4d53-905f-a295581ee983 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Task: {'id': task-1122165, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 968.840897] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1122166, 'name': CreateVM_Task, 'duration_secs': 0.565862} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 968.841140] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1d595bc8-ab51-4443-bf32-079078f3133b] Created VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 968.842910] env[62070]: DEBUG oslo_concurrency.lockutils [None req-f98075f3-04a2-432f-a2ed-d24c86458581 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 968.842910] env[62070]: DEBUG oslo_concurrency.lockutils [None req-f98075f3-04a2-432f-a2ed-d24c86458581 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Acquired lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 968.842910] env[62070]: DEBUG oslo_concurrency.lockutils [None req-f98075f3-04a2-432f-a2ed-d24c86458581 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 968.842910] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c6d58e68-020f-46f3-b68e-3e1da7adbca6 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.849333] env[62070]: DEBUG oslo_vmware.api [None req-f98075f3-04a2-432f-a2ed-d24c86458581 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Waiting for the task: (returnval){ [ 968.849333] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]52c60ccf-bd60-f698-ee23-bae8d8c31274" [ 968.849333] env[62070]: _type = "Task" [ 968.849333] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 968.864268] env[62070]: DEBUG oslo_vmware.api [None req-f98075f3-04a2-432f-a2ed-d24c86458581 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52c60ccf-bd60-f698-ee23-bae8d8c31274, 'name': SearchDatastore_Task, 'duration_secs': 0.01056} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 968.867786] env[62070]: DEBUG oslo_concurrency.lockutils [None req-f98075f3-04a2-432f-a2ed-d24c86458581 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Releasing lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 968.868105] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-f98075f3-04a2-432f-a2ed-d24c86458581 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: 1d595bc8-ab51-4443-bf32-079078f3133b] Processing image 43ea607c-7ece-4601-9b11-75c6a16aa7dd {{(pid=62070) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 968.868408] env[62070]: DEBUG oslo_concurrency.lockutils [None req-f98075f3-04a2-432f-a2ed-d24c86458581 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 968.868582] env[62070]: DEBUG oslo_concurrency.lockutils [None req-f98075f3-04a2-432f-a2ed-d24c86458581 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Acquired lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 968.868786] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-f98075f3-04a2-432f-a2ed-d24c86458581 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 968.870390] env[62070]: DEBUG oslo_vmware.api [None req-26f7fbc8-03f4-42ad-82c9-20e385f600f8 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Task: {'id': task-1122168, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.538509} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 968.872344] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1a19d041-838c-4ea4-82c8-37bab897731e {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.874565] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-26f7fbc8-03f4-42ad-82c9-20e385f600f8 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore2] a5cba512-9b50-4ca3-93eb-345be12dc588/a5cba512-9b50-4ca3-93eb-345be12dc588.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 968.874836] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-26f7fbc8-03f4-42ad-82c9-20e385f600f8 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] [instance: a5cba512-9b50-4ca3-93eb-345be12dc588] Extending root virtual disk to 1048576 {{(pid=62070) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 968.875144] env[62070]: DEBUG oslo_vmware.api [None req-e4aeb392-3326-4009-b0d7-1e0b8b5e5799 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Task: {'id': task-1122167, 'name': PowerOffVM_Task} progress is 100%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 968.875395] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f3709d49-3638-42bb-8208-d92b0d81d576 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.884141] env[62070]: DEBUG oslo_vmware.api [None req-26f7fbc8-03f4-42ad-82c9-20e385f600f8 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Waiting for the task: (returnval){ [ 968.884141] env[62070]: value = "task-1122169" [ 968.884141] env[62070]: _type = "Task" [ 968.884141] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 968.889062] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-f98075f3-04a2-432f-a2ed-d24c86458581 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 968.892164] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-f98075f3-04a2-432f-a2ed-d24c86458581 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62070) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 968.892164] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0794bb97-8f67-468a-b115-c8f6535d55c6 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.897157] env[62070]: DEBUG oslo_vmware.api [None req-26f7fbc8-03f4-42ad-82c9-20e385f600f8 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Task: {'id': task-1122169, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 968.899232] env[62070]: DEBUG oslo_vmware.api [None req-f98075f3-04a2-432f-a2ed-d24c86458581 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Waiting for the task: (returnval){ [ 968.899232] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]5208cbea-f0df-acda-31d3-893ce4ff03b7" [ 968.899232] env[62070]: _type = "Task" [ 968.899232] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 968.904384] env[62070]: DEBUG nova.compute.manager [req-552eb36d-721f-45b9-8101-8482e583e5fd req-1e82bcc8-d547-498b-8980-be75fefb6880 service nova] [instance: b101c79a-abfd-4104-aaed-096995fb2337] Received event network-vif-plugged-5b226cbf-df38-4b34-b591-7afc6de0a88c {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 968.904645] env[62070]: DEBUG oslo_concurrency.lockutils [req-552eb36d-721f-45b9-8101-8482e583e5fd req-1e82bcc8-d547-498b-8980-be75fefb6880 service nova] Acquiring lock "b101c79a-abfd-4104-aaed-096995fb2337-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 968.904925] env[62070]: DEBUG oslo_concurrency.lockutils [req-552eb36d-721f-45b9-8101-8482e583e5fd req-1e82bcc8-d547-498b-8980-be75fefb6880 service nova] Lock "b101c79a-abfd-4104-aaed-096995fb2337-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 968.905514] env[62070]: DEBUG oslo_concurrency.lockutils [req-552eb36d-721f-45b9-8101-8482e583e5fd req-1e82bcc8-d547-498b-8980-be75fefb6880 service nova] Lock "b101c79a-abfd-4104-aaed-096995fb2337-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 968.905514] env[62070]: DEBUG nova.compute.manager [req-552eb36d-721f-45b9-8101-8482e583e5fd req-1e82bcc8-d547-498b-8980-be75fefb6880 service nova] [instance: b101c79a-abfd-4104-aaed-096995fb2337] No waiting events found dispatching network-vif-plugged-5b226cbf-df38-4b34-b591-7afc6de0a88c {{(pid=62070) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 968.905514] env[62070]: WARNING nova.compute.manager [req-552eb36d-721f-45b9-8101-8482e583e5fd req-1e82bcc8-d547-498b-8980-be75fefb6880 service nova] [instance: b101c79a-abfd-4104-aaed-096995fb2337] Received unexpected event network-vif-plugged-5b226cbf-df38-4b34-b591-7afc6de0a88c for instance with vm_state building and task_state spawning. 
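The nova.virt.hardware entries a few lines up walk from the flavor/image limits (0 meaning "no preference", 65536 meaning "no limit") to "Build topologies for 1 vcpu(s) 1:1:1" and "Got 1 possible topologies". The following is a simplified, hypothetical re-derivation of that enumeration, listing every (sockets, cores, threads) factorisation of the vCPU count that fits the limits; it is the same idea, not Nova's actual implementation.

```python
# Simplified sketch of the topology enumeration described by the
# hardware.py log entries: every (sockets, cores, threads) factorisation
# of the vCPU count that respects the per-dimension limits.
def possible_cpu_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    topologies = []
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        if vcpus % sockets:
            continue
        for cores in range(1, min(vcpus // sockets, max_cores) + 1):
            if (vcpus // sockets) % cores:
                continue
            threads = vcpus // (sockets * cores)
            if threads <= max_threads:
                topologies.append((sockets, cores, threads))
    return topologies


# For the m1.nano flavor in the log (1 vCPU) the only factorisation is 1:1:1,
# which is why the driver reports "Got 1 possible topologies".
assert possible_cpu_topologies(1) == [(1, 1, 1)]
```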
[ 968.913661] env[62070]: DEBUG oslo_vmware.api [None req-f98075f3-04a2-432f-a2ed-d24c86458581 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]5208cbea-f0df-acda-31d3-893ce4ff03b7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 968.955508] env[62070]: DEBUG oslo_concurrency.lockutils [None req-447f6c6a-b2b6-4357-bd55-081d5f3e8906 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Lock "e850734f-c49c-46d7-87ab-b0d6bed89d9b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 16.283s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 969.010030] env[62070]: DEBUG nova.network.neutron [-] [instance: 2c58db1d-405f-4489-85db-c74723be4a8d] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 969.013839] env[62070]: DEBUG nova.network.neutron [None req-8cb62e19-b163-4dbc-aa0f-4390ab9e95e3 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: b101c79a-abfd-4104-aaed-096995fb2337] Successfully updated port: 5b226cbf-df38-4b34-b591-7afc6de0a88c {{(pid=62070) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 969.018818] env[62070]: INFO nova.compute.manager [None req-242f2c71-01fd-4276-a650-74f4ae5a169d tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] [instance: 53a1791d-38fd-4721-b82c-2f0922348300] Took 39.83 seconds to build instance. [ 969.193124] env[62070]: DEBUG nova.network.neutron [req-bf8c6259-5d82-4954-a0ae-0f19c99b2e85 req-d9590c73-1587-4eb0-862a-46c3c065b55f service nova] [instance: 1d595bc8-ab51-4443-bf32-079078f3133b] Updated VIF entry in instance network info cache for port 20687f56-b863-4fa4-8f5d-df461da5054b. 
{{(pid=62070) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 969.193511] env[62070]: DEBUG nova.network.neutron [req-bf8c6259-5d82-4954-a0ae-0f19c99b2e85 req-d9590c73-1587-4eb0-862a-46c3c065b55f service nova] [instance: 1d595bc8-ab51-4443-bf32-079078f3133b] Updating instance_info_cache with network_info: [{"id": "20687f56-b863-4fa4-8f5d-df461da5054b", "address": "fa:16:3e:51:98:ba", "network": {"id": "516790be-56b8-409d-b1c0-a8683a45a9ec", "bridge": "br-int", "label": "tempest-ServersTestJSON-693737631-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "91e246e32f29422e90fae974cfee9d8f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "359850cc-b061-4c9c-a61c-eb42e0f7c359", "external-id": "nsx-vlan-transportzone-113", "segmentation_id": 113, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap20687f56-b8", "ovs_interfaceid": "20687f56-b863-4fa4-8f5d-df461da5054b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 969.224339] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75eeb606-4205-4be9-9b1b-dbcae43a222d {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.232746] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b096fdd-b574-4663-90d4-1f1153c6e3b9 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.266041] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e71a8a51-ddba-41c4-a101-b082a0d58515 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.274741] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fdb8278-b5ab-4c50-95ce-310468705824 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.288738] env[62070]: DEBUG nova.compute.provider_tree [None req-d98f3ade-369e-4dac-bd29-f61163b09e57 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 969.298692] env[62070]: DEBUG oslo_vmware.api [None req-821abf2e-1097-4d53-905f-a295581ee983 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Task: {'id': task-1122165, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 969.367969] env[62070]: DEBUG oslo_vmware.api [None req-e4aeb392-3326-4009-b0d7-1e0b8b5e5799 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Task: {'id': task-1122167, 'name': PowerOffVM_Task, 'duration_secs': 1.272813} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 969.368303] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-e4aeb392-3326-4009-b0d7-1e0b8b5e5799 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] [instance: 4a5f644a-1670-4c6b-a762-f87f1ee4cce5] Powered off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 969.368477] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-e4aeb392-3326-4009-b0d7-1e0b8b5e5799 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] [instance: 4a5f644a-1670-4c6b-a762-f87f1ee4cce5] Unregistering the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 969.368784] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-df0874d8-81c4-445b-86b8-4583a2306a7e {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.393863] env[62070]: DEBUG oslo_vmware.api [None req-26f7fbc8-03f4-42ad-82c9-20e385f600f8 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Task: {'id': task-1122169, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.443844} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 969.393863] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-26f7fbc8-03f4-42ad-82c9-20e385f600f8 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] [instance: a5cba512-9b50-4ca3-93eb-345be12dc588] Extended root virtual disk {{(pid=62070) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 969.394637] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a699011d-e522-4174-b12c-a6f2576a1f91 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.417036] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-26f7fbc8-03f4-42ad-82c9-20e385f600f8 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] [instance: a5cba512-9b50-4ca3-93eb-345be12dc588] Reconfiguring VM instance instance-00000058 to attach disk [datastore2] a5cba512-9b50-4ca3-93eb-345be12dc588/a5cba512-9b50-4ca3-93eb-345be12dc588.vmdk or device None with type sparse {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 969.420147] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-aa293d0e-8e3a-44e4-b363-610e7805d6be {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.441546] env[62070]: DEBUG oslo_vmware.api [None req-f98075f3-04a2-432f-a2ed-d24c86458581 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Task: {'id': 
session[52f37560-87ef-95a6-a0aa-393127576bf7]5208cbea-f0df-acda-31d3-893ce4ff03b7, 'name': SearchDatastore_Task, 'duration_secs': 0.020646} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 969.443389] env[62070]: DEBUG oslo_vmware.api [None req-26f7fbc8-03f4-42ad-82c9-20e385f600f8 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Waiting for the task: (returnval){ [ 969.443389] env[62070]: value = "task-1122171" [ 969.443389] env[62070]: _type = "Task" [ 969.443389] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 969.443626] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5b501a5f-7b62-40b9-a6d2-6f4b88175bcf {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.454130] env[62070]: DEBUG oslo_vmware.api [None req-f98075f3-04a2-432f-a2ed-d24c86458581 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Waiting for the task: (returnval){ [ 969.454130] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]52855697-310e-7163-adf9-53e1b1ef5576" [ 969.454130] env[62070]: _type = "Task" [ 969.454130] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 969.462223] env[62070]: DEBUG oslo_vmware.api [None req-26f7fbc8-03f4-42ad-82c9-20e385f600f8 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Task: {'id': task-1122171, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 969.462984] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-e4aeb392-3326-4009-b0d7-1e0b8b5e5799 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] [instance: 4a5f644a-1670-4c6b-a762-f87f1ee4cce5] Unregistered the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 969.463174] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-e4aeb392-3326-4009-b0d7-1e0b8b5e5799 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] [instance: 4a5f644a-1670-4c6b-a762-f87f1ee4cce5] Deleting contents of the VM from datastore datastore1 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 969.463598] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-e4aeb392-3326-4009-b0d7-1e0b8b5e5799 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Deleting the datastore file [datastore1] 4a5f644a-1670-4c6b-a762-f87f1ee4cce5 {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 969.463947] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-df515af3-bed2-45ef-9a7a-9678d1f92f6e {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.469228] env[62070]: DEBUG oslo_vmware.api [None req-f98075f3-04a2-432f-a2ed-d24c86458581 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52855697-310e-7163-adf9-53e1b1ef5576, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 969.474812] env[62070]: DEBUG oslo_vmware.api [None req-e4aeb392-3326-4009-b0d7-1e0b8b5e5799 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Waiting for the task: (returnval){ [ 969.474812] env[62070]: value = "task-1122172" [ 969.474812] env[62070]: _type = "Task" [ 969.474812] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 969.483581] env[62070]: DEBUG oslo_vmware.api [None req-e4aeb392-3326-4009-b0d7-1e0b8b5e5799 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Task: {'id': task-1122172, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 969.515291] env[62070]: INFO nova.compute.manager [-] [instance: 2c58db1d-405f-4489-85db-c74723be4a8d] Took 1.31 seconds to deallocate network for instance. 
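The "Inventory has not changed for provider 21c7c111-..." entries around this point carry the compute node's placement inventory. As a quick illustration of what those numbers mean, the sketch below applies the usual placement capacity formula, (total - reserved) * allocation_ratio, to the exact record from the log; the helper itself is illustrative, not placement code.

```python
# Effective capacity implied by the provider inventory reported in the log,
# using the usual placement formula (total - reserved) * allocation_ratio.
INVENTORY = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
}


def effective_capacity(inventory):
    return {
        rc: (rec['total'] - rec['reserved']) * rec['allocation_ratio']
        for rc, rec in inventory.items()
    }


# -> {'VCPU': 192.0, 'MEMORY_MB': 196078.0, 'DISK_GB': 400.0}
print(effective_capacity(INVENTORY))
```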
[ 969.518077] env[62070]: DEBUG oslo_concurrency.lockutils [None req-8cb62e19-b163-4dbc-aa0f-4390ab9e95e3 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Acquiring lock "refresh_cache-b101c79a-abfd-4104-aaed-096995fb2337" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 969.518077] env[62070]: DEBUG oslo_concurrency.lockutils [None req-8cb62e19-b163-4dbc-aa0f-4390ab9e95e3 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Acquired lock "refresh_cache-b101c79a-abfd-4104-aaed-096995fb2337" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 969.518289] env[62070]: DEBUG nova.network.neutron [None req-8cb62e19-b163-4dbc-aa0f-4390ab9e95e3 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: b101c79a-abfd-4104-aaed-096995fb2337] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 969.522348] env[62070]: DEBUG oslo_concurrency.lockutils [None req-242f2c71-01fd-4276-a650-74f4ae5a169d tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Lock "53a1791d-38fd-4721-b82c-2f0922348300" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 41.341s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 969.696698] env[62070]: DEBUG oslo_concurrency.lockutils [req-bf8c6259-5d82-4954-a0ae-0f19c99b2e85 req-d9590c73-1587-4eb0-862a-46c3c065b55f service nova] Releasing lock "refresh_cache-1d595bc8-ab51-4443-bf32-079078f3133b" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 969.795293] env[62070]: DEBUG nova.scheduler.client.report [None req-d98f3ade-369e-4dac-bd29-f61163b09e57 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 969.805732] env[62070]: DEBUG oslo_vmware.api [None req-821abf2e-1097-4d53-905f-a295581ee983 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Task: {'id': task-1122165, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 969.861516] env[62070]: DEBUG oslo_concurrency.lockutils [None req-54b03663-a039-4755-80d7-40a200b8e0db tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Acquiring lock "e4cf42ff-8440-42bc-b629-4b712fd94e99" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 969.861643] env[62070]: DEBUG oslo_concurrency.lockutils [None req-54b03663-a039-4755-80d7-40a200b8e0db tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Lock "e4cf42ff-8440-42bc-b629-4b712fd94e99" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 969.957026] env[62070]: DEBUG oslo_vmware.api [None req-26f7fbc8-03f4-42ad-82c9-20e385f600f8 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Task: {'id': task-1122171, 'name': ReconfigVM_Task, 'duration_secs': 0.306001} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 969.959181] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-26f7fbc8-03f4-42ad-82c9-20e385f600f8 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] [instance: a5cba512-9b50-4ca3-93eb-345be12dc588] Reconfigured VM instance instance-00000058 to attach disk [datastore2] a5cba512-9b50-4ca3-93eb-345be12dc588/a5cba512-9b50-4ca3-93eb-345be12dc588.vmdk or device None with type sparse {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 969.959843] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c937a458-5223-4c7f-b078-65f2da2ab503 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.967630] env[62070]: DEBUG oslo_vmware.api [None req-f98075f3-04a2-432f-a2ed-d24c86458581 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52855697-310e-7163-adf9-53e1b1ef5576, 'name': SearchDatastore_Task, 'duration_secs': 0.0135} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 969.968771] env[62070]: DEBUG oslo_concurrency.lockutils [None req-f98075f3-04a2-432f-a2ed-d24c86458581 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Releasing lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 969.969064] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-f98075f3-04a2-432f-a2ed-d24c86458581 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore2] 1d595bc8-ab51-4443-bf32-079078f3133b/1d595bc8-ab51-4443-bf32-079078f3133b.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 969.969366] env[62070]: DEBUG oslo_vmware.api [None req-26f7fbc8-03f4-42ad-82c9-20e385f600f8 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Waiting for the task: (returnval){ [ 969.969366] env[62070]: value = "task-1122173" [ 969.969366] env[62070]: _type = "Task" [ 969.969366] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 969.969569] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7ebfe70d-3cb5-4b43-87c4-c63ba2ae2c57 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.982126] env[62070]: DEBUG oslo_vmware.api [None req-26f7fbc8-03f4-42ad-82c9-20e385f600f8 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Task: {'id': task-1122173, 'name': Rename_Task} progress is 5%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 969.983164] env[62070]: DEBUG oslo_vmware.api [None req-f98075f3-04a2-432f-a2ed-d24c86458581 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Waiting for the task: (returnval){ [ 969.983164] env[62070]: value = "task-1122174" [ 969.983164] env[62070]: _type = "Task" [ 969.983164] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 969.990251] env[62070]: DEBUG oslo_vmware.api [None req-e4aeb392-3326-4009-b0d7-1e0b8b5e5799 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Task: {'id': task-1122172, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.186024} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 969.994171] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-e4aeb392-3326-4009-b0d7-1e0b8b5e5799 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Deleted the datastore file {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 969.994171] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-e4aeb392-3326-4009-b0d7-1e0b8b5e5799 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] [instance: 4a5f644a-1670-4c6b-a762-f87f1ee4cce5] Deleted contents of the VM from datastore datastore1 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 969.994171] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-e4aeb392-3326-4009-b0d7-1e0b8b5e5799 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] [instance: 4a5f644a-1670-4c6b-a762-f87f1ee4cce5] Instance destroyed {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 969.994415] env[62070]: INFO nova.compute.manager [None req-e4aeb392-3326-4009-b0d7-1e0b8b5e5799 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] [instance: 4a5f644a-1670-4c6b-a762-f87f1ee4cce5] Took 2.17 seconds to destroy the instance on the hypervisor. [ 969.994714] env[62070]: DEBUG oslo.service.loopingcall [None req-e4aeb392-3326-4009-b0d7-1e0b8b5e5799 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 969.995667] env[62070]: DEBUG oslo_vmware.api [None req-f98075f3-04a2-432f-a2ed-d24c86458581 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Task: {'id': task-1122174, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 969.995829] env[62070]: DEBUG nova.compute.manager [-] [instance: 4a5f644a-1670-4c6b-a762-f87f1ee4cce5] Deallocating network for instance {{(pid=62070) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 969.995890] env[62070]: DEBUG nova.network.neutron [-] [instance: 4a5f644a-1670-4c6b-a762-f87f1ee4cce5] deallocate_for_instance() {{(pid=62070) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 970.027230] env[62070]: DEBUG oslo_concurrency.lockutils [None req-de08a2f2-ef2e-4d38-9071-72e4c13cea82 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 970.059519] env[62070]: DEBUG nova.network.neutron [None req-8cb62e19-b163-4dbc-aa0f-4390ab9e95e3 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: b101c79a-abfd-4104-aaed-096995fb2337] Instance cache missing network info. 
{{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 970.204025] env[62070]: DEBUG nova.network.neutron [None req-8cb62e19-b163-4dbc-aa0f-4390ab9e95e3 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: b101c79a-abfd-4104-aaed-096995fb2337] Updating instance_info_cache with network_info: [{"id": "5b226cbf-df38-4b34-b591-7afc6de0a88c", "address": "fa:16:3e:a3:e3:84", "network": {"id": "4888f989-958d-49ff-bf5a-06873e4cc624", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-906255456-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d079c0ef3ed745fcaf69dc728dca4466", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7e0240aa-a694-48fc-a0f9-6f2d3e71aa12", "external-id": "nsx-vlan-transportzone-249", "segmentation_id": 249, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5b226cbf-df", "ovs_interfaceid": "5b226cbf-df38-4b34-b591-7afc6de0a88c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 970.302258] env[62070]: DEBUG oslo_concurrency.lockutils [None req-d98f3ade-369e-4dac-bd29-f61163b09e57 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.880s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 970.305011] env[62070]: DEBUG oslo_vmware.api [None req-821abf2e-1097-4d53-905f-a295581ee983 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Task: {'id': task-1122165, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 970.305529] env[62070]: DEBUG oslo_concurrency.lockutils [None req-10152dbe-3561-4ca4-810f-51cf10506bc8 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 13.960s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 970.305798] env[62070]: DEBUG nova.objects.instance [None req-10152dbe-3561-4ca4-810f-51cf10506bc8 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Lazy-loading 'resources' on Instance uuid fb054a32-c1aa-4884-a087-da5ad34cf3c4 {{(pid=62070) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 970.329814] env[62070]: INFO nova.scheduler.client.report [None req-d98f3ade-369e-4dac-bd29-f61163b09e57 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] Deleted allocations for instance 1440361b-d3b4-4c1c-995c-fe7ff99ee297 [ 970.364249] env[62070]: DEBUG nova.compute.manager [None req-54b03663-a039-4755-80d7-40a200b8e0db tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] [instance: e4cf42ff-8440-42bc-b629-4b712fd94e99] Starting instance... {{(pid=62070) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 970.390633] env[62070]: DEBUG nova.compute.manager [req-b62e1b37-4a96-484c-96c3-42997770afdd req-db1e80d8-c3e0-4950-b098-b758c5e60049 service nova] [instance: 2c58db1d-405f-4489-85db-c74723be4a8d] Received event network-vif-deleted-8876137b-4c95-4f50-8bf9-ad7d44ac5052 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 970.481255] env[62070]: DEBUG oslo_vmware.api [None req-26f7fbc8-03f4-42ad-82c9-20e385f600f8 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Task: {'id': task-1122173, 'name': Rename_Task, 'duration_secs': 0.153672} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 970.482108] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-26f7fbc8-03f4-42ad-82c9-20e385f600f8 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] [instance: a5cba512-9b50-4ca3-93eb-345be12dc588] Powering on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 970.482679] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-92bdaf2c-3fb4-4fc2-896c-1890d8cbe252 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.495903] env[62070]: DEBUG oslo_concurrency.lockutils [None req-ca3baa9f-e172-43d5-a6ee-07c9fa3a6821 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Acquiring lock "6cba961f-f9f9-4d3c-853a-049a014c9dbb" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 970.495903] env[62070]: DEBUG oslo_concurrency.lockutils [None req-ca3baa9f-e172-43d5-a6ee-07c9fa3a6821 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Lock "6cba961f-f9f9-4d3c-853a-049a014c9dbb" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 970.499369] env[62070]: DEBUG oslo_vmware.api [None req-f98075f3-04a2-432f-a2ed-d24c86458581 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Task: {'id': task-1122174, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.480439} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 970.500222] env[62070]: DEBUG oslo_vmware.api [None req-26f7fbc8-03f4-42ad-82c9-20e385f600f8 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Waiting for the task: (returnval){ [ 970.500222] env[62070]: value = "task-1122175" [ 970.500222] env[62070]: _type = "Task" [ 970.500222] env[62070]: } to complete. 
{{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 970.500446] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-f98075f3-04a2-432f-a2ed-d24c86458581 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore2] 1d595bc8-ab51-4443-bf32-079078f3133b/1d595bc8-ab51-4443-bf32-079078f3133b.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 970.500966] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-f98075f3-04a2-432f-a2ed-d24c86458581 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: 1d595bc8-ab51-4443-bf32-079078f3133b] Extending root virtual disk to 1048576 {{(pid=62070) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 970.500966] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-cc7579c9-1e04-426e-9607-c652eb0bd1a5 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.517068] env[62070]: DEBUG oslo_vmware.api [None req-26f7fbc8-03f4-42ad-82c9-20e385f600f8 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Task: {'id': task-1122175, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 970.517346] env[62070]: DEBUG oslo_vmware.api [None req-f98075f3-04a2-432f-a2ed-d24c86458581 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Waiting for the task: (returnval){ [ 970.517346] env[62070]: value = "task-1122176" [ 970.517346] env[62070]: _type = "Task" [ 970.517346] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 970.529019] env[62070]: DEBUG oslo_vmware.api [None req-f98075f3-04a2-432f-a2ed-d24c86458581 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Task: {'id': task-1122176, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 970.703757] env[62070]: DEBUG oslo_concurrency.lockutils [None req-8cb62e19-b163-4dbc-aa0f-4390ab9e95e3 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Releasing lock "refresh_cache-b101c79a-abfd-4104-aaed-096995fb2337" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 970.704115] env[62070]: DEBUG nova.compute.manager [None req-8cb62e19-b163-4dbc-aa0f-4390ab9e95e3 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: b101c79a-abfd-4104-aaed-096995fb2337] Instance network_info: |[{"id": "5b226cbf-df38-4b34-b591-7afc6de0a88c", "address": "fa:16:3e:a3:e3:84", "network": {"id": "4888f989-958d-49ff-bf5a-06873e4cc624", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-906255456-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d079c0ef3ed745fcaf69dc728dca4466", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7e0240aa-a694-48fc-a0f9-6f2d3e71aa12", "external-id": "nsx-vlan-transportzone-249", "segmentation_id": 249, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5b226cbf-df", "ovs_interfaceid": "5b226cbf-df38-4b34-b591-7afc6de0a88c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 970.704583] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-8cb62e19-b163-4dbc-aa0f-4390ab9e95e3 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: b101c79a-abfd-4104-aaed-096995fb2337] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a3:e3:84', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7e0240aa-a694-48fc-a0f9-6f2d3e71aa12', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5b226cbf-df38-4b34-b591-7afc6de0a88c', 'vif_model': 'vmxnet3'}] {{(pid=62070) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 970.712589] env[62070]: DEBUG oslo.service.loopingcall [None req-8cb62e19-b163-4dbc-aa0f-4390ab9e95e3 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
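The "Instance VIF info" entry above is a projection of the network_info element logged just before it: the br-int bridge, the port's MAC address, the NSX logical-switch id, and the vmxnet3 adapter model. A simplified sketch of that mapping, using the field names exactly as they are printed in the log rather than the actual nova.virt.vmwareapi code:

    def vif_info_from_network_info(vif, vif_model="vmxnet3"):
        """Project one network_info entry into the VIF-info shape shown above."""
        details = vif.get("details", {})
        return {
            "network_name": vif["network"]["bridge"],        # e.g. "br-int"
            "mac_address": vif["address"],
            "network_ref": {
                "type": "OpaqueNetwork",
                "network-id": details.get("nsx-logical-switch-id"),
                "network-type": "nsx.LogicalSwitch",
                "use-external-id": True,
            },
            "iface_id": vif["id"],
            "vif_model": vif_model,
        }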
{{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 970.712865] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b101c79a-abfd-4104-aaed-096995fb2337] Creating VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 970.713127] env[62070]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-87553405-8d7a-4a5c-a228-06a63b8d9b79 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.736268] env[62070]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 970.736268] env[62070]: value = "task-1122177" [ 970.736268] env[62070]: _type = "Task" [ 970.736268] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 970.752785] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1122177, 'name': CreateVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 970.801678] env[62070]: DEBUG oslo_vmware.api [None req-821abf2e-1097-4d53-905f-a295581ee983 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Task: {'id': task-1122165, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 970.843063] env[62070]: DEBUG oslo_concurrency.lockutils [None req-d98f3ade-369e-4dac-bd29-f61163b09e57 tempest-ServerShowV257Test-2131685284 tempest-ServerShowV257Test-2131685284-project-member] Lock "1440361b-d3b4-4c1c-995c-fe7ff99ee297" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 19.042s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 970.896328] env[62070]: DEBUG oslo_concurrency.lockutils [None req-54b03663-a039-4755-80d7-40a200b8e0db tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 970.936873] env[62070]: DEBUG nova.compute.manager [req-cb48e04b-1d4c-4242-9b44-f64a787ccf48 req-4197a71f-ae26-4d6e-b5ee-ad18b1806e39 service nova] [instance: b101c79a-abfd-4104-aaed-096995fb2337] Received event network-changed-5b226cbf-df38-4b34-b591-7afc6de0a88c {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 970.937075] env[62070]: DEBUG nova.compute.manager [req-cb48e04b-1d4c-4242-9b44-f64a787ccf48 req-4197a71f-ae26-4d6e-b5ee-ad18b1806e39 service nova] [instance: b101c79a-abfd-4104-aaed-096995fb2337] Refreshing instance network info cache due to event network-changed-5b226cbf-df38-4b34-b591-7afc6de0a88c. 
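The network-changed event received above drives a refresh of the cached port information: the service request re-acquires the per-instance "refresh_cache-..." lock, re-reads the port, and rewrites the cached VIF entry (the "Updated VIF entry" record that follows). A stdlib-only sketch of that refresh-under-lock pattern, with fetch_port_info() as a hypothetical callback:

    def refresh_port_in_cache(cache, instance_uuid, port_id, fetch_port_info, lock):
        """Re-read one port and update the instance's cached network info."""
        with lock:  # e.g. the "refresh_cache-<instance-uuid>" named lock
            entries = cache.setdefault(instance_uuid, [])
            fresh = fetch_port_info(port_id)
            for i, vif in enumerate(entries):
                if vif["id"] == port_id:
                    entries[i] = fresh       # existing VIF entry updated
                    break
            else:
                entries.append(fresh)        # port was not cached yet
            return entries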
{{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 970.937321] env[62070]: DEBUG oslo_concurrency.lockutils [req-cb48e04b-1d4c-4242-9b44-f64a787ccf48 req-4197a71f-ae26-4d6e-b5ee-ad18b1806e39 service nova] Acquiring lock "refresh_cache-b101c79a-abfd-4104-aaed-096995fb2337" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 970.937467] env[62070]: DEBUG oslo_concurrency.lockutils [req-cb48e04b-1d4c-4242-9b44-f64a787ccf48 req-4197a71f-ae26-4d6e-b5ee-ad18b1806e39 service nova] Acquired lock "refresh_cache-b101c79a-abfd-4104-aaed-096995fb2337" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 970.937709] env[62070]: DEBUG nova.network.neutron [req-cb48e04b-1d4c-4242-9b44-f64a787ccf48 req-4197a71f-ae26-4d6e-b5ee-ad18b1806e39 service nova] [instance: b101c79a-abfd-4104-aaed-096995fb2337] Refreshing network info cache for port 5b226cbf-df38-4b34-b591-7afc6de0a88c {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 971.001958] env[62070]: DEBUG nova.compute.manager [None req-ca3baa9f-e172-43d5-a6ee-07c9fa3a6821 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 6cba961f-f9f9-4d3c-853a-049a014c9dbb] Starting instance... {{(pid=62070) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 971.014622] env[62070]: DEBUG oslo_vmware.api [None req-26f7fbc8-03f4-42ad-82c9-20e385f600f8 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Task: {'id': task-1122175, 'name': PowerOnVM_Task, 'duration_secs': 0.500307} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 971.014918] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-26f7fbc8-03f4-42ad-82c9-20e385f600f8 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] [instance: a5cba512-9b50-4ca3-93eb-345be12dc588] Powered on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 971.017146] env[62070]: INFO nova.compute.manager [None req-26f7fbc8-03f4-42ad-82c9-20e385f600f8 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] [instance: a5cba512-9b50-4ca3-93eb-345be12dc588] Took 8.34 seconds to spawn the instance on the hypervisor. [ 971.017377] env[62070]: DEBUG nova.compute.manager [None req-26f7fbc8-03f4-42ad-82c9-20e385f600f8 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] [instance: a5cba512-9b50-4ca3-93eb-345be12dc588] Checking state {{(pid=62070) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 971.018770] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31d1dc5b-acef-4028-9653-f74677286866 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.039503] env[62070]: DEBUG oslo_vmware.api [None req-f98075f3-04a2-432f-a2ed-d24c86458581 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Task: {'id': task-1122176, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.07261} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 971.039841] env[62070]: DEBUG nova.network.neutron [-] [instance: 4a5f644a-1670-4c6b-a762-f87f1ee4cce5] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 971.045487] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-f98075f3-04a2-432f-a2ed-d24c86458581 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: 1d595bc8-ab51-4443-bf32-079078f3133b] Extended root virtual disk {{(pid=62070) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 971.049704] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47094d91-13f4-4f07-8396-a167079ef7e4 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.078292] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-f98075f3-04a2-432f-a2ed-d24c86458581 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: 1d595bc8-ab51-4443-bf32-079078f3133b] Reconfiguring VM instance instance-00000059 to attach disk [datastore2] 1d595bc8-ab51-4443-bf32-079078f3133b/1d595bc8-ab51-4443-bf32-079078f3133b.vmdk or device None with type sparse {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 971.082288] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d4ca7a53-b5c1-4c22-970f-350c62ce5d84 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.105077] env[62070]: DEBUG oslo_vmware.api [None req-f98075f3-04a2-432f-a2ed-d24c86458581 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Waiting for the task: (returnval){ [ 971.105077] env[62070]: value = "task-1122178" [ 971.105077] env[62070]: _type = "Task" [ 971.105077] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 971.122510] env[62070]: DEBUG oslo_vmware.api [None req-f98075f3-04a2-432f-a2ed-d24c86458581 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Task: {'id': task-1122178, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 971.206613] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c4ed9dc-002f-4c73-907a-30a3a3e7229a {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.218479] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbb29441-667b-4061-8371-9d697dbfe3da {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.256164] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5eb0411-6cd2-4eae-a191-1158fcb6cd8d {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.266665] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1122177, 'name': CreateVM_Task, 'duration_secs': 0.476158} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 971.267972] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-363fa234-d734-4dfd-b7d8-2a881d2fb4a2 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.271742] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b101c79a-abfd-4104-aaed-096995fb2337] Created VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 971.272513] env[62070]: DEBUG oslo_concurrency.lockutils [None req-8cb62e19-b163-4dbc-aa0f-4390ab9e95e3 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 971.272696] env[62070]: DEBUG oslo_concurrency.lockutils [None req-8cb62e19-b163-4dbc-aa0f-4390ab9e95e3 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Acquired lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 971.273038] env[62070]: DEBUG oslo_concurrency.lockutils [None req-8cb62e19-b163-4dbc-aa0f-4390ab9e95e3 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 971.273738] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-02f2a8b0-d243-4d08-8e9d-f4d8df4c6cb3 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.284515] env[62070]: DEBUG nova.compute.provider_tree [None req-10152dbe-3561-4ca4-810f-51cf10506bc8 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Updating inventory in ProviderTree for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 971.287091] env[62070]: DEBUG oslo_vmware.api [None req-8cb62e19-b163-4dbc-aa0f-4390ab9e95e3 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Waiting for the task: (returnval){ [ 971.287091] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]520eb172-9c94-2e17-ba4e-ce835fab0e0d" [ 971.287091] env[62070]: _type = "Task" [ 971.287091] env[62070]: } to complete. 
{{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 971.297354] env[62070]: DEBUG oslo_vmware.api [None req-8cb62e19-b163-4dbc-aa0f-4390ab9e95e3 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]520eb172-9c94-2e17-ba4e-ce835fab0e0d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 971.304438] env[62070]: DEBUG oslo_vmware.api [None req-821abf2e-1097-4d53-905f-a295581ee983 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Task: {'id': task-1122165, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 971.525790] env[62070]: DEBUG oslo_concurrency.lockutils [None req-ca3baa9f-e172-43d5-a6ee-07c9fa3a6821 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 971.546028] env[62070]: INFO nova.compute.manager [-] [instance: 4a5f644a-1670-4c6b-a762-f87f1ee4cce5] Took 1.55 seconds to deallocate network for instance. [ 971.558958] env[62070]: INFO nova.compute.manager [None req-26f7fbc8-03f4-42ad-82c9-20e385f600f8 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] [instance: a5cba512-9b50-4ca3-93eb-345be12dc588] Took 36.91 seconds to build instance. [ 971.619688] env[62070]: DEBUG oslo_vmware.api [None req-f98075f3-04a2-432f-a2ed-d24c86458581 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Task: {'id': task-1122178, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 971.714775] env[62070]: DEBUG nova.network.neutron [req-cb48e04b-1d4c-4242-9b44-f64a787ccf48 req-4197a71f-ae26-4d6e-b5ee-ad18b1806e39 service nova] [instance: b101c79a-abfd-4104-aaed-096995fb2337] Updated VIF entry in instance network info cache for port 5b226cbf-df38-4b34-b591-7afc6de0a88c. 
{{(pid=62070) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 971.715155] env[62070]: DEBUG nova.network.neutron [req-cb48e04b-1d4c-4242-9b44-f64a787ccf48 req-4197a71f-ae26-4d6e-b5ee-ad18b1806e39 service nova] [instance: b101c79a-abfd-4104-aaed-096995fb2337] Updating instance_info_cache with network_info: [{"id": "5b226cbf-df38-4b34-b591-7afc6de0a88c", "address": "fa:16:3e:a3:e3:84", "network": {"id": "4888f989-958d-49ff-bf5a-06873e4cc624", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-906255456-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d079c0ef3ed745fcaf69dc728dca4466", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7e0240aa-a694-48fc-a0f9-6f2d3e71aa12", "external-id": "nsx-vlan-transportzone-249", "segmentation_id": 249, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5b226cbf-df", "ovs_interfaceid": "5b226cbf-df38-4b34-b591-7afc6de0a88c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 971.809127] env[62070]: DEBUG oslo_vmware.api [None req-821abf2e-1097-4d53-905f-a295581ee983 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Task: {'id': task-1122165, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 971.810281] env[62070]: DEBUG oslo_vmware.api [None req-8cb62e19-b163-4dbc-aa0f-4390ab9e95e3 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]520eb172-9c94-2e17-ba4e-ce835fab0e0d, 'name': SearchDatastore_Task, 'duration_secs': 0.048625} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 971.810550] env[62070]: DEBUG oslo_concurrency.lockutils [None req-8cb62e19-b163-4dbc-aa0f-4390ab9e95e3 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Releasing lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 971.810782] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-8cb62e19-b163-4dbc-aa0f-4390ab9e95e3 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: b101c79a-abfd-4104-aaed-096995fb2337] Processing image 43ea607c-7ece-4601-9b11-75c6a16aa7dd {{(pid=62070) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 971.811136] env[62070]: DEBUG oslo_concurrency.lockutils [None req-8cb62e19-b163-4dbc-aa0f-4390ab9e95e3 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 971.811258] env[62070]: DEBUG oslo_concurrency.lockutils [None req-8cb62e19-b163-4dbc-aa0f-4390ab9e95e3 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Acquired lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 971.811441] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-8cb62e19-b163-4dbc-aa0f-4390ab9e95e3 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 971.811702] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-faf353ec-56a0-4d25-8614-b0351bc4fc2f {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.817986] env[62070]: ERROR nova.scheduler.client.report [None req-10152dbe-3561-4ca4-810f-51cf10506bc8 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [req-b86818f5-8428-4640-a063-74a899b4f8fe] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 21c7c111-1b69-4468-b2c4-5dd96014fbd6. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-b86818f5-8428-4640-a063-74a899b4f8fe"}]} [ 971.833126] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-8cb62e19-b163-4dbc-aa0f-4390ab9e95e3 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 971.833353] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-8cb62e19-b163-4dbc-aa0f-4390ab9e95e3 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62070) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 971.834510] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1028a0a5-a483-4e39-82d2-dec73803738a {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.840632] env[62070]: DEBUG nova.scheduler.client.report [None req-10152dbe-3561-4ca4-810f-51cf10506bc8 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Refreshing inventories for resource provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 971.842054] env[62070]: DEBUG oslo_vmware.api [None req-8cb62e19-b163-4dbc-aa0f-4390ab9e95e3 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Waiting for the task: (returnval){ [ 971.842054] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]5272d3f3-1429-20b8-1e93-690c482a0bc4" [ 971.842054] env[62070]: _type = "Task" [ 971.842054] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 971.850638] env[62070]: DEBUG oslo_vmware.api [None req-8cb62e19-b163-4dbc-aa0f-4390ab9e95e3 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]5272d3f3-1429-20b8-1e93-690c482a0bc4, 'name': SearchDatastore_Task} progress is 0%. 
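The ERROR above is the expected optimistic-concurrency path in Placement: the inventory update carries the resource provider generation, a concurrent writer bumped it, so the client gets 409 placement.concurrent_update, refreshes its view (the "Refreshing inventories/aggregates/traits" lines that follow), and retries; a later retry succeeds and moves the generation from 124 to 125. A sketch of that refresh-and-retry loop, with update_inventory() and get_provider() as hypothetical stand-ins for the Placement API calls:

    def set_inventory_with_retry(update_inventory, get_provider, provider_uuid,
                                 inventory, max_attempts=3):
        """Send inventory with the provider generation; on 409, refresh and retry."""
        generation = get_provider(provider_uuid)["generation"]
        for _ in range(max_attempts):
            status = update_inventory(provider_uuid, inventory, generation)
            if status == 200:
                return True
            if status == 409:  # another writer bumped the generation
                generation = get_provider(provider_uuid)["generation"]
                continue
            raise RuntimeError(f"unexpected Placement response: {status}")
        return False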
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 971.865456] env[62070]: DEBUG nova.scheduler.client.report [None req-10152dbe-3561-4ca4-810f-51cf10506bc8 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Updating ProviderTree inventory for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 971.865711] env[62070]: DEBUG nova.compute.provider_tree [None req-10152dbe-3561-4ca4-810f-51cf10506bc8 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Updating inventory in ProviderTree for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 971.878201] env[62070]: DEBUG nova.scheduler.client.report [None req-10152dbe-3561-4ca4-810f-51cf10506bc8 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Refreshing aggregate associations for resource provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6, aggregates: None {{(pid=62070) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 971.900332] env[62070]: DEBUG nova.scheduler.client.report [None req-10152dbe-3561-4ca4-810f-51cf10506bc8 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Refreshing trait associations for resource provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6, traits: COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO {{(pid=62070) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 972.056441] env[62070]: DEBUG oslo_concurrency.lockutils [None req-e4aeb392-3326-4009-b0d7-1e0b8b5e5799 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 972.061262] env[62070]: DEBUG oslo_concurrency.lockutils [None req-26f7fbc8-03f4-42ad-82c9-20e385f600f8 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Lock "a5cba512-9b50-4ca3-93eb-345be12dc588" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 38.422s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 972.115921] env[62070]: DEBUG oslo_vmware.api [None req-f98075f3-04a2-432f-a2ed-d24c86458581 
tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Task: {'id': task-1122178, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 972.180299] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a8d6b09-8983-4e8c-ab2a-49bc53561b57 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.188859] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b552314-f2e6-491c-8614-2bf0c1e2d87d {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.221848] env[62070]: DEBUG oslo_concurrency.lockutils [req-cb48e04b-1d4c-4242-9b44-f64a787ccf48 req-4197a71f-ae26-4d6e-b5ee-ad18b1806e39 service nova] Releasing lock "refresh_cache-b101c79a-abfd-4104-aaed-096995fb2337" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 972.223151] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66f398d1-af09-4eeb-80e8-5fcd05ddced8 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.231644] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9733fa7a-929c-4aac-8ed3-b1da85992c34 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.247676] env[62070]: DEBUG nova.compute.provider_tree [None req-10152dbe-3561-4ca4-810f-51cf10506bc8 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Updating inventory in ProviderTree for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 972.304313] env[62070]: DEBUG oslo_vmware.api [None req-821abf2e-1097-4d53-905f-a295581ee983 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Task: {'id': task-1122165, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 972.357020] env[62070]: DEBUG oslo_vmware.api [None req-8cb62e19-b163-4dbc-aa0f-4390ab9e95e3 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]5272d3f3-1429-20b8-1e93-690c482a0bc4, 'name': SearchDatastore_Task, 'duration_secs': 0.054843} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 972.357020] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-be967a02-c658-46bd-bb52-190ef91b144b {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.362862] env[62070]: DEBUG oslo_vmware.api [None req-8cb62e19-b163-4dbc-aa0f-4390ab9e95e3 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Waiting for the task: (returnval){ [ 972.362862] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]5264b29e-df27-d56d-8328-607555183c9b" [ 972.362862] env[62070]: _type = "Task" [ 972.362862] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 973.122183] env[62070]: DEBUG oslo_vmware.api [None req-8cb62e19-b163-4dbc-aa0f-4390ab9e95e3 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]5264b29e-df27-d56d-8328-607555183c9b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.122369] env[62070]: WARNING oslo_vmware.common.loopingcall [None req-8cb62e19-b163-4dbc-aa0f-4390ab9e95e3 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] task run outlasted interval by 0.258957 sec [ 973.129761] env[62070]: DEBUG nova.compute.manager [req-8accff5d-bab1-4def-81f7-06e878a804cd req-0533bf31-fe86-4b20-899f-30dc4d7fc8ba service nova] [instance: 4a5f644a-1670-4c6b-a762-f87f1ee4cce5] Received event network-vif-deleted-3a8213ef-a979-487a-8756-7bfecdf4ba10 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 973.143052] env[62070]: DEBUG oslo_vmware.api [None req-f98075f3-04a2-432f-a2ed-d24c86458581 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Task: {'id': task-1122178, 'name': ReconfigVM_Task, 'duration_secs': 1.031474} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 973.143385] env[62070]: DEBUG oslo_vmware.api [None req-821abf2e-1097-4d53-905f-a295581ee983 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Task: {'id': task-1122165, 'name': ReconfigVM_Task} progress is 18%. 
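The WARNING "task run outlasted interval by 0.258957 sec" above comes from a fixed-interval looping call whose callback took longer than its period, so the next run starts immediately instead of sleeping. A stdlib-only sketch of that behaviour, not the oslo looping-call implementation:

    import time

    def fixed_interval_loop(func, interval, rounds):
        """Run func every `interval` seconds; warn if a run overruns its slot."""
        for _ in range(rounds):
            start = time.monotonic()
            func()
            elapsed = time.monotonic() - start
            if elapsed > interval:
                print(f"task run outlasted interval by {elapsed - interval:.6f} sec")
                continue  # overran: start the next run immediately
            time.sleep(interval - elapsed)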
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.147064] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-f98075f3-04a2-432f-a2ed-d24c86458581 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: 1d595bc8-ab51-4443-bf32-079078f3133b] Reconfigured VM instance instance-00000059 to attach disk [datastore2] 1d595bc8-ab51-4443-bf32-079078f3133b/1d595bc8-ab51-4443-bf32-079078f3133b.vmdk or device None with type sparse {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 973.147801] env[62070]: DEBUG oslo_vmware.api [None req-8cb62e19-b163-4dbc-aa0f-4390ab9e95e3 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]5264b29e-df27-d56d-8328-607555183c9b, 'name': SearchDatastore_Task, 'duration_secs': 0.013755} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 973.148056] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-08f73f49-8648-49ae-87ed-eb03b00cf6cf {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.150365] env[62070]: DEBUG oslo_concurrency.lockutils [None req-8cb62e19-b163-4dbc-aa0f-4390ab9e95e3 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Releasing lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 973.150365] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-8cb62e19-b163-4dbc-aa0f-4390ab9e95e3 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore2] b101c79a-abfd-4104-aaed-096995fb2337/b101c79a-abfd-4104-aaed-096995fb2337.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 973.150365] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5cc6db91-d6f4-4184-ae15-ec65607d65b8 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.154227] env[62070]: DEBUG nova.scheduler.client.report [None req-10152dbe-3561-4ca4-810f-51cf10506bc8 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Updated inventory for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 with generation 124 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 973.154478] env[62070]: DEBUG nova.compute.provider_tree [None req-10152dbe-3561-4ca4-810f-51cf10506bc8 tempest-ServerDiskConfigTestJSON-1824612993 
tempest-ServerDiskConfigTestJSON-1824612993-project-member] Updating resource provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 generation from 124 to 125 during operation: update_inventory {{(pid=62070) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 973.154716] env[62070]: DEBUG nova.compute.provider_tree [None req-10152dbe-3561-4ca4-810f-51cf10506bc8 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Updating inventory in ProviderTree for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 973.160024] env[62070]: DEBUG oslo_vmware.api [None req-8cb62e19-b163-4dbc-aa0f-4390ab9e95e3 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Waiting for the task: (returnval){ [ 973.160024] env[62070]: value = "task-1122180" [ 973.160024] env[62070]: _type = "Task" [ 973.160024] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 973.161213] env[62070]: DEBUG oslo_vmware.api [None req-f98075f3-04a2-432f-a2ed-d24c86458581 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Waiting for the task: (returnval){ [ 973.161213] env[62070]: value = "task-1122179" [ 973.161213] env[62070]: _type = "Task" [ 973.161213] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 973.179231] env[62070]: DEBUG oslo_vmware.api [None req-f98075f3-04a2-432f-a2ed-d24c86458581 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Task: {'id': task-1122179, 'name': Rename_Task} progress is 6%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.182362] env[62070]: DEBUG oslo_vmware.api [None req-8cb62e19-b163-4dbc-aa0f-4390ab9e95e3 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Task: {'id': task-1122180, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.631462] env[62070]: DEBUG oslo_vmware.api [None req-821abf2e-1097-4d53-905f-a295581ee983 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Task: {'id': task-1122165, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.662365] env[62070]: DEBUG oslo_concurrency.lockutils [None req-10152dbe-3561-4ca4-810f-51cf10506bc8 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 3.357s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 973.665281] env[62070]: DEBUG oslo_concurrency.lockutils [None req-0e18b964-96a3-46d4-a9b9-4704ba1dc707 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 9.180s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 973.665988] env[62070]: DEBUG nova.objects.instance [None req-0e18b964-96a3-46d4-a9b9-4704ba1dc707 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 2c58db1d-405f-4489-85db-c74723be4a8d] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62070) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 973.683764] env[62070]: DEBUG oslo_vmware.api [None req-8cb62e19-b163-4dbc-aa0f-4390ab9e95e3 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Task: {'id': task-1122180, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.522941} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 973.687148] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-8cb62e19-b163-4dbc-aa0f-4390ab9e95e3 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore2] b101c79a-abfd-4104-aaed-096995fb2337/b101c79a-abfd-4104-aaed-096995fb2337.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 973.687417] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-8cb62e19-b163-4dbc-aa0f-4390ab9e95e3 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: b101c79a-abfd-4104-aaed-096995fb2337] Extending root virtual disk to 1048576 {{(pid=62070) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 973.687875] env[62070]: DEBUG oslo_vmware.api [None req-f98075f3-04a2-432f-a2ed-d24c86458581 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Task: {'id': task-1122179, 'name': Rename_Task, 'duration_secs': 0.214231} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 973.689047] env[62070]: INFO nova.scheduler.client.report [None req-10152dbe-3561-4ca4-810f-51cf10506bc8 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Deleted allocations for instance fb054a32-c1aa-4884-a087-da5ad34cf3c4 [ 973.690080] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-be1b38ed-ec66-4e50-8dbe-98c9709c8aa0 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.692573] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-f98075f3-04a2-432f-a2ed-d24c86458581 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: 1d595bc8-ab51-4443-bf32-079078f3133b] Powering on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 973.695590] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e16cfcce-3858-4ef4-a59b-6fa865a59cd1 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.705463] env[62070]: DEBUG oslo_vmware.api [None req-f98075f3-04a2-432f-a2ed-d24c86458581 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Waiting for the task: (returnval){ [ 973.705463] env[62070]: value = "task-1122182" [ 973.705463] env[62070]: _type = "Task" [ 973.705463] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 973.705738] env[62070]: DEBUG oslo_vmware.api [None req-8cb62e19-b163-4dbc-aa0f-4390ab9e95e3 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Waiting for the task: (returnval){ [ 973.705738] env[62070]: value = "task-1122181" [ 973.705738] env[62070]: _type = "Task" [ 973.705738] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 973.719020] env[62070]: DEBUG oslo_vmware.api [None req-f98075f3-04a2-432f-a2ed-d24c86458581 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Task: {'id': task-1122182, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.722423] env[62070]: DEBUG oslo_vmware.api [None req-8cb62e19-b163-4dbc-aa0f-4390ab9e95e3 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Task: {'id': task-1122181, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 974.132942] env[62070]: DEBUG oslo_vmware.api [None req-821abf2e-1097-4d53-905f-a295581ee983 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Task: {'id': task-1122165, 'name': ReconfigVM_Task, 'duration_secs': 5.903325} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 974.133309] env[62070]: DEBUG oslo_concurrency.lockutils [None req-821abf2e-1097-4d53-905f-a295581ee983 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Releasing lock "cf52cee8-874e-44e8-a36e-49ac20f3e312" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 974.133572] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-821abf2e-1097-4d53-905f-a295581ee983 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] [instance: cf52cee8-874e-44e8-a36e-49ac20f3e312] Reconfigured VM to detach interface {{(pid=62070) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1984}} [ 974.201021] env[62070]: DEBUG oslo_concurrency.lockutils [None req-10152dbe-3561-4ca4-810f-51cf10506bc8 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Lock "fb054a32-c1aa-4884-a087-da5ad34cf3c4" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 20.839s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 974.224638] env[62070]: DEBUG oslo_vmware.api [None req-8cb62e19-b163-4dbc-aa0f-4390ab9e95e3 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Task: {'id': task-1122181, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.07752} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 974.224876] env[62070]: DEBUG oslo_vmware.api [None req-f98075f3-04a2-432f-a2ed-d24c86458581 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Task: {'id': task-1122182, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 974.225150] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-8cb62e19-b163-4dbc-aa0f-4390ab9e95e3 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: b101c79a-abfd-4104-aaed-096995fb2337] Extended root virtual disk {{(pid=62070) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 974.226086] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0eab2d45-ebb9-4c86-836f-2b08f1ce3e35 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.250467] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-8cb62e19-b163-4dbc-aa0f-4390ab9e95e3 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: b101c79a-abfd-4104-aaed-096995fb2337] Reconfiguring VM instance instance-0000005a to attach disk [datastore2] b101c79a-abfd-4104-aaed-096995fb2337/b101c79a-abfd-4104-aaed-096995fb2337.vmdk or device None with type sparse {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 974.250806] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-77575b07-792f-4cc7-83cf-196d5d5788a3 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.274416] env[62070]: DEBUG oslo_vmware.api [None req-8cb62e19-b163-4dbc-aa0f-4390ab9e95e3 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Waiting for the task: (returnval){ [ 974.274416] env[62070]: value = "task-1122183" [ 974.274416] env[62070]: _type = "Task" [ 974.274416] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 974.288069] env[62070]: DEBUG oslo_vmware.api [None req-8cb62e19-b163-4dbc-aa0f-4390ab9e95e3 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Task: {'id': task-1122183, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 974.451979] env[62070]: DEBUG nova.compute.manager [req-7611154f-9979-491b-95ed-939e85295087 req-0fb4af23-c373-432a-861b-def9a5824b05 service nova] [instance: a5cba512-9b50-4ca3-93eb-345be12dc588] Received event network-changed-79f2a280-e16e-4dcd-9a80-21c1fc225a8c {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 974.452278] env[62070]: DEBUG nova.compute.manager [req-7611154f-9979-491b-95ed-939e85295087 req-0fb4af23-c373-432a-861b-def9a5824b05 service nova] [instance: a5cba512-9b50-4ca3-93eb-345be12dc588] Refreshing instance network info cache due to event network-changed-79f2a280-e16e-4dcd-9a80-21c1fc225a8c. 
{{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 974.452569] env[62070]: DEBUG oslo_concurrency.lockutils [req-7611154f-9979-491b-95ed-939e85295087 req-0fb4af23-c373-432a-861b-def9a5824b05 service nova] Acquiring lock "refresh_cache-a5cba512-9b50-4ca3-93eb-345be12dc588" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 974.452865] env[62070]: DEBUG oslo_concurrency.lockutils [req-7611154f-9979-491b-95ed-939e85295087 req-0fb4af23-c373-432a-861b-def9a5824b05 service nova] Acquired lock "refresh_cache-a5cba512-9b50-4ca3-93eb-345be12dc588" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 974.452930] env[62070]: DEBUG nova.network.neutron [req-7611154f-9979-491b-95ed-939e85295087 req-0fb4af23-c373-432a-861b-def9a5824b05 service nova] [instance: a5cba512-9b50-4ca3-93eb-345be12dc588] Refreshing network info cache for port 79f2a280-e16e-4dcd-9a80-21c1fc225a8c {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 974.680361] env[62070]: DEBUG oslo_concurrency.lockutils [None req-0e18b964-96a3-46d4-a9b9-4704ba1dc707 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.015s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 974.681587] env[62070]: DEBUG oslo_concurrency.lockutils [None req-f5253e8c-bb7c-4acc-90f3-1d1ce621f83f tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 8.890s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 974.681831] env[62070]: DEBUG nova.objects.instance [None req-f5253e8c-bb7c-4acc-90f3-1d1ce621f83f tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Lazy-loading 'resources' on Instance uuid 519cad6a-ebe0-42db-a19e-27249b83436e {{(pid=62070) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 974.718610] env[62070]: DEBUG oslo_vmware.api [None req-f98075f3-04a2-432f-a2ed-d24c86458581 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Task: {'id': task-1122182, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 974.786199] env[62070]: DEBUG oslo_vmware.api [None req-8cb62e19-b163-4dbc-aa0f-4390ab9e95e3 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Task: {'id': task-1122183, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 975.219332] env[62070]: DEBUG oslo_vmware.api [None req-f98075f3-04a2-432f-a2ed-d24c86458581 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Task: {'id': task-1122182, 'name': PowerOnVM_Task, 'duration_secs': 1.091429} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 975.222179] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-f98075f3-04a2-432f-a2ed-d24c86458581 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: 1d595bc8-ab51-4443-bf32-079078f3133b] Powered on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 975.222664] env[62070]: INFO nova.compute.manager [None req-f98075f3-04a2-432f-a2ed-d24c86458581 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: 1d595bc8-ab51-4443-bf32-079078f3133b] Took 10.09 seconds to spawn the instance on the hypervisor. [ 975.222664] env[62070]: DEBUG nova.compute.manager [None req-f98075f3-04a2-432f-a2ed-d24c86458581 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: 1d595bc8-ab51-4443-bf32-079078f3133b] Checking state {{(pid=62070) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 975.223714] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab209143-bf2f-4e52-accc-22e507d8e210 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.282346] env[62070]: DEBUG nova.network.neutron [req-7611154f-9979-491b-95ed-939e85295087 req-0fb4af23-c373-432a-861b-def9a5824b05 service nova] [instance: a5cba512-9b50-4ca3-93eb-345be12dc588] Updated VIF entry in instance network info cache for port 79f2a280-e16e-4dcd-9a80-21c1fc225a8c. {{(pid=62070) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 975.282694] env[62070]: DEBUG nova.network.neutron [req-7611154f-9979-491b-95ed-939e85295087 req-0fb4af23-c373-432a-861b-def9a5824b05 service nova] [instance: a5cba512-9b50-4ca3-93eb-345be12dc588] Updating instance_info_cache with network_info: [{"id": "79f2a280-e16e-4dcd-9a80-21c1fc225a8c", "address": "fa:16:3e:c0:7a:98", "network": {"id": "6ea9aade-1b40-4ce8-a502-14ff09a4ab40", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1617295069-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.199", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "735d24ccc5614660a5b34d77af648f94", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5b8af79a-31d5-4d78-93d7-3919aa1d9186", "external-id": "nsx-vlan-transportzone-324", "segmentation_id": 324, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap79f2a280-e1", "ovs_interfaceid": "79f2a280-e16e-4dcd-9a80-21c1fc225a8c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 975.288471] env[62070]: DEBUG oslo_vmware.api [None req-8cb62e19-b163-4dbc-aa0f-4390ab9e95e3 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Task: {'id': task-1122183, 
'name': ReconfigVM_Task} progress is 14%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 975.450559] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1146a7ef-a2c1-40a0-b896-c1d74b14506d {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.458573] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36ebf268-e8e7-4b68-bb2a-173c5c26f4ec {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.488015] env[62070]: DEBUG oslo_concurrency.lockutils [None req-821abf2e-1097-4d53-905f-a295581ee983 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Acquiring lock "refresh_cache-cf52cee8-874e-44e8-a36e-49ac20f3e312" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 975.488260] env[62070]: DEBUG oslo_concurrency.lockutils [None req-821abf2e-1097-4d53-905f-a295581ee983 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Acquired lock "refresh_cache-cf52cee8-874e-44e8-a36e-49ac20f3e312" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 975.488466] env[62070]: DEBUG nova.network.neutron [None req-821abf2e-1097-4d53-905f-a295581ee983 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] [instance: cf52cee8-874e-44e8-a36e-49ac20f3e312] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 975.490492] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20a97822-e049-4a4b-8b8f-726ad3191433 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.499612] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-474a5b10-3e26-496a-bde7-71c7f32b03fe {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.514227] env[62070]: DEBUG nova.compute.provider_tree [None req-f5253e8c-bb7c-4acc-90f3-1d1ce621f83f tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 975.748862] env[62070]: INFO nova.compute.manager [None req-f98075f3-04a2-432f-a2ed-d24c86458581 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: 1d595bc8-ab51-4443-bf32-079078f3133b] Took 25.33 seconds to build instance. 
[ 975.775022] env[62070]: DEBUG oslo_concurrency.lockutils [None req-e024cd23-3abe-484c-b9ec-a73746a38127 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Acquiring lock "cf52cee8-874e-44e8-a36e-49ac20f3e312" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 975.775022] env[62070]: DEBUG oslo_concurrency.lockutils [None req-e024cd23-3abe-484c-b9ec-a73746a38127 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Lock "cf52cee8-874e-44e8-a36e-49ac20f3e312" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 975.775022] env[62070]: DEBUG oslo_concurrency.lockutils [None req-e024cd23-3abe-484c-b9ec-a73746a38127 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Acquiring lock "cf52cee8-874e-44e8-a36e-49ac20f3e312-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 975.775022] env[62070]: DEBUG oslo_concurrency.lockutils [None req-e024cd23-3abe-484c-b9ec-a73746a38127 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Lock "cf52cee8-874e-44e8-a36e-49ac20f3e312-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 975.775022] env[62070]: DEBUG oslo_concurrency.lockutils [None req-e024cd23-3abe-484c-b9ec-a73746a38127 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Lock "cf52cee8-874e-44e8-a36e-49ac20f3e312-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 975.775584] env[62070]: INFO nova.compute.manager [None req-e024cd23-3abe-484c-b9ec-a73746a38127 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] [instance: cf52cee8-874e-44e8-a36e-49ac20f3e312] Terminating instance [ 975.777928] env[62070]: DEBUG nova.compute.manager [None req-e024cd23-3abe-484c-b9ec-a73746a38127 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] [instance: cf52cee8-874e-44e8-a36e-49ac20f3e312] Start destroying the instance on the hypervisor. 
{{(pid=62070) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 975.778145] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-e024cd23-3abe-484c-b9ec-a73746a38127 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] [instance: cf52cee8-874e-44e8-a36e-49ac20f3e312] Destroying instance {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 975.783473] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-184ec837-56f0-4fe3-83a5-5f4f6be1959b {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.786048] env[62070]: DEBUG oslo_concurrency.lockutils [req-7611154f-9979-491b-95ed-939e85295087 req-0fb4af23-c373-432a-861b-def9a5824b05 service nova] Releasing lock "refresh_cache-a5cba512-9b50-4ca3-93eb-345be12dc588" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 975.792531] env[62070]: DEBUG oslo_vmware.api [None req-8cb62e19-b163-4dbc-aa0f-4390ab9e95e3 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Task: {'id': task-1122183, 'name': ReconfigVM_Task, 'duration_secs': 1.151087} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 975.794535] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-8cb62e19-b163-4dbc-aa0f-4390ab9e95e3 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: b101c79a-abfd-4104-aaed-096995fb2337] Reconfigured VM instance instance-0000005a to attach disk [datastore2] b101c79a-abfd-4104-aaed-096995fb2337/b101c79a-abfd-4104-aaed-096995fb2337.vmdk or device None with type sparse {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 975.795441] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-e024cd23-3abe-484c-b9ec-a73746a38127 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] [instance: cf52cee8-874e-44e8-a36e-49ac20f3e312] Powering off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 975.795695] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-04fc92cb-e8c3-46ac-bad2-0ce617c5789d {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.797230] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-86893344-b4a3-47b3-b14f-510177de9634 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.804158] env[62070]: DEBUG oslo_vmware.api [None req-e024cd23-3abe-484c-b9ec-a73746a38127 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Waiting for the task: (returnval){ [ 975.804158] env[62070]: value = "task-1122185" [ 975.804158] env[62070]: _type = "Task" [ 975.804158] env[62070]: } to complete. 
{{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 975.805484] env[62070]: DEBUG oslo_vmware.api [None req-8cb62e19-b163-4dbc-aa0f-4390ab9e95e3 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Waiting for the task: (returnval){ [ 975.805484] env[62070]: value = "task-1122184" [ 975.805484] env[62070]: _type = "Task" [ 975.805484] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 975.817281] env[62070]: DEBUG oslo_vmware.api [None req-e024cd23-3abe-484c-b9ec-a73746a38127 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Task: {'id': task-1122185, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 975.820487] env[62070]: DEBUG oslo_vmware.api [None req-8cb62e19-b163-4dbc-aa0f-4390ab9e95e3 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Task: {'id': task-1122184, 'name': Rename_Task} progress is 5%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 976.017280] env[62070]: DEBUG nova.scheduler.client.report [None req-f5253e8c-bb7c-4acc-90f3-1d1ce621f83f tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 976.250376] env[62070]: DEBUG oslo_concurrency.lockutils [None req-f98075f3-04a2-432f-a2ed-d24c86458581 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Lock "1d595bc8-ab51-4443-bf32-079078f3133b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 26.840s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 976.252726] env[62070]: INFO nova.network.neutron [None req-821abf2e-1097-4d53-905f-a295581ee983 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] [instance: cf52cee8-874e-44e8-a36e-49ac20f3e312] Port 60714fe7-f6bc-4f1f-b4d0-a0b45b5a8f70 from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. 
[ 976.253164] env[62070]: DEBUG nova.network.neutron [None req-821abf2e-1097-4d53-905f-a295581ee983 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] [instance: cf52cee8-874e-44e8-a36e-49ac20f3e312] Updating instance_info_cache with network_info: [{"id": "c06feb60-bfb1-47ea-8764-52391d9b0b78", "address": "fa:16:3e:66:3c:b1", "network": {"id": "48dc51c7-cfa4-452e-9d72-2968d9a40dfa", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-274800531-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.203", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "925dff51764c4b56ae7ea05fbde2ecdd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2c7c1b46-cb81-45da-b5aa-7905d4da5854", "external-id": "nsx-vlan-transportzone-15", "segmentation_id": 15, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc06feb60-bf", "ovs_interfaceid": "c06feb60-bfb1-47ea-8764-52391d9b0b78", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 976.319440] env[62070]: DEBUG oslo_vmware.api [None req-8cb62e19-b163-4dbc-aa0f-4390ab9e95e3 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Task: {'id': task-1122184, 'name': Rename_Task, 'duration_secs': 0.483775} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 976.322636] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-8cb62e19-b163-4dbc-aa0f-4390ab9e95e3 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: b101c79a-abfd-4104-aaed-096995fb2337] Powering on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 976.322780] env[62070]: DEBUG oslo_vmware.api [None req-e024cd23-3abe-484c-b9ec-a73746a38127 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Task: {'id': task-1122185, 'name': PowerOffVM_Task, 'duration_secs': 0.481951} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 976.323013] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ce07d07a-7c50-4489-94d8-4075b6496dae {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.324457] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-e024cd23-3abe-484c-b9ec-a73746a38127 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] [instance: cf52cee8-874e-44e8-a36e-49ac20f3e312] Powered off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 976.324633] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-e024cd23-3abe-484c-b9ec-a73746a38127 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] [instance: cf52cee8-874e-44e8-a36e-49ac20f3e312] Unregistering the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 976.324864] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9865008f-de13-4cfd-ac4a-2548628a2ece {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.332447] env[62070]: DEBUG oslo_vmware.api [None req-8cb62e19-b163-4dbc-aa0f-4390ab9e95e3 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Waiting for the task: (returnval){ [ 976.332447] env[62070]: value = "task-1122186" [ 976.332447] env[62070]: _type = "Task" [ 976.332447] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 976.340877] env[62070]: DEBUG oslo_vmware.api [None req-8cb62e19-b163-4dbc-aa0f-4390ab9e95e3 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Task: {'id': task-1122186, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 976.401672] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-e024cd23-3abe-484c-b9ec-a73746a38127 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] [instance: cf52cee8-874e-44e8-a36e-49ac20f3e312] Unregistered the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 976.401969] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-e024cd23-3abe-484c-b9ec-a73746a38127 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] [instance: cf52cee8-874e-44e8-a36e-49ac20f3e312] Deleting contents of the VM from datastore datastore2 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 976.402264] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-e024cd23-3abe-484c-b9ec-a73746a38127 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Deleting the datastore file [datastore2] cf52cee8-874e-44e8-a36e-49ac20f3e312 {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 976.402652] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-aee5aa70-4a9a-4558-99ff-2159647194e2 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.410710] env[62070]: DEBUG oslo_vmware.api [None req-e024cd23-3abe-484c-b9ec-a73746a38127 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Waiting for the task: (returnval){ [ 976.410710] env[62070]: value = "task-1122188" [ 976.410710] env[62070]: _type = "Task" [ 976.410710] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 976.419040] env[62070]: DEBUG oslo_vmware.api [None req-e024cd23-3abe-484c-b9ec-a73746a38127 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Task: {'id': task-1122188, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 976.522337] env[62070]: DEBUG oslo_concurrency.lockutils [None req-f5253e8c-bb7c-4acc-90f3-1d1ce621f83f tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.840s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 976.525837] env[62070]: DEBUG oslo_concurrency.lockutils [None req-de08a2f2-ef2e-4d38-9071-72e4c13cea82 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 6.498s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 976.526167] env[62070]: DEBUG nova.objects.instance [None req-de08a2f2-ef2e-4d38-9071-72e4c13cea82 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Lazy-loading 'resources' on Instance uuid 2c58db1d-405f-4489-85db-c74723be4a8d {{(pid=62070) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 976.546138] env[62070]: INFO nova.scheduler.client.report [None req-f5253e8c-bb7c-4acc-90f3-1d1ce621f83f tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Deleted allocations for instance 519cad6a-ebe0-42db-a19e-27249b83436e [ 976.756095] env[62070]: DEBUG oslo_concurrency.lockutils [None req-821abf2e-1097-4d53-905f-a295581ee983 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Releasing lock "refresh_cache-cf52cee8-874e-44e8-a36e-49ac20f3e312" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 976.843562] env[62070]: DEBUG oslo_vmware.api [None req-8cb62e19-b163-4dbc-aa0f-4390ab9e95e3 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Task: {'id': task-1122186, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 976.920903] env[62070]: DEBUG oslo_vmware.api [None req-e024cd23-3abe-484c-b9ec-a73746a38127 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Task: {'id': task-1122188, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.254603} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 976.921180] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-e024cd23-3abe-484c-b9ec-a73746a38127 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Deleted the datastore file {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 976.921397] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-e024cd23-3abe-484c-b9ec-a73746a38127 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] [instance: cf52cee8-874e-44e8-a36e-49ac20f3e312] Deleted contents of the VM from datastore datastore2 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 976.921618] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-e024cd23-3abe-484c-b9ec-a73746a38127 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] [instance: cf52cee8-874e-44e8-a36e-49ac20f3e312] Instance destroyed {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 976.921864] env[62070]: INFO nova.compute.manager [None req-e024cd23-3abe-484c-b9ec-a73746a38127 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] [instance: cf52cee8-874e-44e8-a36e-49ac20f3e312] Took 1.14 seconds to destroy the instance on the hypervisor. [ 976.922119] env[62070]: DEBUG oslo.service.loopingcall [None req-e024cd23-3abe-484c-b9ec-a73746a38127 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 976.922332] env[62070]: DEBUG nova.compute.manager [-] [instance: cf52cee8-874e-44e8-a36e-49ac20f3e312] Deallocating network for instance {{(pid=62070) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 976.922428] env[62070]: DEBUG nova.network.neutron [-] [instance: cf52cee8-874e-44e8-a36e-49ac20f3e312] deallocate_for_instance() {{(pid=62070) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 977.057473] env[62070]: DEBUG oslo_concurrency.lockutils [None req-f5253e8c-bb7c-4acc-90f3-1d1ce621f83f tempest-ImagesTestJSON-1351627684 tempest-ImagesTestJSON-1351627684-project-member] Lock "519cad6a-ebe0-42db-a19e-27249b83436e" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 14.218s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 977.260738] env[62070]: DEBUG oslo_concurrency.lockutils [None req-821abf2e-1097-4d53-905f-a295581ee983 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Lock "interface-cf52cee8-874e-44e8-a36e-49ac20f3e312-60714fe7-f6bc-4f1f-b4d0-a0b45b5a8f70" "released" by "nova.compute.manager.ComputeManager.detach_interface.<locals>.do_detach_interface" :: held 10.062s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 977.288606] env[62070]: DEBUG oslo_concurrency.lockutils [None req-10516eed-24ef-4548-bde8-566a3d8bb59d tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Acquiring lock "f810eab8-1c8c-4f7a-8acd-f46ac5e6d31f" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 977.288916] env[62070]: DEBUG oslo_concurrency.lockutils [None req-10516eed-24ef-4548-bde8-566a3d8bb59d tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Lock "f810eab8-1c8c-4f7a-8acd-f46ac5e6d31f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 977.320221] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cf6b058-d0fd-475f-9942-698632a6f2cf {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.330431] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6dbfea7-6649-4696-b043-895a3ac17677 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.345952] env[62070]: DEBUG oslo_vmware.api [None req-8cb62e19-b163-4dbc-aa0f-4390ab9e95e3 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Task: {'id': task-1122186, 'name': PowerOnVM_Task, 'duration_secs': 0.944114} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 977.369992] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-8cb62e19-b163-4dbc-aa0f-4390ab9e95e3 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: b101c79a-abfd-4104-aaed-096995fb2337] Powered on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 977.369992] env[62070]: INFO nova.compute.manager [None req-8cb62e19-b163-4dbc-aa0f-4390ab9e95e3 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: b101c79a-abfd-4104-aaed-096995fb2337] Took 8.83 seconds to spawn the instance on the hypervisor. [ 977.369992] env[62070]: DEBUG nova.compute.manager [None req-8cb62e19-b163-4dbc-aa0f-4390ab9e95e3 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: b101c79a-abfd-4104-aaed-096995fb2337] Checking state {{(pid=62070) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 977.371438] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4cd0a7e-cf15-475b-8509-a6c8bebc7634 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.374684] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fd6e5a8-7bbb-42f2-a90d-e19629d11643 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.389642] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29892680-d9e5-4ba2-9670-a1ab4294079e {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.405871] env[62070]: DEBUG nova.compute.provider_tree [None req-de08a2f2-ef2e-4d38-9071-72e4c13cea82 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 977.791044] env[62070]: DEBUG nova.compute.manager [None req-10516eed-24ef-4548-bde8-566a3d8bb59d tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: f810eab8-1c8c-4f7a-8acd-f46ac5e6d31f] Starting instance... 
{{(pid=62070) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 977.855403] env[62070]: DEBUG nova.compute.manager [req-627c1d52-7318-48b4-a106-c3da27840ecc req-065eee51-f04b-4aed-81f5-88d1e544dacd service nova] [instance: cf52cee8-874e-44e8-a36e-49ac20f3e312] Received event network-vif-deleted-c06feb60-bfb1-47ea-8764-52391d9b0b78 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 977.855829] env[62070]: INFO nova.compute.manager [req-627c1d52-7318-48b4-a106-c3da27840ecc req-065eee51-f04b-4aed-81f5-88d1e544dacd service nova] [instance: cf52cee8-874e-44e8-a36e-49ac20f3e312] Neutron deleted interface c06feb60-bfb1-47ea-8764-52391d9b0b78; detaching it from the instance and deleting it from the info cache [ 977.855992] env[62070]: DEBUG nova.network.neutron [req-627c1d52-7318-48b4-a106-c3da27840ecc req-065eee51-f04b-4aed-81f5-88d1e544dacd service nova] [instance: cf52cee8-874e-44e8-a36e-49ac20f3e312] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 977.907170] env[62070]: INFO nova.compute.manager [None req-8cb62e19-b163-4dbc-aa0f-4390ab9e95e3 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: b101c79a-abfd-4104-aaed-096995fb2337] Took 23.00 seconds to build instance. [ 977.909478] env[62070]: DEBUG nova.scheduler.client.report [None req-de08a2f2-ef2e-4d38-9071-72e4c13cea82 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 978.246506] env[62070]: DEBUG nova.network.neutron [-] [instance: cf52cee8-874e-44e8-a36e-49ac20f3e312] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 978.317212] env[62070]: DEBUG oslo_concurrency.lockutils [None req-10516eed-24ef-4548-bde8-566a3d8bb59d tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 978.361904] env[62070]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-26ef17fd-aba4-43a1-8097-c8ef865276cf {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.374429] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7466cfed-657d-4983-8fda-bf2617f67966 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.414610] env[62070]: DEBUG oslo_concurrency.lockutils [None req-8cb62e19-b163-4dbc-aa0f-4390ab9e95e3 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Lock 
"b101c79a-abfd-4104-aaed-096995fb2337" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 24.531s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 978.415350] env[62070]: DEBUG oslo_concurrency.lockutils [None req-de08a2f2-ef2e-4d38-9071-72e4c13cea82 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.891s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 978.417666] env[62070]: DEBUG nova.compute.manager [req-627c1d52-7318-48b4-a106-c3da27840ecc req-065eee51-f04b-4aed-81f5-88d1e544dacd service nova] [instance: cf52cee8-874e-44e8-a36e-49ac20f3e312] Detach interface failed, port_id=c06feb60-bfb1-47ea-8764-52391d9b0b78, reason: Instance cf52cee8-874e-44e8-a36e-49ac20f3e312 could not be found. {{(pid=62070) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 978.418349] env[62070]: DEBUG oslo_concurrency.lockutils [None req-54b03663-a039-4755-80d7-40a200b8e0db tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.522s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 978.419793] env[62070]: INFO nova.compute.claims [None req-54b03663-a039-4755-80d7-40a200b8e0db tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] [instance: e4cf42ff-8440-42bc-b629-4b712fd94e99] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 978.433301] env[62070]: INFO nova.scheduler.client.report [None req-de08a2f2-ef2e-4d38-9071-72e4c13cea82 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Deleted allocations for instance 2c58db1d-405f-4489-85db-c74723be4a8d [ 978.749296] env[62070]: INFO nova.compute.manager [-] [instance: cf52cee8-874e-44e8-a36e-49ac20f3e312] Took 1.83 seconds to deallocate network for instance. 
[ 978.946056] env[62070]: DEBUG oslo_concurrency.lockutils [None req-de08a2f2-ef2e-4d38-9071-72e4c13cea82 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Lock "2c58db1d-405f-4489-85db-c74723be4a8d" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 11.904s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 979.187306] env[62070]: DEBUG nova.compute.manager [None req-5af751bb-55de-4464-945f-4333bd2b85cc tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: b101c79a-abfd-4104-aaed-096995fb2337] Stashing vm_state: active {{(pid=62070) _prep_resize /opt/stack/nova/nova/compute/manager.py:5624}} [ 979.257526] env[62070]: DEBUG oslo_concurrency.lockutils [None req-e024cd23-3abe-484c-b9ec-a73746a38127 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 979.691572] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e52c18e1-c3ea-4af9-9bf2-b8479566c44c {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.705239] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f0893ab-2dd2-4496-9654-c1c2f0435023 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.709198] env[62070]: DEBUG oslo_concurrency.lockutils [None req-5af751bb-55de-4464-945f-4333bd2b85cc tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 979.736222] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a38eeec-aae2-4262-a0fa-bccf5a9a1e43 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.745594] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-526221b6-f5a8-412b-a981-eb6cd429ac05 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.761737] env[62070]: DEBUG nova.compute.provider_tree [None req-54b03663-a039-4755-80d7-40a200b8e0db tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 980.264511] env[62070]: DEBUG nova.scheduler.client.report [None req-54b03663-a039-4755-80d7-40a200b8e0db tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': 
{'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 980.400970] env[62070]: DEBUG nova.compute.manager [None req-4c71a514-0b63-4572-bfc9-c9bc2441840c tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 71aead12-a182-40a7-b5a9-91c01271b800] Stashing vm_state: active {{(pid=62070) _prep_resize /opt/stack/nova/nova/compute/manager.py:5624}} [ 980.769928] env[62070]: DEBUG oslo_concurrency.lockutils [None req-54b03663-a039-4755-80d7-40a200b8e0db tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.351s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 980.771012] env[62070]: DEBUG nova.compute.manager [None req-54b03663-a039-4755-80d7-40a200b8e0db tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] [instance: e4cf42ff-8440-42bc-b629-4b712fd94e99] Start building networks asynchronously for instance. {{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 980.774664] env[62070]: DEBUG oslo_concurrency.lockutils [None req-ca3baa9f-e172-43d5-a6ee-07c9fa3a6821 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 9.249s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 980.779652] env[62070]: INFO nova.compute.claims [None req-ca3baa9f-e172-43d5-a6ee-07c9fa3a6821 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 6cba961f-f9f9-4d3c-853a-049a014c9dbb] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 980.931871] env[62070]: DEBUG oslo_concurrency.lockutils [None req-4c71a514-0b63-4572-bfc9-c9bc2441840c tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 981.289193] env[62070]: DEBUG nova.compute.utils [None req-54b03663-a039-4755-80d7-40a200b8e0db tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Using /dev/sd instead of None {{(pid=62070) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 981.292655] env[62070]: DEBUG nova.compute.manager [None req-54b03663-a039-4755-80d7-40a200b8e0db tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] [instance: e4cf42ff-8440-42bc-b629-4b712fd94e99] Allocating IP information in the background. 
{{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 981.292895] env[62070]: DEBUG nova.network.neutron [None req-54b03663-a039-4755-80d7-40a200b8e0db tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] [instance: e4cf42ff-8440-42bc-b629-4b712fd94e99] allocate_for_instance() {{(pid=62070) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 981.340866] env[62070]: DEBUG nova.policy [None req-54b03663-a039-4755-80d7-40a200b8e0db tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6568006b61514963b9c4c4d1b1330c65', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '772f7fcee5f44b899b6df797e1ed5ddd', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62070) authorize /opt/stack/nova/nova/policy.py:201}} [ 981.618218] env[62070]: DEBUG nova.network.neutron [None req-54b03663-a039-4755-80d7-40a200b8e0db tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] [instance: e4cf42ff-8440-42bc-b629-4b712fd94e99] Successfully created port: 7b9b5e67-62e1-441d-b99d-381c1233f050 {{(pid=62070) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 981.794240] env[62070]: DEBUG nova.compute.manager [None req-54b03663-a039-4755-80d7-40a200b8e0db tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] [instance: e4cf42ff-8440-42bc-b629-4b712fd94e99] Start building block device mappings for instance. 
{{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 982.053372] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-642c1299-caa6-4ff3-a5f1-aff313a72134 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.063749] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62bf67eb-c88b-4af4-81d2-13e53ca595fa {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.099914] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd3ddbd3-ea8e-480f-9ecd-9da5ee4421bc {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.109165] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24f39e99-7ca4-4d2e-9105-d681a54ea757 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.125486] env[62070]: DEBUG nova.compute.provider_tree [None req-ca3baa9f-e172-43d5-a6ee-07c9fa3a6821 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 982.628950] env[62070]: DEBUG nova.scheduler.client.report [None req-ca3baa9f-e172-43d5-a6ee-07c9fa3a6821 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 982.807762] env[62070]: DEBUG nova.compute.manager [None req-54b03663-a039-4755-80d7-40a200b8e0db tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] [instance: e4cf42ff-8440-42bc-b629-4b712fd94e99] Start spawning the instance on the hypervisor. 
{{(pid=62070) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 982.845227] env[62070]: DEBUG nova.virt.hardware [None req-54b03663-a039-4755-80d7-40a200b8e0db tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T09:21:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T09:21:17Z,direct_url=,disk_format='vmdk',id=43ea607c-7ece-4601-9b11-75c6a16aa7dd,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9d42cb2bbadf40d6b35f237f71234611',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T09:21:18Z,virtual_size=,visibility=), allow threads: False {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 982.845421] env[62070]: DEBUG nova.virt.hardware [None req-54b03663-a039-4755-80d7-40a200b8e0db tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Flavor limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 982.845696] env[62070]: DEBUG nova.virt.hardware [None req-54b03663-a039-4755-80d7-40a200b8e0db tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Image limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 982.845991] env[62070]: DEBUG nova.virt.hardware [None req-54b03663-a039-4755-80d7-40a200b8e0db tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Flavor pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 982.846430] env[62070]: DEBUG nova.virt.hardware [None req-54b03663-a039-4755-80d7-40a200b8e0db tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Image pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 982.846581] env[62070]: DEBUG nova.virt.hardware [None req-54b03663-a039-4755-80d7-40a200b8e0db tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 982.846853] env[62070]: DEBUG nova.virt.hardware [None req-54b03663-a039-4755-80d7-40a200b8e0db tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 982.847129] env[62070]: DEBUG nova.virt.hardware [None req-54b03663-a039-4755-80d7-40a200b8e0db tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 982.847356] env[62070]: DEBUG 
nova.virt.hardware [None req-54b03663-a039-4755-80d7-40a200b8e0db tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Got 1 possible topologies {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 982.847571] env[62070]: DEBUG nova.virt.hardware [None req-54b03663-a039-4755-80d7-40a200b8e0db tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 982.847893] env[62070]: DEBUG nova.virt.hardware [None req-54b03663-a039-4755-80d7-40a200b8e0db tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 982.848840] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73abc827-93f5-448c-9d84-90e8bf1464eb {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.858280] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-105a7ec2-11d4-472e-8218-659c828a8098 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.054263] env[62070]: DEBUG nova.compute.manager [req-b7c62198-73ba-4ebf-83f4-11438f15a77d req-9c527ea5-7b8e-483b-a38c-3b0ff9e9b299 service nova] [instance: e4cf42ff-8440-42bc-b629-4b712fd94e99] Received event network-vif-plugged-7b9b5e67-62e1-441d-b99d-381c1233f050 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 983.054488] env[62070]: DEBUG oslo_concurrency.lockutils [req-b7c62198-73ba-4ebf-83f4-11438f15a77d req-9c527ea5-7b8e-483b-a38c-3b0ff9e9b299 service nova] Acquiring lock "e4cf42ff-8440-42bc-b629-4b712fd94e99-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 983.054707] env[62070]: DEBUG oslo_concurrency.lockutils [req-b7c62198-73ba-4ebf-83f4-11438f15a77d req-9c527ea5-7b8e-483b-a38c-3b0ff9e9b299 service nova] Lock "e4cf42ff-8440-42bc-b629-4b712fd94e99-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 983.054864] env[62070]: DEBUG oslo_concurrency.lockutils [req-b7c62198-73ba-4ebf-83f4-11438f15a77d req-9c527ea5-7b8e-483b-a38c-3b0ff9e9b299 service nova] Lock "e4cf42ff-8440-42bc-b629-4b712fd94e99-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 983.055651] env[62070]: DEBUG nova.compute.manager [req-b7c62198-73ba-4ebf-83f4-11438f15a77d req-9c527ea5-7b8e-483b-a38c-3b0ff9e9b299 service nova] [instance: e4cf42ff-8440-42bc-b629-4b712fd94e99] No waiting events found dispatching network-vif-plugged-7b9b5e67-62e1-441d-b99d-381c1233f050 {{(pid=62070) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 983.055945] env[62070]: WARNING nova.compute.manager 
[req-b7c62198-73ba-4ebf-83f4-11438f15a77d req-9c527ea5-7b8e-483b-a38c-3b0ff9e9b299 service nova] [instance: e4cf42ff-8440-42bc-b629-4b712fd94e99] Received unexpected event network-vif-plugged-7b9b5e67-62e1-441d-b99d-381c1233f050 for instance with vm_state building and task_state spawning. [ 983.135508] env[62070]: DEBUG oslo_concurrency.lockutils [None req-ca3baa9f-e172-43d5-a6ee-07c9fa3a6821 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.361s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 983.136052] env[62070]: DEBUG nova.compute.manager [None req-ca3baa9f-e172-43d5-a6ee-07c9fa3a6821 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 6cba961f-f9f9-4d3c-853a-049a014c9dbb] Start building networks asynchronously for instance. {{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 983.138655] env[62070]: DEBUG oslo_concurrency.lockutils [None req-e4aeb392-3326-4009-b0d7-1e0b8b5e5799 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 11.082s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 983.142026] env[62070]: DEBUG nova.objects.instance [None req-e4aeb392-3326-4009-b0d7-1e0b8b5e5799 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Lazy-loading 'resources' on Instance uuid 4a5f644a-1670-4c6b-a762-f87f1ee4cce5 {{(pid=62070) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 983.160503] env[62070]: DEBUG nova.network.neutron [None req-54b03663-a039-4755-80d7-40a200b8e0db tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] [instance: e4cf42ff-8440-42bc-b629-4b712fd94e99] Successfully updated port: 7b9b5e67-62e1-441d-b99d-381c1233f050 {{(pid=62070) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 983.642700] env[62070]: DEBUG nova.compute.utils [None req-ca3baa9f-e172-43d5-a6ee-07c9fa3a6821 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Using /dev/sd instead of None {{(pid=62070) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 983.645563] env[62070]: DEBUG nova.compute.manager [None req-ca3baa9f-e172-43d5-a6ee-07c9fa3a6821 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 6cba961f-f9f9-4d3c-853a-049a014c9dbb] Allocating IP information in the background. 
{{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 983.645867] env[62070]: DEBUG nova.network.neutron [None req-ca3baa9f-e172-43d5-a6ee-07c9fa3a6821 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 6cba961f-f9f9-4d3c-853a-049a014c9dbb] allocate_for_instance() {{(pid=62070) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 983.663341] env[62070]: DEBUG oslo_concurrency.lockutils [None req-54b03663-a039-4755-80d7-40a200b8e0db tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Acquiring lock "refresh_cache-e4cf42ff-8440-42bc-b629-4b712fd94e99" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 983.663516] env[62070]: DEBUG oslo_concurrency.lockutils [None req-54b03663-a039-4755-80d7-40a200b8e0db tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Acquired lock "refresh_cache-e4cf42ff-8440-42bc-b629-4b712fd94e99" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 983.663855] env[62070]: DEBUG nova.network.neutron [None req-54b03663-a039-4755-80d7-40a200b8e0db tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] [instance: e4cf42ff-8440-42bc-b629-4b712fd94e99] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 983.695225] env[62070]: DEBUG nova.policy [None req-ca3baa9f-e172-43d5-a6ee-07c9fa3a6821 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f866f97eed1a41b39b4cd552102c6e21', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9191f0e6c2ee401abca64c0780e230bf', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62070) authorize /opt/stack/nova/nova/policy.py:201}} [ 983.964870] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-738caf67-f290-4e27-b24c-702c1a3c2337 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.974779] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb4461dd-b42a-4cf7-8933-1305adbe1e45 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.012230] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c05fafc-d487-4979-80a7-05c16592ee75 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.022783] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7ad1e10-3cb6-45f1-8f0a-5500e0c2543b {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.035832] env[62070]: DEBUG nova.compute.provider_tree [None req-e4aeb392-3326-4009-b0d7-1e0b8b5e5799 tempest-AttachVolumeTestJSON-1145072651 
tempest-AttachVolumeTestJSON-1145072651-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 984.075016] env[62070]: DEBUG nova.network.neutron [None req-ca3baa9f-e172-43d5-a6ee-07c9fa3a6821 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 6cba961f-f9f9-4d3c-853a-049a014c9dbb] Successfully created port: 2c284e8d-6670-4b5c-b9b9-b279c81efea8 {{(pid=62070) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 984.151738] env[62070]: DEBUG nova.compute.manager [None req-ca3baa9f-e172-43d5-a6ee-07c9fa3a6821 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 6cba961f-f9f9-4d3c-853a-049a014c9dbb] Start building block device mappings for instance. {{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 984.223535] env[62070]: DEBUG nova.network.neutron [None req-54b03663-a039-4755-80d7-40a200b8e0db tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] [instance: e4cf42ff-8440-42bc-b629-4b712fd94e99] Instance cache missing network info. {{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 984.436220] env[62070]: DEBUG nova.network.neutron [None req-54b03663-a039-4755-80d7-40a200b8e0db tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] [instance: e4cf42ff-8440-42bc-b629-4b712fd94e99] Updating instance_info_cache with network_info: [{"id": "7b9b5e67-62e1-441d-b99d-381c1233f050", "address": "fa:16:3e:eb:3d:c7", "network": {"id": "6a62b79f-a98b-4518-86cb-facc7b77da1d", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-2107556336-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "772f7fcee5f44b899b6df797e1ed5ddd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78e1ebb0-0130-446b-bf73-a0e59bbb95cc", "external-id": "nsx-vlan-transportzone-414", "segmentation_id": 414, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7b9b5e67-62", "ovs_interfaceid": "7b9b5e67-62e1-441d-b99d-381c1233f050", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 984.468437] env[62070]: DEBUG oslo_concurrency.lockutils [None req-0f7be80c-62fb-418e-8b92-cd4fcab4bae0 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Acquiring lock "84c00e4a-20d3-4739-8535-e27076d85a89" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 984.468437] env[62070]: DEBUG oslo_concurrency.lockutils [None req-0f7be80c-62fb-418e-8b92-cd4fcab4bae0 tempest-ServerActionsTestOtherA-1868654732 
tempest-ServerActionsTestOtherA-1868654732-project-member] Lock "84c00e4a-20d3-4739-8535-e27076d85a89" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 984.540123] env[62070]: DEBUG nova.scheduler.client.report [None req-e4aeb392-3326-4009-b0d7-1e0b8b5e5799 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 984.942467] env[62070]: DEBUG oslo_concurrency.lockutils [None req-54b03663-a039-4755-80d7-40a200b8e0db tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Releasing lock "refresh_cache-e4cf42ff-8440-42bc-b629-4b712fd94e99" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 984.942823] env[62070]: DEBUG nova.compute.manager [None req-54b03663-a039-4755-80d7-40a200b8e0db tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] [instance: e4cf42ff-8440-42bc-b629-4b712fd94e99] Instance network_info: |[{"id": "7b9b5e67-62e1-441d-b99d-381c1233f050", "address": "fa:16:3e:eb:3d:c7", "network": {"id": "6a62b79f-a98b-4518-86cb-facc7b77da1d", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-2107556336-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "772f7fcee5f44b899b6df797e1ed5ddd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78e1ebb0-0130-446b-bf73-a0e59bbb95cc", "external-id": "nsx-vlan-transportzone-414", "segmentation_id": 414, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7b9b5e67-62", "ovs_interfaceid": "7b9b5e67-62e1-441d-b99d-381c1233f050", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 984.943223] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-54b03663-a039-4755-80d7-40a200b8e0db tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] [instance: e4cf42ff-8440-42bc-b629-4b712fd94e99] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:eb:3d:c7', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '78e1ebb0-0130-446b-bf73-a0e59bbb95cc', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7b9b5e67-62e1-441d-b99d-381c1233f050', 'vif_model': 
'vmxnet3'}] {{(pid=62070) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 984.952029] env[62070]: DEBUG oslo.service.loopingcall [None req-54b03663-a039-4755-80d7-40a200b8e0db tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 984.952029] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e4cf42ff-8440-42bc-b629-4b712fd94e99] Creating VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 984.952029] env[62070]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9666ed03-327a-40da-b3bc-5de5e9b0aa50 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.975730] env[62070]: DEBUG nova.compute.utils [None req-0f7be80c-62fb-418e-8b92-cd4fcab4bae0 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Using /dev/sd instead of None {{(pid=62070) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 984.977192] env[62070]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 984.977192] env[62070]: value = "task-1122189" [ 984.977192] env[62070]: _type = "Task" [ 984.977192] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 984.991642] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1122189, 'name': CreateVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 985.045814] env[62070]: DEBUG oslo_concurrency.lockutils [None req-e4aeb392-3326-4009-b0d7-1e0b8b5e5799 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.907s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 985.048603] env[62070]: DEBUG oslo_concurrency.lockutils [None req-10516eed-24ef-4548-bde8-566a3d8bb59d tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.732s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 985.050682] env[62070]: INFO nova.compute.claims [None req-10516eed-24ef-4548-bde8-566a3d8bb59d tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: f810eab8-1c8c-4f7a-8acd-f46ac5e6d31f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 985.077933] env[62070]: INFO nova.scheduler.client.report [None req-e4aeb392-3326-4009-b0d7-1e0b8b5e5799 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Deleted allocations for instance 4a5f644a-1670-4c6b-a762-f87f1ee4cce5 [ 985.162545] env[62070]: DEBUG nova.compute.manager [None req-ca3baa9f-e172-43d5-a6ee-07c9fa3a6821 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 6cba961f-f9f9-4d3c-853a-049a014c9dbb] Start spawning the instance on the hypervisor. 
{{(pid=62070) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 985.193178] env[62070]: DEBUG nova.virt.hardware [None req-ca3baa9f-e172-43d5-a6ee-07c9fa3a6821 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T09:21:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T09:21:17Z,direct_url=,disk_format='vmdk',id=43ea607c-7ece-4601-9b11-75c6a16aa7dd,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9d42cb2bbadf40d6b35f237f71234611',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T09:21:18Z,virtual_size=,visibility=), allow threads: False {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 985.193350] env[62070]: DEBUG nova.virt.hardware [None req-ca3baa9f-e172-43d5-a6ee-07c9fa3a6821 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Flavor limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 985.193403] env[62070]: DEBUG nova.virt.hardware [None req-ca3baa9f-e172-43d5-a6ee-07c9fa3a6821 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Image limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 985.193631] env[62070]: DEBUG nova.virt.hardware [None req-ca3baa9f-e172-43d5-a6ee-07c9fa3a6821 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Flavor pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 985.193826] env[62070]: DEBUG nova.virt.hardware [None req-ca3baa9f-e172-43d5-a6ee-07c9fa3a6821 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Image pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 985.194051] env[62070]: DEBUG nova.virt.hardware [None req-ca3baa9f-e172-43d5-a6ee-07c9fa3a6821 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 985.195039] env[62070]: DEBUG nova.virt.hardware [None req-ca3baa9f-e172-43d5-a6ee-07c9fa3a6821 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 985.195039] env[62070]: DEBUG nova.virt.hardware [None req-ca3baa9f-e172-43d5-a6ee-07c9fa3a6821 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 985.195039] env[62070]: DEBUG nova.virt.hardware [None 
req-ca3baa9f-e172-43d5-a6ee-07c9fa3a6821 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Got 1 possible topologies {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 985.195247] env[62070]: DEBUG nova.virt.hardware [None req-ca3baa9f-e172-43d5-a6ee-07c9fa3a6821 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 985.195908] env[62070]: DEBUG nova.virt.hardware [None req-ca3baa9f-e172-43d5-a6ee-07c9fa3a6821 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 985.196484] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6043706-a107-4a27-a55e-54ed2ad964c0 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.201646] env[62070]: DEBUG nova.compute.manager [req-45efe647-6d84-40c2-9f1f-e6229f4be723 req-5241d735-b148-457b-b993-4dbf10606797 service nova] [instance: e4cf42ff-8440-42bc-b629-4b712fd94e99] Received event network-changed-7b9b5e67-62e1-441d-b99d-381c1233f050 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 985.201868] env[62070]: DEBUG nova.compute.manager [req-45efe647-6d84-40c2-9f1f-e6229f4be723 req-5241d735-b148-457b-b993-4dbf10606797 service nova] [instance: e4cf42ff-8440-42bc-b629-4b712fd94e99] Refreshing instance network info cache due to event network-changed-7b9b5e67-62e1-441d-b99d-381c1233f050. 
{{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 985.202415] env[62070]: DEBUG oslo_concurrency.lockutils [req-45efe647-6d84-40c2-9f1f-e6229f4be723 req-5241d735-b148-457b-b993-4dbf10606797 service nova] Acquiring lock "refresh_cache-e4cf42ff-8440-42bc-b629-4b712fd94e99" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 985.202590] env[62070]: DEBUG oslo_concurrency.lockutils [req-45efe647-6d84-40c2-9f1f-e6229f4be723 req-5241d735-b148-457b-b993-4dbf10606797 service nova] Acquired lock "refresh_cache-e4cf42ff-8440-42bc-b629-4b712fd94e99" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 985.202724] env[62070]: DEBUG nova.network.neutron [req-45efe647-6d84-40c2-9f1f-e6229f4be723 req-5241d735-b148-457b-b993-4dbf10606797 service nova] [instance: e4cf42ff-8440-42bc-b629-4b712fd94e99] Refreshing network info cache for port 7b9b5e67-62e1-441d-b99d-381c1233f050 {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 985.211600] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e087d91-dd16-42ef-ad67-f7bdb833eec1 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.483837] env[62070]: DEBUG oslo_concurrency.lockutils [None req-0f7be80c-62fb-418e-8b92-cd4fcab4bae0 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Lock "84c00e4a-20d3-4739-8535-e27076d85a89" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.014s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 985.490205] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1122189, 'name': CreateVM_Task, 'duration_secs': 0.345309} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 985.491777] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e4cf42ff-8440-42bc-b629-4b712fd94e99] Created VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 985.491777] env[62070]: DEBUG oslo_concurrency.lockutils [None req-54b03663-a039-4755-80d7-40a200b8e0db tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 985.491777] env[62070]: DEBUG oslo_concurrency.lockutils [None req-54b03663-a039-4755-80d7-40a200b8e0db tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Acquired lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 985.491978] env[62070]: DEBUG oslo_concurrency.lockutils [None req-54b03663-a039-4755-80d7-40a200b8e0db tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 985.492196] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-af30c6bd-9e3a-4ff7-9a32-69eb90c20272 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.499392] env[62070]: DEBUG oslo_vmware.api [None req-54b03663-a039-4755-80d7-40a200b8e0db tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Waiting for the task: (returnval){ [ 985.499392] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]5282868b-5dd4-05b2-b94b-d685888c26a7" [ 985.499392] env[62070]: _type = "Task" [ 985.499392] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 985.511101] env[62070]: DEBUG oslo_vmware.api [None req-54b03663-a039-4755-80d7-40a200b8e0db tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]5282868b-5dd4-05b2-b94b-d685888c26a7, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 985.516747] env[62070]: DEBUG oslo_concurrency.lockutils [None req-93767b52-6617-4bde-8e1b-f022a96d60f4 tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] Acquiring lock "b9e5a798-9ce9-4e36-8c32-bc32b0dc1eae" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 985.517033] env[62070]: DEBUG oslo_concurrency.lockutils [None req-93767b52-6617-4bde-8e1b-f022a96d60f4 tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] Lock "b9e5a798-9ce9-4e36-8c32-bc32b0dc1eae" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 985.587487] env[62070]: DEBUG oslo_concurrency.lockutils [None req-e4aeb392-3326-4009-b0d7-1e0b8b5e5799 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Lock "4a5f644a-1670-4c6b-a762-f87f1ee4cce5" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 17.772s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 985.733400] env[62070]: DEBUG nova.network.neutron [None req-ca3baa9f-e172-43d5-a6ee-07c9fa3a6821 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 6cba961f-f9f9-4d3c-853a-049a014c9dbb] Successfully updated port: 2c284e8d-6670-4b5c-b9b9-b279c81efea8 {{(pid=62070) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 986.012468] env[62070]: DEBUG oslo_vmware.api [None req-54b03663-a039-4755-80d7-40a200b8e0db tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]5282868b-5dd4-05b2-b94b-d685888c26a7, 'name': SearchDatastore_Task, 'duration_secs': 0.023527} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 986.012755] env[62070]: DEBUG oslo_concurrency.lockutils [None req-54b03663-a039-4755-80d7-40a200b8e0db tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Releasing lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 986.013310] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-54b03663-a039-4755-80d7-40a200b8e0db tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] [instance: e4cf42ff-8440-42bc-b629-4b712fd94e99] Processing image 43ea607c-7ece-4601-9b11-75c6a16aa7dd {{(pid=62070) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 986.013621] env[62070]: DEBUG oslo_concurrency.lockutils [None req-54b03663-a039-4755-80d7-40a200b8e0db tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 986.013803] env[62070]: DEBUG oslo_concurrency.lockutils [None req-54b03663-a039-4755-80d7-40a200b8e0db tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Acquired lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 986.014017] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-54b03663-a039-4755-80d7-40a200b8e0db tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 986.014408] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-03383d2e-83e2-4200-9047-693ba6bcd450 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.019534] env[62070]: DEBUG nova.compute.manager [None req-93767b52-6617-4bde-8e1b-f022a96d60f4 tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] [instance: b9e5a798-9ce9-4e36-8c32-bc32b0dc1eae] Starting instance... {{(pid=62070) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 986.026019] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-54b03663-a039-4755-80d7-40a200b8e0db tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 986.026570] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-54b03663-a039-4755-80d7-40a200b8e0db tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62070) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 986.028891] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-27481ef7-068f-4361-8908-c47adc8011b5 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.037354] env[62070]: DEBUG oslo_vmware.api [None req-54b03663-a039-4755-80d7-40a200b8e0db tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Waiting for the task: (returnval){ [ 986.037354] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]527e09bd-f8fe-73e5-1873-213c07861ab4" [ 986.037354] env[62070]: _type = "Task" [ 986.037354] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 986.048356] env[62070]: DEBUG oslo_vmware.api [None req-54b03663-a039-4755-80d7-40a200b8e0db tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]527e09bd-f8fe-73e5-1873-213c07861ab4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 986.146180] env[62070]: DEBUG nova.network.neutron [req-45efe647-6d84-40c2-9f1f-e6229f4be723 req-5241d735-b148-457b-b993-4dbf10606797 service nova] [instance: e4cf42ff-8440-42bc-b629-4b712fd94e99] Updated VIF entry in instance network info cache for port 7b9b5e67-62e1-441d-b99d-381c1233f050. {{(pid=62070) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 986.146180] env[62070]: DEBUG nova.network.neutron [req-45efe647-6d84-40c2-9f1f-e6229f4be723 req-5241d735-b148-457b-b993-4dbf10606797 service nova] [instance: e4cf42ff-8440-42bc-b629-4b712fd94e99] Updating instance_info_cache with network_info: [{"id": "7b9b5e67-62e1-441d-b99d-381c1233f050", "address": "fa:16:3e:eb:3d:c7", "network": {"id": "6a62b79f-a98b-4518-86cb-facc7b77da1d", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-2107556336-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "772f7fcee5f44b899b6df797e1ed5ddd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78e1ebb0-0130-446b-bf73-a0e59bbb95cc", "external-id": "nsx-vlan-transportzone-414", "segmentation_id": 414, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7b9b5e67-62", "ovs_interfaceid": "7b9b5e67-62e1-441d-b99d-381c1233f050", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 986.242744] env[62070]: DEBUG oslo_concurrency.lockutils [None req-ca3baa9f-e172-43d5-a6ee-07c9fa3a6821 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Acquiring lock "refresh_cache-6cba961f-f9f9-4d3c-853a-049a014c9dbb" {{(pid=62070) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 986.243359] env[62070]: DEBUG oslo_concurrency.lockutils [None req-ca3baa9f-e172-43d5-a6ee-07c9fa3a6821 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Acquired lock "refresh_cache-6cba961f-f9f9-4d3c-853a-049a014c9dbb" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 986.243359] env[62070]: DEBUG nova.network.neutron [None req-ca3baa9f-e172-43d5-a6ee-07c9fa3a6821 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 6cba961f-f9f9-4d3c-853a-049a014c9dbb] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 986.373878] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc6c6e06-5270-4841-b87e-dd1f18e9426d {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.389256] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fc2dcb2-1d52-4dab-b27a-1282dcedbbba {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.423162] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eaf015d0-900d-4ca6-a04d-d989f618c6dd {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.432759] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1441fc94-8e6f-424b-beba-7d8a47fea8f7 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.449383] env[62070]: DEBUG nova.compute.provider_tree [None req-10516eed-24ef-4548-bde8-566a3d8bb59d tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 986.543940] env[62070]: DEBUG oslo_concurrency.lockutils [None req-93767b52-6617-4bde-8e1b-f022a96d60f4 tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 986.563048] env[62070]: DEBUG oslo_vmware.api [None req-54b03663-a039-4755-80d7-40a200b8e0db tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]527e09bd-f8fe-73e5-1873-213c07861ab4, 'name': SearchDatastore_Task, 'duration_secs': 0.013133} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 986.563048] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c3510a97-6bd8-406e-9c45-d63699370695 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.568240] env[62070]: DEBUG oslo_vmware.api [None req-54b03663-a039-4755-80d7-40a200b8e0db tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Waiting for the task: (returnval){ [ 986.568240] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]522099b1-7db6-ec1b-d1d1-fe898d7ea92a" [ 986.568240] env[62070]: _type = "Task" [ 986.568240] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 986.582528] env[62070]: DEBUG oslo_vmware.api [None req-54b03663-a039-4755-80d7-40a200b8e0db tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]522099b1-7db6-ec1b-d1d1-fe898d7ea92a, 'name': SearchDatastore_Task, 'duration_secs': 0.010243} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 986.582816] env[62070]: DEBUG oslo_concurrency.lockutils [None req-54b03663-a039-4755-80d7-40a200b8e0db tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Releasing lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 986.583059] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-54b03663-a039-4755-80d7-40a200b8e0db tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore1] e4cf42ff-8440-42bc-b629-4b712fd94e99/e4cf42ff-8440-42bc-b629-4b712fd94e99.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 986.583327] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-61bd5d64-a30b-48c8-b03a-4ae700a3de0c {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.591549] env[62070]: DEBUG oslo_vmware.api [None req-54b03663-a039-4755-80d7-40a200b8e0db tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Waiting for the task: (returnval){ [ 986.591549] env[62070]: value = "task-1122190" [ 986.591549] env[62070]: _type = "Task" [ 986.591549] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 986.601171] env[62070]: DEBUG oslo_vmware.api [None req-54b03663-a039-4755-80d7-40a200b8e0db tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Task: {'id': task-1122190, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 986.618025] env[62070]: DEBUG oslo_concurrency.lockutils [None req-0f7be80c-62fb-418e-8b92-cd4fcab4bae0 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Acquiring lock "84c00e4a-20d3-4739-8535-e27076d85a89" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 986.618264] env[62070]: DEBUG oslo_concurrency.lockutils [None req-0f7be80c-62fb-418e-8b92-cd4fcab4bae0 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Lock "84c00e4a-20d3-4739-8535-e27076d85a89" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 986.618519] env[62070]: INFO nova.compute.manager [None req-0f7be80c-62fb-418e-8b92-cd4fcab4bae0 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 84c00e4a-20d3-4739-8535-e27076d85a89] Attaching volume 773d984d-4185-4716-a44d-6527016a9c86 to /dev/sdb [ 986.648863] env[62070]: DEBUG oslo_concurrency.lockutils [req-45efe647-6d84-40c2-9f1f-e6229f4be723 req-5241d735-b148-457b-b993-4dbf10606797 service nova] Releasing lock "refresh_cache-e4cf42ff-8440-42bc-b629-4b712fd94e99" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 986.675341] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fcd026e4-31c1-4a0c-85d0-079dfef4b4f7 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.684106] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1554a76-2d83-4163-b46a-14bd3aae614c {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.703149] env[62070]: DEBUG nova.virt.block_device [None req-0f7be80c-62fb-418e-8b92-cd4fcab4bae0 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 84c00e4a-20d3-4739-8535-e27076d85a89] Updating existing volume attachment record: e68fe479-8471-4fbc-a489-5e0b2660225e {{(pid=62070) _volume_attach /opt/stack/nova/nova/virt/block_device.py:679}} [ 986.789578] env[62070]: DEBUG nova.network.neutron [None req-ca3baa9f-e172-43d5-a6ee-07c9fa3a6821 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 6cba961f-f9f9-4d3c-853a-049a014c9dbb] Instance cache missing network info. 
{{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 986.950145] env[62070]: DEBUG nova.network.neutron [None req-ca3baa9f-e172-43d5-a6ee-07c9fa3a6821 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 6cba961f-f9f9-4d3c-853a-049a014c9dbb] Updating instance_info_cache with network_info: [{"id": "2c284e8d-6670-4b5c-b9b9-b279c81efea8", "address": "fa:16:3e:a6:55:17", "network": {"id": "5ea0fffc-372c-450e-b27b-10959077d58f", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1853458988-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9191f0e6c2ee401abca64c0780e230bf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f3c995e9-7f2f-420c-880a-d60da6e708ad", "external-id": "nsx-vlan-transportzone-166", "segmentation_id": 166, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2c284e8d-66", "ovs_interfaceid": "2c284e8d-6670-4b5c-b9b9-b279c81efea8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 986.953252] env[62070]: DEBUG nova.scheduler.client.report [None req-10516eed-24ef-4548-bde8-566a3d8bb59d tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 987.102701] env[62070]: DEBUG oslo_vmware.api [None req-54b03663-a039-4755-80d7-40a200b8e0db tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Task: {'id': task-1122190, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 987.229606] env[62070]: DEBUG nova.compute.manager [req-8bc19c12-0a54-44c4-8217-8642bd452f18 req-d69707b8-0fc2-4c1f-b277-f33747c90baa service nova] [instance: 6cba961f-f9f9-4d3c-853a-049a014c9dbb] Received event network-vif-plugged-2c284e8d-6670-4b5c-b9b9-b279c81efea8 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 987.230297] env[62070]: DEBUG oslo_concurrency.lockutils [req-8bc19c12-0a54-44c4-8217-8642bd452f18 req-d69707b8-0fc2-4c1f-b277-f33747c90baa service nova] Acquiring lock "6cba961f-f9f9-4d3c-853a-049a014c9dbb-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 987.230297] env[62070]: DEBUG oslo_concurrency.lockutils [req-8bc19c12-0a54-44c4-8217-8642bd452f18 req-d69707b8-0fc2-4c1f-b277-f33747c90baa service nova] Lock "6cba961f-f9f9-4d3c-853a-049a014c9dbb-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 987.230297] env[62070]: DEBUG oslo_concurrency.lockutils [req-8bc19c12-0a54-44c4-8217-8642bd452f18 req-d69707b8-0fc2-4c1f-b277-f33747c90baa service nova] Lock "6cba961f-f9f9-4d3c-853a-049a014c9dbb-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 987.230552] env[62070]: DEBUG nova.compute.manager [req-8bc19c12-0a54-44c4-8217-8642bd452f18 req-d69707b8-0fc2-4c1f-b277-f33747c90baa service nova] [instance: 6cba961f-f9f9-4d3c-853a-049a014c9dbb] No waiting events found dispatching network-vif-plugged-2c284e8d-6670-4b5c-b9b9-b279c81efea8 {{(pid=62070) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 987.230624] env[62070]: WARNING nova.compute.manager [req-8bc19c12-0a54-44c4-8217-8642bd452f18 req-d69707b8-0fc2-4c1f-b277-f33747c90baa service nova] [instance: 6cba961f-f9f9-4d3c-853a-049a014c9dbb] Received unexpected event network-vif-plugged-2c284e8d-6670-4b5c-b9b9-b279c81efea8 for instance with vm_state building and task_state spawning. [ 987.230879] env[62070]: DEBUG nova.compute.manager [req-8bc19c12-0a54-44c4-8217-8642bd452f18 req-d69707b8-0fc2-4c1f-b277-f33747c90baa service nova] [instance: 6cba961f-f9f9-4d3c-853a-049a014c9dbb] Received event network-changed-2c284e8d-6670-4b5c-b9b9-b279c81efea8 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 987.230944] env[62070]: DEBUG nova.compute.manager [req-8bc19c12-0a54-44c4-8217-8642bd452f18 req-d69707b8-0fc2-4c1f-b277-f33747c90baa service nova] [instance: 6cba961f-f9f9-4d3c-853a-049a014c9dbb] Refreshing instance network info cache due to event network-changed-2c284e8d-6670-4b5c-b9b9-b279c81efea8. 
{{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 987.231137] env[62070]: DEBUG oslo_concurrency.lockutils [req-8bc19c12-0a54-44c4-8217-8642bd452f18 req-d69707b8-0fc2-4c1f-b277-f33747c90baa service nova] Acquiring lock "refresh_cache-6cba961f-f9f9-4d3c-853a-049a014c9dbb" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 987.457323] env[62070]: DEBUG oslo_concurrency.lockutils [None req-ca3baa9f-e172-43d5-a6ee-07c9fa3a6821 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Releasing lock "refresh_cache-6cba961f-f9f9-4d3c-853a-049a014c9dbb" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 987.457690] env[62070]: DEBUG nova.compute.manager [None req-ca3baa9f-e172-43d5-a6ee-07c9fa3a6821 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 6cba961f-f9f9-4d3c-853a-049a014c9dbb] Instance network_info: |[{"id": "2c284e8d-6670-4b5c-b9b9-b279c81efea8", "address": "fa:16:3e:a6:55:17", "network": {"id": "5ea0fffc-372c-450e-b27b-10959077d58f", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1853458988-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9191f0e6c2ee401abca64c0780e230bf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f3c995e9-7f2f-420c-880a-d60da6e708ad", "external-id": "nsx-vlan-transportzone-166", "segmentation_id": 166, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2c284e8d-66", "ovs_interfaceid": "2c284e8d-6670-4b5c-b9b9-b279c81efea8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 987.458402] env[62070]: DEBUG oslo_concurrency.lockutils [None req-10516eed-24ef-4548-bde8-566a3d8bb59d tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.410s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 987.458888] env[62070]: DEBUG nova.compute.manager [None req-10516eed-24ef-4548-bde8-566a3d8bb59d tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: f810eab8-1c8c-4f7a-8acd-f46ac5e6d31f] Start building networks asynchronously for instance. 
{{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 987.461480] env[62070]: DEBUG oslo_concurrency.lockutils [req-8bc19c12-0a54-44c4-8217-8642bd452f18 req-d69707b8-0fc2-4c1f-b277-f33747c90baa service nova] Acquired lock "refresh_cache-6cba961f-f9f9-4d3c-853a-049a014c9dbb" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 987.461624] env[62070]: DEBUG nova.network.neutron [req-8bc19c12-0a54-44c4-8217-8642bd452f18 req-d69707b8-0fc2-4c1f-b277-f33747c90baa service nova] [instance: 6cba961f-f9f9-4d3c-853a-049a014c9dbb] Refreshing network info cache for port 2c284e8d-6670-4b5c-b9b9-b279c81efea8 {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 987.463022] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-ca3baa9f-e172-43d5-a6ee-07c9fa3a6821 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 6cba961f-f9f9-4d3c-853a-049a014c9dbb] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a6:55:17', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f3c995e9-7f2f-420c-880a-d60da6e708ad', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2c284e8d-6670-4b5c-b9b9-b279c81efea8', 'vif_model': 'vmxnet3'}] {{(pid=62070) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 987.470416] env[62070]: DEBUG oslo.service.loopingcall [None req-ca3baa9f-e172-43d5-a6ee-07c9fa3a6821 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 987.470625] env[62070]: DEBUG oslo_concurrency.lockutils [None req-e024cd23-3abe-484c-b9ec-a73746a38127 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 8.213s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 987.470845] env[62070]: DEBUG nova.objects.instance [None req-e024cd23-3abe-484c-b9ec-a73746a38127 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Lazy-loading 'resources' on Instance uuid cf52cee8-874e-44e8-a36e-49ac20f3e312 {{(pid=62070) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 987.472594] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6cba961f-f9f9-4d3c-853a-049a014c9dbb] Creating VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 987.472973] env[62070]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-48594198-fdf6-4f25-86fe-03aa519c92db {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.494234] env[62070]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 987.494234] env[62070]: value = "task-1122194" [ 987.494234] env[62070]: _type = "Task" [ 987.494234] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 987.503768] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1122194, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 987.604083] env[62070]: DEBUG oslo_vmware.api [None req-54b03663-a039-4755-80d7-40a200b8e0db tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Task: {'id': task-1122190, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.663884} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 987.605029] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-54b03663-a039-4755-80d7-40a200b8e0db tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore1] e4cf42ff-8440-42bc-b629-4b712fd94e99/e4cf42ff-8440-42bc-b629-4b712fd94e99.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 987.605029] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-54b03663-a039-4755-80d7-40a200b8e0db tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] [instance: e4cf42ff-8440-42bc-b629-4b712fd94e99] Extending root virtual disk to 1048576 {{(pid=62070) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 987.605029] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-28d9c125-185b-4c9f-bff2-1b22d9ebf898 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.612199] env[62070]: DEBUG oslo_vmware.api [None req-54b03663-a039-4755-80d7-40a200b8e0db tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Waiting for the task: (returnval){ [ 987.612199] env[62070]: value = "task-1122195" [ 987.612199] env[62070]: _type = "Task" [ 987.612199] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 987.620958] env[62070]: DEBUG oslo_vmware.api [None req-54b03663-a039-4755-80d7-40a200b8e0db tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Task: {'id': task-1122195, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 987.974024] env[62070]: DEBUG nova.compute.utils [None req-10516eed-24ef-4548-bde8-566a3d8bb59d tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Using /dev/sd instead of None {{(pid=62070) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 987.980281] env[62070]: DEBUG nova.compute.manager [None req-10516eed-24ef-4548-bde8-566a3d8bb59d tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: f810eab8-1c8c-4f7a-8acd-f46ac5e6d31f] Allocating IP information in the background. 
{{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 987.980458] env[62070]: DEBUG nova.network.neutron [None req-10516eed-24ef-4548-bde8-566a3d8bb59d tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: f810eab8-1c8c-4f7a-8acd-f46ac5e6d31f] allocate_for_instance() {{(pid=62070) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 988.009702] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1122194, 'name': CreateVM_Task, 'duration_secs': 0.409427} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 988.011998] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6cba961f-f9f9-4d3c-853a-049a014c9dbb] Created VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 988.015272] env[62070]: DEBUG oslo_concurrency.lockutils [None req-ca3baa9f-e172-43d5-a6ee-07c9fa3a6821 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 988.015434] env[62070]: DEBUG oslo_concurrency.lockutils [None req-ca3baa9f-e172-43d5-a6ee-07c9fa3a6821 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Acquired lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 988.015795] env[62070]: DEBUG oslo_concurrency.lockutils [None req-ca3baa9f-e172-43d5-a6ee-07c9fa3a6821 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 988.016491] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-aad7da7d-45a4-490e-b2bc-1fb174add08d {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.021844] env[62070]: DEBUG oslo_vmware.api [None req-ca3baa9f-e172-43d5-a6ee-07c9fa3a6821 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Waiting for the task: (returnval){ [ 988.021844] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]5286d7f0-5abf-40ab-e642-784e3364139b" [ 988.021844] env[62070]: _type = "Task" [ 988.021844] env[62070]: } to complete. 
{{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 988.026132] env[62070]: DEBUG nova.policy [None req-10516eed-24ef-4548-bde8-566a3d8bb59d tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'db9baf29d0b5489da2657286bfd695c0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '91e246e32f29422e90fae974cfee9d8f', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62070) authorize /opt/stack/nova/nova/policy.py:201}} [ 988.036739] env[62070]: DEBUG oslo_vmware.api [None req-ca3baa9f-e172-43d5-a6ee-07c9fa3a6821 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]5286d7f0-5abf-40ab-e642-784e3364139b, 'name': SearchDatastore_Task, 'duration_secs': 0.011767} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 988.037560] env[62070]: DEBUG oslo_concurrency.lockutils [None req-ca3baa9f-e172-43d5-a6ee-07c9fa3a6821 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Releasing lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 988.037560] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-ca3baa9f-e172-43d5-a6ee-07c9fa3a6821 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 6cba961f-f9f9-4d3c-853a-049a014c9dbb] Processing image 43ea607c-7ece-4601-9b11-75c6a16aa7dd {{(pid=62070) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 988.038113] env[62070]: DEBUG oslo_concurrency.lockutils [None req-ca3baa9f-e172-43d5-a6ee-07c9fa3a6821 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 988.038113] env[62070]: DEBUG oslo_concurrency.lockutils [None req-ca3baa9f-e172-43d5-a6ee-07c9fa3a6821 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Acquired lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 988.038249] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-ca3baa9f-e172-43d5-a6ee-07c9fa3a6821 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 988.038526] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8ef9c17c-01d5-4bce-ad8d-9b83db3d5615 {{(pid=62070) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.051310] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-ca3baa9f-e172-43d5-a6ee-07c9fa3a6821 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 988.051504] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-ca3baa9f-e172-43d5-a6ee-07c9fa3a6821 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62070) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 988.052594] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-67fdf510-4efd-4553-91f7-26a2d61fd583 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.064124] env[62070]: DEBUG oslo_vmware.api [None req-ca3baa9f-e172-43d5-a6ee-07c9fa3a6821 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Waiting for the task: (returnval){ [ 988.064124] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]52b8e0b2-7bed-0cff-4ce2-ce2caf3badc5" [ 988.064124] env[62070]: _type = "Task" [ 988.064124] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 988.073242] env[62070]: DEBUG oslo_vmware.api [None req-ca3baa9f-e172-43d5-a6ee-07c9fa3a6821 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52b8e0b2-7bed-0cff-4ce2-ce2caf3badc5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 988.125710] env[62070]: DEBUG oslo_vmware.api [None req-54b03663-a039-4755-80d7-40a200b8e0db tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Task: {'id': task-1122195, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074802} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 988.125710] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-54b03663-a039-4755-80d7-40a200b8e0db tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] [instance: e4cf42ff-8440-42bc-b629-4b712fd94e99] Extended root virtual disk {{(pid=62070) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 988.125710] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eebc9c5f-50b5-4bf7-a371-5920f4e7c17c {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.149713] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-54b03663-a039-4755-80d7-40a200b8e0db tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] [instance: e4cf42ff-8440-42bc-b629-4b712fd94e99] Reconfiguring VM instance instance-0000005b to attach disk [datastore1] e4cf42ff-8440-42bc-b629-4b712fd94e99/e4cf42ff-8440-42bc-b629-4b712fd94e99.vmdk or device None with type sparse {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 988.152606] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f2aa3be3-e84c-4001-ab4a-407c8e968306 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.174045] env[62070]: DEBUG oslo_vmware.api [None req-54b03663-a039-4755-80d7-40a200b8e0db tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Waiting for the task: (returnval){ [ 988.174045] env[62070]: value = "task-1122196" [ 988.174045] env[62070]: _type = "Task" [ 988.174045] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 988.186239] env[62070]: DEBUG oslo_vmware.api [None req-54b03663-a039-4755-80d7-40a200b8e0db tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Task: {'id': task-1122196, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 988.235452] env[62070]: DEBUG nova.network.neutron [req-8bc19c12-0a54-44c4-8217-8642bd452f18 req-d69707b8-0fc2-4c1f-b277-f33747c90baa service nova] [instance: 6cba961f-f9f9-4d3c-853a-049a014c9dbb] Updated VIF entry in instance network info cache for port 2c284e8d-6670-4b5c-b9b9-b279c81efea8. 
{{(pid=62070) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 988.235732] env[62070]: DEBUG nova.network.neutron [req-8bc19c12-0a54-44c4-8217-8642bd452f18 req-d69707b8-0fc2-4c1f-b277-f33747c90baa service nova] [instance: 6cba961f-f9f9-4d3c-853a-049a014c9dbb] Updating instance_info_cache with network_info: [{"id": "2c284e8d-6670-4b5c-b9b9-b279c81efea8", "address": "fa:16:3e:a6:55:17", "network": {"id": "5ea0fffc-372c-450e-b27b-10959077d58f", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1853458988-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9191f0e6c2ee401abca64c0780e230bf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f3c995e9-7f2f-420c-880a-d60da6e708ad", "external-id": "nsx-vlan-transportzone-166", "segmentation_id": 166, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2c284e8d-66", "ovs_interfaceid": "2c284e8d-6670-4b5c-b9b9-b279c81efea8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 988.333909] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfd38134-bfc8-431a-9237-a8ebaecf19a8 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.342096] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9eadc239-b398-4d19-866d-4995f6758bc2 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.379700] env[62070]: DEBUG oslo_concurrency.lockutils [None req-9026fc09-39c2-473e-a4e2-4fc03ac7d742 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Acquiring lock "5cccd79d-d243-49db-8581-718dd594f3b3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 988.380018] env[62070]: DEBUG oslo_concurrency.lockutils [None req-9026fc09-39c2-473e-a4e2-4fc03ac7d742 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Lock "5cccd79d-d243-49db-8581-718dd594f3b3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 988.382126] env[62070]: DEBUG nova.network.neutron [None req-10516eed-24ef-4548-bde8-566a3d8bb59d tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: f810eab8-1c8c-4f7a-8acd-f46ac5e6d31f] Successfully created port: 0b063dd9-e920-4bb9-88bf-9f98085170bb {{(pid=62070) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 988.384463] env[62070]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8aca7b2e-e549-4990-9f50-e9c0fbc259fe {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.395152] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d686c120-bb14-4da3-a976-280d1034aa64 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.411339] env[62070]: DEBUG nova.compute.provider_tree [None req-e024cd23-3abe-484c-b9ec-a73746a38127 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Updating inventory in ProviderTree for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 988.479816] env[62070]: DEBUG nova.compute.manager [None req-10516eed-24ef-4548-bde8-566a3d8bb59d tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: f810eab8-1c8c-4f7a-8acd-f46ac5e6d31f] Start building block device mappings for instance. {{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 988.575105] env[62070]: DEBUG oslo_vmware.api [None req-ca3baa9f-e172-43d5-a6ee-07c9fa3a6821 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52b8e0b2-7bed-0cff-4ce2-ce2caf3badc5, 'name': SearchDatastore_Task, 'duration_secs': 0.00994} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 988.575944] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cb9c7b56-b307-41f5-bbb6-fbfb2c518cba {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.581445] env[62070]: DEBUG oslo_vmware.api [None req-ca3baa9f-e172-43d5-a6ee-07c9fa3a6821 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Waiting for the task: (returnval){ [ 988.581445] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]5200cef2-0a45-6d2c-fe05-dfc0c6ccef42" [ 988.581445] env[62070]: _type = "Task" [ 988.581445] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 988.589722] env[62070]: DEBUG oslo_vmware.api [None req-ca3baa9f-e172-43d5-a6ee-07c9fa3a6821 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]5200cef2-0a45-6d2c-fe05-dfc0c6ccef42, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 988.686570] env[62070]: DEBUG oslo_vmware.api [None req-54b03663-a039-4755-80d7-40a200b8e0db tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Task: {'id': task-1122196, 'name': ReconfigVM_Task, 'duration_secs': 0.301531} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 988.686917] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-54b03663-a039-4755-80d7-40a200b8e0db tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] [instance: e4cf42ff-8440-42bc-b629-4b712fd94e99] Reconfigured VM instance instance-0000005b to attach disk [datastore1] e4cf42ff-8440-42bc-b629-4b712fd94e99/e4cf42ff-8440-42bc-b629-4b712fd94e99.vmdk or device None with type sparse {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 988.687556] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ed98fd16-5ef2-4d1e-834d-7c3ccff82513 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.695906] env[62070]: DEBUG oslo_vmware.api [None req-54b03663-a039-4755-80d7-40a200b8e0db tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Waiting for the task: (returnval){ [ 988.695906] env[62070]: value = "task-1122197" [ 988.695906] env[62070]: _type = "Task" [ 988.695906] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 988.704467] env[62070]: DEBUG oslo_vmware.api [None req-54b03663-a039-4755-80d7-40a200b8e0db tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Task: {'id': task-1122197, 'name': Rename_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 988.741382] env[62070]: DEBUG oslo_concurrency.lockutils [req-8bc19c12-0a54-44c4-8217-8642bd452f18 req-d69707b8-0fc2-4c1f-b277-f33747c90baa service nova] Releasing lock "refresh_cache-6cba961f-f9f9-4d3c-853a-049a014c9dbb" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 988.888265] env[62070]: DEBUG nova.compute.manager [None req-9026fc09-39c2-473e-a4e2-4fc03ac7d742 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] [instance: 5cccd79d-d243-49db-8581-718dd594f3b3] Starting instance... {{(pid=62070) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 988.931044] env[62070]: ERROR nova.scheduler.client.report [None req-e024cd23-3abe-484c-b9ec-a73746a38127 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] [req-bb7231ab-abe0-4aab-b1fc-2a962d2b27b5] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 21c7c111-1b69-4468-b2c4-5dd96014fbd6. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-bb7231ab-abe0-4aab-b1fc-2a962d2b27b5"}]} [ 988.949280] env[62070]: DEBUG nova.scheduler.client.report [None req-e024cd23-3abe-484c-b9ec-a73746a38127 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Refreshing inventories for resource provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 988.963032] env[62070]: DEBUG nova.scheduler.client.report [None req-e024cd23-3abe-484c-b9ec-a73746a38127 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Updating ProviderTree inventory for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 988.963032] env[62070]: DEBUG nova.compute.provider_tree [None req-e024cd23-3abe-484c-b9ec-a73746a38127 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Updating inventory in ProviderTree for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 988.974930] env[62070]: DEBUG nova.scheduler.client.report [None req-e024cd23-3abe-484c-b9ec-a73746a38127 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Refreshing aggregate associations for resource provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6, aggregates: None {{(pid=62070) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 988.995206] env[62070]: DEBUG nova.scheduler.client.report [None req-e024cd23-3abe-484c-b9ec-a73746a38127 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Refreshing trait associations for resource provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6, traits: COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO {{(pid=62070) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 989.092933] env[62070]: DEBUG oslo_vmware.api [None req-ca3baa9f-e172-43d5-a6ee-07c9fa3a6821 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]5200cef2-0a45-6d2c-fe05-dfc0c6ccef42, 'name': SearchDatastore_Task, 'duration_secs': 0.009775} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 989.095290] env[62070]: DEBUG oslo_concurrency.lockutils [None req-ca3baa9f-e172-43d5-a6ee-07c9fa3a6821 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Releasing lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 989.095580] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-ca3baa9f-e172-43d5-a6ee-07c9fa3a6821 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore1] 6cba961f-f9f9-4d3c-853a-049a014c9dbb/6cba961f-f9f9-4d3c-853a-049a014c9dbb.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 989.096113] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1907f0dc-5ec4-4fe3-82da-47075771e7b1 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.103017] env[62070]: DEBUG oslo_vmware.api [None req-ca3baa9f-e172-43d5-a6ee-07c9fa3a6821 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Waiting for the task: (returnval){ [ 989.103017] env[62070]: value = "task-1122198" [ 989.103017] env[62070]: _type = "Task" [ 989.103017] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 989.117260] env[62070]: DEBUG oslo_vmware.api [None req-ca3baa9f-e172-43d5-a6ee-07c9fa3a6821 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Task: {'id': task-1122198, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 989.206166] env[62070]: DEBUG oslo_vmware.api [None req-54b03663-a039-4755-80d7-40a200b8e0db tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Task: {'id': task-1122197, 'name': Rename_Task, 'duration_secs': 0.148499} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 989.206541] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-54b03663-a039-4755-80d7-40a200b8e0db tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] [instance: e4cf42ff-8440-42bc-b629-4b712fd94e99] Powering on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 989.206874] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-df64d906-4d2d-4231-9369-059ea5252bb0 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.214909] env[62070]: DEBUG oslo_vmware.api [None req-54b03663-a039-4755-80d7-40a200b8e0db tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Waiting for the task: (returnval){ [ 989.214909] env[62070]: value = "task-1122200" [ 989.214909] env[62070]: _type = "Task" [ 989.214909] env[62070]: } to complete. 
{{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 989.223418] env[62070]: DEBUG oslo_vmware.api [None req-54b03663-a039-4755-80d7-40a200b8e0db tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Task: {'id': task-1122200, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 989.263248] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dad809f1-0fe2-478d-8bd1-84b3c969e66b {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.272730] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf8dde7e-594c-41ab-afc0-037104f93e2b {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.306952] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab665650-83c1-49c3-a67e-4ba78a6bcd8d {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.316639] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-538f2597-1e67-4c72-a7b0-174493eb4087 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.334144] env[62070]: DEBUG nova.compute.provider_tree [None req-e024cd23-3abe-484c-b9ec-a73746a38127 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Updating inventory in ProviderTree for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 989.408666] env[62070]: DEBUG oslo_concurrency.lockutils [None req-9026fc09-39c2-473e-a4e2-4fc03ac7d742 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 989.489066] env[62070]: DEBUG nova.compute.manager [None req-10516eed-24ef-4548-bde8-566a3d8bb59d tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: f810eab8-1c8c-4f7a-8acd-f46ac5e6d31f] Start spawning the instance on the hypervisor. 
{{(pid=62070) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 989.519507] env[62070]: DEBUG nova.virt.hardware [None req-10516eed-24ef-4548-bde8-566a3d8bb59d tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T09:21:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T09:21:17Z,direct_url=,disk_format='vmdk',id=43ea607c-7ece-4601-9b11-75c6a16aa7dd,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9d42cb2bbadf40d6b35f237f71234611',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T09:21:18Z,virtual_size=,visibility=), allow threads: False {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 989.519803] env[62070]: DEBUG nova.virt.hardware [None req-10516eed-24ef-4548-bde8-566a3d8bb59d tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Flavor limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 989.519989] env[62070]: DEBUG nova.virt.hardware [None req-10516eed-24ef-4548-bde8-566a3d8bb59d tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Image limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 989.520200] env[62070]: DEBUG nova.virt.hardware [None req-10516eed-24ef-4548-bde8-566a3d8bb59d tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Flavor pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 989.520369] env[62070]: DEBUG nova.virt.hardware [None req-10516eed-24ef-4548-bde8-566a3d8bb59d tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Image pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 989.520525] env[62070]: DEBUG nova.virt.hardware [None req-10516eed-24ef-4548-bde8-566a3d8bb59d tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 989.520756] env[62070]: DEBUG nova.virt.hardware [None req-10516eed-24ef-4548-bde8-566a3d8bb59d tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 989.520943] env[62070]: DEBUG nova.virt.hardware [None req-10516eed-24ef-4548-bde8-566a3d8bb59d tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 989.521164] env[62070]: DEBUG nova.virt.hardware [None req-10516eed-24ef-4548-bde8-566a3d8bb59d tempest-ServersTestJSON-1318385680 
tempest-ServersTestJSON-1318385680-project-member] Got 1 possible topologies {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 989.521362] env[62070]: DEBUG nova.virt.hardware [None req-10516eed-24ef-4548-bde8-566a3d8bb59d tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 989.521568] env[62070]: DEBUG nova.virt.hardware [None req-10516eed-24ef-4548-bde8-566a3d8bb59d tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 989.522527] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88decc44-d5d3-4a7d-9ce1-4e9fe21554df {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.533636] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e6f0c4f-7d4e-437c-a039-c0ded39d1fdf {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.613297] env[62070]: DEBUG oslo_vmware.api [None req-ca3baa9f-e172-43d5-a6ee-07c9fa3a6821 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Task: {'id': task-1122198, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.485773} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 989.613565] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-ca3baa9f-e172-43d5-a6ee-07c9fa3a6821 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore1] 6cba961f-f9f9-4d3c-853a-049a014c9dbb/6cba961f-f9f9-4d3c-853a-049a014c9dbb.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 989.613783] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-ca3baa9f-e172-43d5-a6ee-07c9fa3a6821 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 6cba961f-f9f9-4d3c-853a-049a014c9dbb] Extending root virtual disk to 1048576 {{(pid=62070) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 989.614043] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-333c2032-1534-4526-a912-28f886ca9761 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.622208] env[62070]: DEBUG oslo_vmware.api [None req-ca3baa9f-e172-43d5-a6ee-07c9fa3a6821 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Waiting for the task: (returnval){ [ 989.622208] env[62070]: value = "task-1122201" [ 989.622208] env[62070]: _type = "Task" [ 989.622208] env[62070]: } to complete. 
{{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 989.630594] env[62070]: DEBUG oslo_vmware.api [None req-ca3baa9f-e172-43d5-a6ee-07c9fa3a6821 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Task: {'id': task-1122201, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 989.726752] env[62070]: DEBUG oslo_vmware.api [None req-54b03663-a039-4755-80d7-40a200b8e0db tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Task: {'id': task-1122200, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 989.789024] env[62070]: DEBUG nova.compute.manager [req-d5f4e654-c18f-4d9b-8c48-81871cc4adf7 req-1b965af0-f419-407f-8939-abb050543b84 service nova] [instance: f810eab8-1c8c-4f7a-8acd-f46ac5e6d31f] Received event network-vif-plugged-0b063dd9-e920-4bb9-88bf-9f98085170bb {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 989.789232] env[62070]: DEBUG oslo_concurrency.lockutils [req-d5f4e654-c18f-4d9b-8c48-81871cc4adf7 req-1b965af0-f419-407f-8939-abb050543b84 service nova] Acquiring lock "f810eab8-1c8c-4f7a-8acd-f46ac5e6d31f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 989.789474] env[62070]: DEBUG oslo_concurrency.lockutils [req-d5f4e654-c18f-4d9b-8c48-81871cc4adf7 req-1b965af0-f419-407f-8939-abb050543b84 service nova] Lock "f810eab8-1c8c-4f7a-8acd-f46ac5e6d31f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 989.789632] env[62070]: DEBUG oslo_concurrency.lockutils [req-d5f4e654-c18f-4d9b-8c48-81871cc4adf7 req-1b965af0-f419-407f-8939-abb050543b84 service nova] Lock "f810eab8-1c8c-4f7a-8acd-f46ac5e6d31f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 989.789811] env[62070]: DEBUG nova.compute.manager [req-d5f4e654-c18f-4d9b-8c48-81871cc4adf7 req-1b965af0-f419-407f-8939-abb050543b84 service nova] [instance: f810eab8-1c8c-4f7a-8acd-f46ac5e6d31f] No waiting events found dispatching network-vif-plugged-0b063dd9-e920-4bb9-88bf-9f98085170bb {{(pid=62070) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 989.789987] env[62070]: WARNING nova.compute.manager [req-d5f4e654-c18f-4d9b-8c48-81871cc4adf7 req-1b965af0-f419-407f-8939-abb050543b84 service nova] [instance: f810eab8-1c8c-4f7a-8acd-f46ac5e6d31f] Received unexpected event network-vif-plugged-0b063dd9-e920-4bb9-88bf-9f98085170bb for instance with vm_state building and task_state spawning. 
[ 989.870955] env[62070]: DEBUG nova.scheduler.client.report [None req-e024cd23-3abe-484c-b9ec-a73746a38127 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Updated inventory for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 with generation 132 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 989.871282] env[62070]: DEBUG nova.compute.provider_tree [None req-e024cd23-3abe-484c-b9ec-a73746a38127 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Updating resource provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 generation from 132 to 133 during operation: update_inventory {{(pid=62070) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 989.871471] env[62070]: DEBUG nova.compute.provider_tree [None req-e024cd23-3abe-484c-b9ec-a73746a38127 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Updating inventory in ProviderTree for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 989.882339] env[62070]: DEBUG nova.network.neutron [None req-10516eed-24ef-4548-bde8-566a3d8bb59d tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: f810eab8-1c8c-4f7a-8acd-f46ac5e6d31f] Successfully updated port: 0b063dd9-e920-4bb9-88bf-9f98085170bb {{(pid=62070) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 990.131803] env[62070]: DEBUG oslo_vmware.api [None req-ca3baa9f-e172-43d5-a6ee-07c9fa3a6821 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Task: {'id': task-1122201, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.075128} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 990.132109] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-ca3baa9f-e172-43d5-a6ee-07c9fa3a6821 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 6cba961f-f9f9-4d3c-853a-049a014c9dbb] Extended root virtual disk {{(pid=62070) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 990.132891] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58b548bc-c0f2-4855-ac29-4c4c428a6cbf {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.154656] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-ca3baa9f-e172-43d5-a6ee-07c9fa3a6821 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 6cba961f-f9f9-4d3c-853a-049a014c9dbb] Reconfiguring VM instance instance-0000005c to attach disk [datastore1] 6cba961f-f9f9-4d3c-853a-049a014c9dbb/6cba961f-f9f9-4d3c-853a-049a014c9dbb.vmdk or device None with type sparse {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 990.155264] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3994da7a-043a-4e6b-b3a6-b5131fec8d36 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.176404] env[62070]: DEBUG oslo_vmware.api [None req-ca3baa9f-e172-43d5-a6ee-07c9fa3a6821 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Waiting for the task: (returnval){ [ 990.176404] env[62070]: value = "task-1122202" [ 990.176404] env[62070]: _type = "Task" [ 990.176404] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 990.184700] env[62070]: DEBUG oslo_vmware.api [None req-ca3baa9f-e172-43d5-a6ee-07c9fa3a6821 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Task: {'id': task-1122202, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 990.226240] env[62070]: DEBUG oslo_vmware.api [None req-54b03663-a039-4755-80d7-40a200b8e0db tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Task: {'id': task-1122200, 'name': PowerOnVM_Task, 'duration_secs': 0.550612} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 990.226539] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-54b03663-a039-4755-80d7-40a200b8e0db tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] [instance: e4cf42ff-8440-42bc-b629-4b712fd94e99] Powered on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 990.226794] env[62070]: INFO nova.compute.manager [None req-54b03663-a039-4755-80d7-40a200b8e0db tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] [instance: e4cf42ff-8440-42bc-b629-4b712fd94e99] Took 7.42 seconds to spawn the instance on the hypervisor. 
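The inventory pushed to Placement above lists total, reserved, allocation_ratio and max_unit per resource class for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6. Assuming the usual Placement capacity rule, the schedulable capacity is (total - reserved) * allocation_ratio, while max_unit independently caps what a single allocation may request. A worked example with the figures from that record:

    # Inventory as reported in the log (generation 132 -> 133).
    inventory = {
        "VCPU": {"total": 48, "reserved": 0, "allocation_ratio": 4.0, "max_unit": 16},
        "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0, "max_unit": 65530},
        "DISK_GB": {"total": 400, "reserved": 0, "allocation_ratio": 1.0, "max_unit": 169},
    }

    for rc, inv in inventory.items():
        # Assumed capacity rule: (total - reserved) * allocation_ratio.
        capacity = (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
        print(f"{rc}: schedulable capacity = {capacity:g}, per-allocation cap (max_unit) = {inv['max_unit']}")

    # VCPU: 192, MEMORY_MB: 196078, DISK_GB: 400

So the 4.0 CPU allocation ratio turns 48 physical cores into 192 schedulable VCPU, whereas memory and disk are not overcommitted here.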
[ 990.227028] env[62070]: DEBUG nova.compute.manager [None req-54b03663-a039-4755-80d7-40a200b8e0db tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] [instance: e4cf42ff-8440-42bc-b629-4b712fd94e99] Checking state {{(pid=62070) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 990.227841] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fce13551-1c3c-4418-a0fe-252e9ed682ab {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.376899] env[62070]: DEBUG oslo_concurrency.lockutils [None req-e024cd23-3abe-484c-b9ec-a73746a38127 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.906s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 990.379395] env[62070]: DEBUG oslo_concurrency.lockutils [None req-5af751bb-55de-4464-945f-4333bd2b85cc tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 10.670s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 990.384017] env[62070]: DEBUG oslo_concurrency.lockutils [None req-10516eed-24ef-4548-bde8-566a3d8bb59d tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Acquiring lock "refresh_cache-f810eab8-1c8c-4f7a-8acd-f46ac5e6d31f" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 990.384153] env[62070]: DEBUG oslo_concurrency.lockutils [None req-10516eed-24ef-4548-bde8-566a3d8bb59d tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Acquired lock "refresh_cache-f810eab8-1c8c-4f7a-8acd-f46ac5e6d31f" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 990.384293] env[62070]: DEBUG nova.network.neutron [None req-10516eed-24ef-4548-bde8-566a3d8bb59d tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: f810eab8-1c8c-4f7a-8acd-f46ac5e6d31f] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 990.399255] env[62070]: INFO nova.scheduler.client.report [None req-e024cd23-3abe-484c-b9ec-a73746a38127 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Deleted allocations for instance cf52cee8-874e-44e8-a36e-49ac20f3e312 [ 990.687945] env[62070]: DEBUG oslo_vmware.api [None req-ca3baa9f-e172-43d5-a6ee-07c9fa3a6821 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Task: {'id': task-1122202, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 990.745981] env[62070]: INFO nova.compute.manager [None req-54b03663-a039-4755-80d7-40a200b8e0db tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] [instance: e4cf42ff-8440-42bc-b629-4b712fd94e99] Took 19.87 seconds to build instance. 
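The INFO lines "Took 7.42 seconds to spawn the instance on the hypervisor" and "Took 19.87 seconds to build instance" are convenient hooks for pulling per-instance timings out of a run like this one. A hedged sketch of a small parser over a nova-compute log file; the file path and the exact message wording are assumptions to adjust as needed:

    import re

    # Matches "[instance: <uuid>] Took <secs> seconds to <phase>" records.
    TIMING = re.compile(
        r"\[instance: (?P<uuid>[0-9a-f-]{36})\] "
        r"Took (?P<secs>[0-9.]+) seconds to (?P<what>spawn the instance on the hypervisor"
        r"|build instance|destroy the instance on the hypervisor)"
    )

    def instance_timings(log_path="nova-compute.log"):
        """Map instance uuid -> {phase: seconds} from 'Took N seconds to ...' lines."""
        results = {}
        with open(log_path) as fh:
            for line in fh:
                m = TIMING.search(line)
                if m:
                    results.setdefault(m.group("uuid"), {})[m.group("what")] = float(m.group("secs"))
        return results

Run against this section it would report, for example, e4cf42ff-8440-42bc-b629-4b712fd94e99 spawning in 7.42 s and building in 19.87 s.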
[ 990.887026] env[62070]: INFO nova.compute.claims [None req-5af751bb-55de-4464-945f-4333bd2b85cc tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: b101c79a-abfd-4104-aaed-096995fb2337] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 990.906156] env[62070]: DEBUG oslo_concurrency.lockutils [None req-e024cd23-3abe-484c-b9ec-a73746a38127 tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Lock "cf52cee8-874e-44e8-a36e-49ac20f3e312" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 15.133s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 990.918118] env[62070]: DEBUG nova.network.neutron [None req-10516eed-24ef-4548-bde8-566a3d8bb59d tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: f810eab8-1c8c-4f7a-8acd-f46ac5e6d31f] Instance cache missing network info. {{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 991.090865] env[62070]: DEBUG nova.network.neutron [None req-10516eed-24ef-4548-bde8-566a3d8bb59d tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: f810eab8-1c8c-4f7a-8acd-f46ac5e6d31f] Updating instance_info_cache with network_info: [{"id": "0b063dd9-e920-4bb9-88bf-9f98085170bb", "address": "fa:16:3e:57:b6:af", "network": {"id": "516790be-56b8-409d-b1c0-a8683a45a9ec", "bridge": "br-int", "label": "tempest-ServersTestJSON-693737631-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "91e246e32f29422e90fae974cfee9d8f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "359850cc-b061-4c9c-a61c-eb42e0f7c359", "external-id": "nsx-vlan-transportzone-113", "segmentation_id": 113, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0b063dd9-e9", "ovs_interfaceid": "0b063dd9-e920-4bb9-88bf-9f98085170bb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 991.138939] env[62070]: DEBUG oslo_concurrency.lockutils [None req-63c53e37-78c7-4c69-b978-95d3e246ef28 tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Acquiring lock "e4cf42ff-8440-42bc-b629-4b712fd94e99" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 991.189166] env[62070]: DEBUG oslo_vmware.api [None req-ca3baa9f-e172-43d5-a6ee-07c9fa3a6821 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Task: {'id': task-1122202, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 991.248402] env[62070]: DEBUG oslo_concurrency.lockutils [None req-54b03663-a039-4755-80d7-40a200b8e0db tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Lock "e4cf42ff-8440-42bc-b629-4b712fd94e99" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 21.386s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 991.248402] env[62070]: DEBUG oslo_concurrency.lockutils [None req-63c53e37-78c7-4c69-b978-95d3e246ef28 tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Lock "e4cf42ff-8440-42bc-b629-4b712fd94e99" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.109s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 991.248402] env[62070]: DEBUG oslo_concurrency.lockutils [None req-63c53e37-78c7-4c69-b978-95d3e246ef28 tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Acquiring lock "e4cf42ff-8440-42bc-b629-4b712fd94e99-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 991.248402] env[62070]: DEBUG oslo_concurrency.lockutils [None req-63c53e37-78c7-4c69-b978-95d3e246ef28 tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Lock "e4cf42ff-8440-42bc-b629-4b712fd94e99-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 991.248819] env[62070]: DEBUG oslo_concurrency.lockutils [None req-63c53e37-78c7-4c69-b978-95d3e246ef28 tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Lock "e4cf42ff-8440-42bc-b629-4b712fd94e99-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 991.250934] env[62070]: INFO nova.compute.manager [None req-63c53e37-78c7-4c69-b978-95d3e246ef28 tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] [instance: e4cf42ff-8440-42bc-b629-4b712fd94e99] Terminating instance [ 991.253271] env[62070]: DEBUG nova.compute.manager [None req-63c53e37-78c7-4c69-b978-95d3e246ef28 tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] [instance: e4cf42ff-8440-42bc-b629-4b712fd94e99] Start destroying the instance on the hypervisor. 
{{(pid=62070) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 991.253523] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-63c53e37-78c7-4c69-b978-95d3e246ef28 tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] [instance: e4cf42ff-8440-42bc-b629-4b712fd94e99] Destroying instance {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 991.254413] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da1d1124-e86b-4548-8217-63d115a4a4b4 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.258343] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-0f7be80c-62fb-418e-8b92-cd4fcab4bae0 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 84c00e4a-20d3-4739-8535-e27076d85a89] Volume attach. Driver type: vmdk {{(pid=62070) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 991.258530] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-0f7be80c-62fb-418e-8b92-cd4fcab4bae0 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 84c00e4a-20d3-4739-8535-e27076d85a89] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-245480', 'volume_id': '773d984d-4185-4716-a44d-6527016a9c86', 'name': 'volume-773d984d-4185-4716-a44d-6527016a9c86', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '84c00e4a-20d3-4739-8535-e27076d85a89', 'attached_at': '', 'detached_at': '', 'volume_id': '773d984d-4185-4716-a44d-6527016a9c86', 'serial': '773d984d-4185-4716-a44d-6527016a9c86'} {{(pid=62070) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 991.259300] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9da3cb57-28fc-4f51-a1e8-8c62f9a65df7 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.277379] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-63c53e37-78c7-4c69-b978-95d3e246ef28 tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] [instance: e4cf42ff-8440-42bc-b629-4b712fd94e99] Powering off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 991.278136] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-01ddb80f-e992-4bf9-999d-16483586b71c {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.280086] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfaab7fe-e592-4365-b670-ed851dae80c8 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.309207] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-0f7be80c-62fb-418e-8b92-cd4fcab4bae0 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 84c00e4a-20d3-4739-8535-e27076d85a89] Reconfiguring VM instance instance-00000053 to attach disk [datastore1] volume-773d984d-4185-4716-a44d-6527016a9c86/volume-773d984d-4185-4716-a44d-6527016a9c86.vmdk or device None with 
type thin {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 991.311021] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-05430b95-d918-4ce0-bdf0-34d3dda247fe {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.324611] env[62070]: DEBUG oslo_vmware.api [None req-63c53e37-78c7-4c69-b978-95d3e246ef28 tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Waiting for the task: (returnval){ [ 991.324611] env[62070]: value = "task-1122203" [ 991.324611] env[62070]: _type = "Task" [ 991.324611] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 991.331258] env[62070]: DEBUG oslo_vmware.api [None req-0f7be80c-62fb-418e-8b92-cd4fcab4bae0 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Waiting for the task: (returnval){ [ 991.331258] env[62070]: value = "task-1122204" [ 991.331258] env[62070]: _type = "Task" [ 991.331258] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 991.334322] env[62070]: DEBUG oslo_vmware.api [None req-63c53e37-78c7-4c69-b978-95d3e246ef28 tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Task: {'id': task-1122203, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 991.342876] env[62070]: DEBUG oslo_vmware.api [None req-0f7be80c-62fb-418e-8b92-cd4fcab4bae0 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': task-1122204, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 991.394496] env[62070]: INFO nova.compute.resource_tracker [None req-5af751bb-55de-4464-945f-4333bd2b85cc tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: b101c79a-abfd-4104-aaed-096995fb2337] Updating resource usage from migration bc687c47-1f98-4813-8b35-ebea64e5ada5 [ 991.508709] env[62070]: DEBUG oslo_concurrency.lockutils [None req-910bde70-333a-4387-adb0-10c731f0cbeb tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Acquiring lock "21bcb1a6-833b-48f3-8ee2-0e49c64a104f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 991.508994] env[62070]: DEBUG oslo_concurrency.lockutils [None req-910bde70-333a-4387-adb0-10c731f0cbeb tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Lock "21bcb1a6-833b-48f3-8ee2-0e49c64a104f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 991.509236] env[62070]: DEBUG oslo_concurrency.lockutils [None req-910bde70-333a-4387-adb0-10c731f0cbeb tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Acquiring lock "21bcb1a6-833b-48f3-8ee2-0e49c64a104f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 991.509453] env[62070]: DEBUG oslo_concurrency.lockutils [None req-910bde70-333a-4387-adb0-10c731f0cbeb tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Lock "21bcb1a6-833b-48f3-8ee2-0e49c64a104f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 991.509704] env[62070]: DEBUG oslo_concurrency.lockutils [None req-910bde70-333a-4387-adb0-10c731f0cbeb tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Lock "21bcb1a6-833b-48f3-8ee2-0e49c64a104f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 991.514495] env[62070]: INFO nova.compute.manager [None req-910bde70-333a-4387-adb0-10c731f0cbeb tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] [instance: 21bcb1a6-833b-48f3-8ee2-0e49c64a104f] Terminating instance [ 991.516515] env[62070]: DEBUG nova.compute.manager [None req-910bde70-333a-4387-adb0-10c731f0cbeb tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] [instance: 21bcb1a6-833b-48f3-8ee2-0e49c64a104f] Start destroying the instance on the hypervisor. 
{{(pid=62070) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 991.516766] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-910bde70-333a-4387-adb0-10c731f0cbeb tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] [instance: 21bcb1a6-833b-48f3-8ee2-0e49c64a104f] Destroying instance {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 991.517641] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-488c6000-cf12-4c9b-b985-da23d9d4eb11 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.526249] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-910bde70-333a-4387-adb0-10c731f0cbeb tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] [instance: 21bcb1a6-833b-48f3-8ee2-0e49c64a104f] Powering off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 991.528912] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a7bc26fd-1655-4718-a8f7-9f5fb9f4c870 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.536787] env[62070]: DEBUG oslo_vmware.api [None req-910bde70-333a-4387-adb0-10c731f0cbeb tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Waiting for the task: (returnval){ [ 991.536787] env[62070]: value = "task-1122205" [ 991.536787] env[62070]: _type = "Task" [ 991.536787] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 991.560992] env[62070]: DEBUG oslo_vmware.api [None req-910bde70-333a-4387-adb0-10c731f0cbeb tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Task: {'id': task-1122205, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 991.593022] env[62070]: DEBUG oslo_concurrency.lockutils [None req-10516eed-24ef-4548-bde8-566a3d8bb59d tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Releasing lock "refresh_cache-f810eab8-1c8c-4f7a-8acd-f46ac5e6d31f" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 991.593383] env[62070]: DEBUG nova.compute.manager [None req-10516eed-24ef-4548-bde8-566a3d8bb59d tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: f810eab8-1c8c-4f7a-8acd-f46ac5e6d31f] Instance network_info: |[{"id": "0b063dd9-e920-4bb9-88bf-9f98085170bb", "address": "fa:16:3e:57:b6:af", "network": {"id": "516790be-56b8-409d-b1c0-a8683a45a9ec", "bridge": "br-int", "label": "tempest-ServersTestJSON-693737631-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "91e246e32f29422e90fae974cfee9d8f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "359850cc-b061-4c9c-a61c-eb42e0f7c359", "external-id": "nsx-vlan-transportzone-113", "segmentation_id": 113, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0b063dd9-e9", "ovs_interfaceid": "0b063dd9-e920-4bb9-88bf-9f98085170bb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 991.594096] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-10516eed-24ef-4548-bde8-566a3d8bb59d tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: f810eab8-1c8c-4f7a-8acd-f46ac5e6d31f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:57:b6:af', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '359850cc-b061-4c9c-a61c-eb42e0f7c359', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0b063dd9-e920-4bb9-88bf-9f98085170bb', 'vif_model': 'vmxnet3'}] {{(pid=62070) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 991.601614] env[62070]: DEBUG oslo.service.loopingcall [None req-10516eed-24ef-4548-bde8-566a3d8bb59d tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 991.601845] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f810eab8-1c8c-4f7a-8acd-f46ac5e6d31f] Creating VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 991.602080] env[62070]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-120b8fd3-aebe-45f1-8e67-dc96b4f276af {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.624632] env[62070]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 991.624632] env[62070]: value = "task-1122206" [ 991.624632] env[62070]: _type = "Task" [ 991.624632] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 991.632590] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1122206, 'name': CreateVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 991.672132] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3db160d1-1c6f-406f-b931-7ce2c7a5bdc7 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.683777] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1112b7c-a380-4bc3-a637-22f36695ec6c {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.693567] env[62070]: DEBUG oslo_vmware.api [None req-ca3baa9f-e172-43d5-a6ee-07c9fa3a6821 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Task: {'id': task-1122202, 'name': ReconfigVM_Task, 'duration_secs': 1.070772} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 991.718834] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-ca3baa9f-e172-43d5-a6ee-07c9fa3a6821 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 6cba961f-f9f9-4d3c-853a-049a014c9dbb] Reconfigured VM instance instance-0000005c to attach disk [datastore1] 6cba961f-f9f9-4d3c-853a-049a014c9dbb/6cba961f-f9f9-4d3c-853a-049a014c9dbb.vmdk or device None with type sparse {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 991.719857] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b4849e6b-a058-4c29-8426-c108c817a970 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.722348] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74d56f85-1799-4c01-bf61-8e5c6235435a {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.733689] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fefb8181-287e-4ccd-8912-6d0f48e15e9d {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.738336] env[62070]: DEBUG oslo_vmware.api [None req-ca3baa9f-e172-43d5-a6ee-07c9fa3a6821 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Waiting for the task: (returnval){ [ 991.738336] env[62070]: value = "task-1122207" [ 991.738336] env[62070]: _type = "Task" [ 991.738336] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 991.752774] env[62070]: DEBUG nova.compute.provider_tree [None req-5af751bb-55de-4464-945f-4333bd2b85cc tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Updating inventory in ProviderTree for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 991.761497] env[62070]: DEBUG oslo_vmware.api [None req-ca3baa9f-e172-43d5-a6ee-07c9fa3a6821 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Task: {'id': task-1122207, 'name': Rename_Task} progress is 10%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 991.817797] env[62070]: DEBUG nova.compute.manager [req-56225b41-ac75-40c8-abad-08a0d7a9c33c req-d9cb0957-6fca-4332-b16a-462a8187e931 service nova] [instance: f810eab8-1c8c-4f7a-8acd-f46ac5e6d31f] Received event network-changed-0b063dd9-e920-4bb9-88bf-9f98085170bb {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 991.817797] env[62070]: DEBUG nova.compute.manager [req-56225b41-ac75-40c8-abad-08a0d7a9c33c req-d9cb0957-6fca-4332-b16a-462a8187e931 service nova] [instance: f810eab8-1c8c-4f7a-8acd-f46ac5e6d31f] Refreshing instance network info cache due to event network-changed-0b063dd9-e920-4bb9-88bf-9f98085170bb. {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 991.817966] env[62070]: DEBUG oslo_concurrency.lockutils [req-56225b41-ac75-40c8-abad-08a0d7a9c33c req-d9cb0957-6fca-4332-b16a-462a8187e931 service nova] Acquiring lock "refresh_cache-f810eab8-1c8c-4f7a-8acd-f46ac5e6d31f" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 991.818139] env[62070]: DEBUG oslo_concurrency.lockutils [req-56225b41-ac75-40c8-abad-08a0d7a9c33c req-d9cb0957-6fca-4332-b16a-462a8187e931 service nova] Acquired lock "refresh_cache-f810eab8-1c8c-4f7a-8acd-f46ac5e6d31f" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 991.818312] env[62070]: DEBUG nova.network.neutron [req-56225b41-ac75-40c8-abad-08a0d7a9c33c req-d9cb0957-6fca-4332-b16a-462a8187e931 service nova] [instance: f810eab8-1c8c-4f7a-8acd-f46ac5e6d31f] Refreshing network info cache for port 0b063dd9-e920-4bb9-88bf-9f98085170bb {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 991.836366] env[62070]: DEBUG oslo_vmware.api [None req-63c53e37-78c7-4c69-b978-95d3e246ef28 tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Task: {'id': task-1122203, 'name': PowerOffVM_Task, 'duration_secs': 0.170194} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 991.839614] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-63c53e37-78c7-4c69-b978-95d3e246ef28 tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] [instance: e4cf42ff-8440-42bc-b629-4b712fd94e99] Powered off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 991.839806] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-63c53e37-78c7-4c69-b978-95d3e246ef28 tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] [instance: e4cf42ff-8440-42bc-b629-4b712fd94e99] Unregistering the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 991.840371] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-661ab65d-e2b4-43dc-a83c-2b30cc2c8c2d {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.847653] env[62070]: DEBUG oslo_vmware.api [None req-0f7be80c-62fb-418e-8b92-cd4fcab4bae0 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': task-1122204, 'name': ReconfigVM_Task, 'duration_secs': 0.399346} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 991.847960] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-0f7be80c-62fb-418e-8b92-cd4fcab4bae0 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 84c00e4a-20d3-4739-8535-e27076d85a89] Reconfigured VM instance instance-00000053 to attach disk [datastore1] volume-773d984d-4185-4716-a44d-6527016a9c86/volume-773d984d-4185-4716-a44d-6527016a9c86.vmdk or device None with type thin {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 991.853159] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d983372e-e395-4d2c-aed5-72ed2f9a4e88 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.871571] env[62070]: DEBUG oslo_vmware.api [None req-0f7be80c-62fb-418e-8b92-cd4fcab4bae0 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Waiting for the task: (returnval){ [ 991.871571] env[62070]: value = "task-1122209" [ 991.871571] env[62070]: _type = "Task" [ 991.871571] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 991.886028] env[62070]: DEBUG oslo_vmware.api [None req-0f7be80c-62fb-418e-8b92-cd4fcab4bae0 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': task-1122209, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 992.047492] env[62070]: DEBUG oslo_vmware.api [None req-910bde70-333a-4387-adb0-10c731f0cbeb tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Task: {'id': task-1122205, 'name': PowerOffVM_Task, 'duration_secs': 0.246347} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 992.047959] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-910bde70-333a-4387-adb0-10c731f0cbeb tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] [instance: 21bcb1a6-833b-48f3-8ee2-0e49c64a104f] Powered off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 992.048206] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-910bde70-333a-4387-adb0-10c731f0cbeb tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] [instance: 21bcb1a6-833b-48f3-8ee2-0e49c64a104f] Unregistering the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 992.048506] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e4d3e602-a95f-4962-893d-4f63e2ae7de6 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.119401] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-910bde70-333a-4387-adb0-10c731f0cbeb tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] [instance: 21bcb1a6-833b-48f3-8ee2-0e49c64a104f] Unregistered the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 992.119668] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-910bde70-333a-4387-adb0-10c731f0cbeb tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] [instance: 21bcb1a6-833b-48f3-8ee2-0e49c64a104f] Deleting contents of the VM from datastore datastore2 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 992.119815] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-910bde70-333a-4387-adb0-10c731f0cbeb tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Deleting the datastore file [datastore2] 21bcb1a6-833b-48f3-8ee2-0e49c64a104f {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 992.120096] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7b824802-32ac-4bcb-b603-905406765748 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.132060] env[62070]: DEBUG oslo_vmware.api [None req-910bde70-333a-4387-adb0-10c731f0cbeb tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Waiting for the task: (returnval){ [ 992.132060] env[62070]: value = "task-1122211" [ 992.132060] env[62070]: _type = "Task" [ 992.132060] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 992.203746] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1122206, 'name': CreateVM_Task, 'duration_secs': 0.3692} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 992.203746] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f810eab8-1c8c-4f7a-8acd-f46ac5e6d31f] Created VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 992.203746] env[62070]: DEBUG oslo_concurrency.lockutils [None req-10516eed-24ef-4548-bde8-566a3d8bb59d tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 992.203746] env[62070]: DEBUG oslo_concurrency.lockutils [None req-10516eed-24ef-4548-bde8-566a3d8bb59d tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Acquired lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 992.203746] env[62070]: DEBUG oslo_concurrency.lockutils [None req-10516eed-24ef-4548-bde8-566a3d8bb59d tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 992.203746] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7c3bf7a6-9f7a-4e2b-947f-448e54479f72 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.203746] env[62070]: DEBUG oslo_vmware.api [None req-910bde70-333a-4387-adb0-10c731f0cbeb tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Task: {'id': task-1122211, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 992.203746] env[62070]: DEBUG oslo_vmware.api [None req-10516eed-24ef-4548-bde8-566a3d8bb59d tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Waiting for the task: (returnval){ [ 992.203746] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]52c7e3e8-b3b7-3b9c-64c5-ee6fc295adea" [ 992.203746] env[62070]: _type = "Task" [ 992.203746] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 992.203746] env[62070]: DEBUG oslo_vmware.api [None req-10516eed-24ef-4548-bde8-566a3d8bb59d tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52c7e3e8-b3b7-3b9c-64c5-ee6fc295adea, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 992.249377] env[62070]: DEBUG oslo_vmware.api [None req-ca3baa9f-e172-43d5-a6ee-07c9fa3a6821 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Task: {'id': task-1122207, 'name': Rename_Task, 'duration_secs': 0.17281} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 992.249744] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-ca3baa9f-e172-43d5-a6ee-07c9fa3a6821 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 6cba961f-f9f9-4d3c-853a-049a014c9dbb] Powering on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 992.250019] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-41db491e-8aef-4e3a-ac01-790908f16f5d {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.261594] env[62070]: DEBUG oslo_vmware.api [None req-ca3baa9f-e172-43d5-a6ee-07c9fa3a6821 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Waiting for the task: (returnval){ [ 992.261594] env[62070]: value = "task-1122212" [ 992.261594] env[62070]: _type = "Task" [ 992.261594] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 992.273996] env[62070]: DEBUG oslo_vmware.api [None req-ca3baa9f-e172-43d5-a6ee-07c9fa3a6821 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Task: {'id': task-1122212, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 992.277542] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-63c53e37-78c7-4c69-b978-95d3e246ef28 tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] [instance: e4cf42ff-8440-42bc-b629-4b712fd94e99] Unregistered the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 992.278085] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-63c53e37-78c7-4c69-b978-95d3e246ef28 tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] [instance: e4cf42ff-8440-42bc-b629-4b712fd94e99] Deleting contents of the VM from datastore datastore1 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 992.278362] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-63c53e37-78c7-4c69-b978-95d3e246ef28 tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Deleting the datastore file [datastore1] e4cf42ff-8440-42bc-b629-4b712fd94e99 {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 992.278656] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-94d6ec1f-ef2e-45e7-92c5-841772fa6462 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.287151] env[62070]: DEBUG oslo_vmware.api [None req-63c53e37-78c7-4c69-b978-95d3e246ef28 tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Waiting for the task: (returnval){ [ 992.287151] env[62070]: value = "task-1122213" [ 992.287151] env[62070]: _type = "Task" [ 992.287151] env[62070]: } to complete. 
{{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 992.293897] env[62070]: DEBUG nova.scheduler.client.report [None req-5af751bb-55de-4464-945f-4333bd2b85cc tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Updated inventory for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 with generation 133 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 992.294183] env[62070]: DEBUG nova.compute.provider_tree [None req-5af751bb-55de-4464-945f-4333bd2b85cc tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Updating resource provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 generation from 133 to 134 during operation: update_inventory {{(pid=62070) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 992.294370] env[62070]: DEBUG nova.compute.provider_tree [None req-5af751bb-55de-4464-945f-4333bd2b85cc tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Updating inventory in ProviderTree for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 992.301358] env[62070]: DEBUG oslo_vmware.api [None req-63c53e37-78c7-4c69-b978-95d3e246ef28 tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Task: {'id': task-1122213, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 992.383250] env[62070]: DEBUG oslo_vmware.api [None req-0f7be80c-62fb-418e-8b92-cd4fcab4bae0 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': task-1122209, 'name': ReconfigVM_Task, 'duration_secs': 0.176302} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 992.383471] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-0f7be80c-62fb-418e-8b92-cd4fcab4bae0 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 84c00e4a-20d3-4739-8535-e27076d85a89] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-245480', 'volume_id': '773d984d-4185-4716-a44d-6527016a9c86', 'name': 'volume-773d984d-4185-4716-a44d-6527016a9c86', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '84c00e4a-20d3-4739-8535-e27076d85a89', 'attached_at': '', 'detached_at': '', 'volume_id': '773d984d-4185-4716-a44d-6527016a9c86', 'serial': '773d984d-4185-4716-a44d-6527016a9c86'} {{(pid=62070) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 992.568201] env[62070]: DEBUG nova.network.neutron [req-56225b41-ac75-40c8-abad-08a0d7a9c33c req-d9cb0957-6fca-4332-b16a-462a8187e931 service nova] [instance: f810eab8-1c8c-4f7a-8acd-f46ac5e6d31f] Updated VIF entry in instance network info cache for port 0b063dd9-e920-4bb9-88bf-9f98085170bb. {{(pid=62070) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 992.568586] env[62070]: DEBUG nova.network.neutron [req-56225b41-ac75-40c8-abad-08a0d7a9c33c req-d9cb0957-6fca-4332-b16a-462a8187e931 service nova] [instance: f810eab8-1c8c-4f7a-8acd-f46ac5e6d31f] Updating instance_info_cache with network_info: [{"id": "0b063dd9-e920-4bb9-88bf-9f98085170bb", "address": "fa:16:3e:57:b6:af", "network": {"id": "516790be-56b8-409d-b1c0-a8683a45a9ec", "bridge": "br-int", "label": "tempest-ServersTestJSON-693737631-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "91e246e32f29422e90fae974cfee9d8f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "359850cc-b061-4c9c-a61c-eb42e0f7c359", "external-id": "nsx-vlan-transportzone-113", "segmentation_id": 113, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0b063dd9-e9", "ovs_interfaceid": "0b063dd9-e920-4bb9-88bf-9f98085170bb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 992.642633] env[62070]: DEBUG oslo_vmware.api [None req-910bde70-333a-4387-adb0-10c731f0cbeb tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Task: {'id': task-1122211, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.164954} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 992.643183] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-910bde70-333a-4387-adb0-10c731f0cbeb tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Deleted the datastore file {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 992.643387] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-910bde70-333a-4387-adb0-10c731f0cbeb tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] [instance: 21bcb1a6-833b-48f3-8ee2-0e49c64a104f] Deleted contents of the VM from datastore datastore2 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 992.643569] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-910bde70-333a-4387-adb0-10c731f0cbeb tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] [instance: 21bcb1a6-833b-48f3-8ee2-0e49c64a104f] Instance destroyed {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 992.643747] env[62070]: INFO nova.compute.manager [None req-910bde70-333a-4387-adb0-10c731f0cbeb tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] [instance: 21bcb1a6-833b-48f3-8ee2-0e49c64a104f] Took 1.13 seconds to destroy the instance on the hypervisor. [ 992.644036] env[62070]: DEBUG oslo.service.loopingcall [None req-910bde70-333a-4387-adb0-10c731f0cbeb tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 992.644253] env[62070]: DEBUG nova.compute.manager [-] [instance: 21bcb1a6-833b-48f3-8ee2-0e49c64a104f] Deallocating network for instance {{(pid=62070) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 992.644348] env[62070]: DEBUG nova.network.neutron [-] [instance: 21bcb1a6-833b-48f3-8ee2-0e49c64a104f] deallocate_for_instance() {{(pid=62070) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 992.665761] env[62070]: DEBUG oslo_vmware.api [None req-10516eed-24ef-4548-bde8-566a3d8bb59d tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52c7e3e8-b3b7-3b9c-64c5-ee6fc295adea, 'name': SearchDatastore_Task, 'duration_secs': 0.010828} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 992.666047] env[62070]: DEBUG oslo_concurrency.lockutils [None req-10516eed-24ef-4548-bde8-566a3d8bb59d tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Releasing lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 992.666291] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-10516eed-24ef-4548-bde8-566a3d8bb59d tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: f810eab8-1c8c-4f7a-8acd-f46ac5e6d31f] Processing image 43ea607c-7ece-4601-9b11-75c6a16aa7dd {{(pid=62070) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 992.666531] env[62070]: DEBUG oslo_concurrency.lockutils [None req-10516eed-24ef-4548-bde8-566a3d8bb59d tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 992.666684] env[62070]: DEBUG oslo_concurrency.lockutils [None req-10516eed-24ef-4548-bde8-566a3d8bb59d tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Acquired lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 992.666872] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-10516eed-24ef-4548-bde8-566a3d8bb59d tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 992.667150] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-09a45ada-9eb6-40a3-b30e-3b8bddf6db12 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.681421] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-10516eed-24ef-4548-bde8-566a3d8bb59d tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 992.681606] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-10516eed-24ef-4548-bde8-566a3d8bb59d tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62070) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 992.682323] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-40b4fe23-644d-4ca2-a668-0cefbd07a58c {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.687571] env[62070]: DEBUG oslo_vmware.api [None req-10516eed-24ef-4548-bde8-566a3d8bb59d tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Waiting for the task: (returnval){ [ 992.687571] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]52dff24d-deb0-a843-def2-f15b150e4224" [ 992.687571] env[62070]: _type = "Task" [ 992.687571] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 992.695089] env[62070]: DEBUG oslo_vmware.api [None req-10516eed-24ef-4548-bde8-566a3d8bb59d tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52dff24d-deb0-a843-def2-f15b150e4224, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 992.772750] env[62070]: DEBUG oslo_vmware.api [None req-ca3baa9f-e172-43d5-a6ee-07c9fa3a6821 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Task: {'id': task-1122212, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 992.796598] env[62070]: DEBUG oslo_vmware.api [None req-63c53e37-78c7-4c69-b978-95d3e246ef28 tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Task: {'id': task-1122213, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.17416} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 992.796864] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-63c53e37-78c7-4c69-b978-95d3e246ef28 tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Deleted the datastore file {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 992.797070] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-63c53e37-78c7-4c69-b978-95d3e246ef28 tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] [instance: e4cf42ff-8440-42bc-b629-4b712fd94e99] Deleted contents of the VM from datastore datastore1 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 992.797254] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-63c53e37-78c7-4c69-b978-95d3e246ef28 tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] [instance: e4cf42ff-8440-42bc-b629-4b712fd94e99] Instance destroyed {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 992.797431] env[62070]: INFO nova.compute.manager [None req-63c53e37-78c7-4c69-b978-95d3e246ef28 tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] [instance: e4cf42ff-8440-42bc-b629-4b712fd94e99] Took 1.54 seconds to destroy the instance on the hypervisor. 
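
The SearchDatastore_Task records above show the wait_for_task pattern used throughout this log: a vCenter task is submitted, its opaque handle is logged, and the API layer polls it until completion, logging "progress is N%" along the way. The following is a generic illustration of that poll loop rather than the oslo.vmware implementation; get_task_info is a hypothetical stand-in for a vCenter task-info query.

    import time

    # Illustrative poll loop behind the "progress is N%" / "completed successfully"
    # records above. Not oslo.vmware code: get_task_info is a hypothetical callable
    # returning a dict with 'state' and optional 'progress', 'result', and 'error'.
    def wait_for_task(get_task_info, interval=0.5):
        while True:
            info = get_task_info()
            if info['state'] == 'success':
                return info.get('result')
            if info['state'] == 'error':
                raise RuntimeError(info.get('error', 'task failed'))
            print(f"progress is {info.get('progress', 0)}%")  # mirrors the DEBUG lines
            time.sleep(interval)

    # A fake task that completes on the third poll, for demonstration only.
    _states = iter([
        {'state': 'running', 'progress': 0},
        {'state': 'running', 'progress': 66},
        {'state': 'success', 'result': 'ok'},
    ])
    print(wait_for_task(lambda: next(_states), interval=0))
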
[ 992.797692] env[62070]: DEBUG oslo.service.loopingcall [None req-63c53e37-78c7-4c69-b978-95d3e246ef28 tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 992.797923] env[62070]: DEBUG nova.compute.manager [-] [instance: e4cf42ff-8440-42bc-b629-4b712fd94e99] Deallocating network for instance {{(pid=62070) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 992.798037] env[62070]: DEBUG nova.network.neutron [-] [instance: e4cf42ff-8440-42bc-b629-4b712fd94e99] deallocate_for_instance() {{(pid=62070) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 992.802520] env[62070]: DEBUG oslo_concurrency.lockutils [None req-5af751bb-55de-4464-945f-4333bd2b85cc tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.423s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 992.802708] env[62070]: INFO nova.compute.manager [None req-5af751bb-55de-4464-945f-4333bd2b85cc tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: b101c79a-abfd-4104-aaed-096995fb2337] Migrating [ 992.813547] env[62070]: DEBUG oslo_concurrency.lockutils [None req-4c71a514-0b63-4572-bfc9-c9bc2441840c tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 11.882s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 993.064317] env[62070]: DEBUG nova.compute.manager [req-46c555f7-0c4b-4a77-ab9c-8834eedcda69 req-cf152bea-65f4-40fd-98fb-63405c6aa2c7 service nova] [instance: e4cf42ff-8440-42bc-b629-4b712fd94e99] Received event network-vif-deleted-7b9b5e67-62e1-441d-b99d-381c1233f050 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 993.064636] env[62070]: INFO nova.compute.manager [req-46c555f7-0c4b-4a77-ab9c-8834eedcda69 req-cf152bea-65f4-40fd-98fb-63405c6aa2c7 service nova] [instance: e4cf42ff-8440-42bc-b629-4b712fd94e99] Neutron deleted interface 7b9b5e67-62e1-441d-b99d-381c1233f050; detaching it from the instance and deleting it from the info cache [ 993.064907] env[62070]: DEBUG nova.network.neutron [req-46c555f7-0c4b-4a77-ab9c-8834eedcda69 req-cf152bea-65f4-40fd-98fb-63405c6aa2c7 service nova] [instance: e4cf42ff-8440-42bc-b629-4b712fd94e99] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 993.071211] env[62070]: DEBUG oslo_concurrency.lockutils [req-56225b41-ac75-40c8-abad-08a0d7a9c33c req-d9cb0957-6fca-4332-b16a-462a8187e931 service nova] Releasing lock "refresh_cache-f810eab8-1c8c-4f7a-8acd-f46ac5e6d31f" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 993.199097] env[62070]: DEBUG oslo_vmware.api [None req-10516eed-24ef-4548-bde8-566a3d8bb59d tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Task: {'id': 
session[52f37560-87ef-95a6-a0aa-393127576bf7]52dff24d-deb0-a843-def2-f15b150e4224, 'name': SearchDatastore_Task, 'duration_secs': 0.049231} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 993.199889] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3b3cf3b1-d3ec-4147-9612-22173d7127d9 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.205670] env[62070]: DEBUG oslo_vmware.api [None req-10516eed-24ef-4548-bde8-566a3d8bb59d tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Waiting for the task: (returnval){ [ 993.205670] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]52e27e80-c0ec-405c-e638-5b9566a0b0c9" [ 993.205670] env[62070]: _type = "Task" [ 993.205670] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 993.213989] env[62070]: DEBUG oslo_vmware.api [None req-10516eed-24ef-4548-bde8-566a3d8bb59d tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52e27e80-c0ec-405c-e638-5b9566a0b0c9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 993.272815] env[62070]: DEBUG oslo_vmware.api [None req-ca3baa9f-e172-43d5-a6ee-07c9fa3a6821 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Task: {'id': task-1122212, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 993.320056] env[62070]: DEBUG oslo_concurrency.lockutils [None req-5af751bb-55de-4464-945f-4333bd2b85cc tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Acquiring lock "refresh_cache-b101c79a-abfd-4104-aaed-096995fb2337" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 993.320243] env[62070]: DEBUG oslo_concurrency.lockutils [None req-5af751bb-55de-4464-945f-4333bd2b85cc tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Acquired lock "refresh_cache-b101c79a-abfd-4104-aaed-096995fb2337" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 993.320433] env[62070]: DEBUG nova.network.neutron [None req-5af751bb-55de-4464-945f-4333bd2b85cc tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: b101c79a-abfd-4104-aaed-096995fb2337] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 993.324745] env[62070]: INFO nova.compute.claims [None req-4c71a514-0b63-4572-bfc9-c9bc2441840c tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 71aead12-a182-40a7-b5a9-91c01271b800] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 993.395463] env[62070]: DEBUG nova.network.neutron [-] [instance: 21bcb1a6-833b-48f3-8ee2-0e49c64a104f] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 
993.424122] env[62070]: DEBUG nova.objects.instance [None req-0f7be80c-62fb-418e-8b92-cd4fcab4bae0 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Lazy-loading 'flavor' on Instance uuid 84c00e4a-20d3-4739-8535-e27076d85a89 {{(pid=62070) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 993.535856] env[62070]: DEBUG nova.network.neutron [-] [instance: e4cf42ff-8440-42bc-b629-4b712fd94e99] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 993.568516] env[62070]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-66136943-1d35-4da0-b4c5-201a8dbe1008 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.579991] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e33316bc-6bec-48a1-987c-ebd76862140e {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.601596] env[62070]: DEBUG nova.network.neutron [None req-5af751bb-55de-4464-945f-4333bd2b85cc tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: b101c79a-abfd-4104-aaed-096995fb2337] Updating instance_info_cache with network_info: [{"id": "5b226cbf-df38-4b34-b591-7afc6de0a88c", "address": "fa:16:3e:a3:e3:84", "network": {"id": "4888f989-958d-49ff-bf5a-06873e4cc624", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-906255456-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d079c0ef3ed745fcaf69dc728dca4466", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7e0240aa-a694-48fc-a0f9-6f2d3e71aa12", "external-id": "nsx-vlan-transportzone-249", "segmentation_id": 249, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5b226cbf-df", "ovs_interfaceid": "5b226cbf-df38-4b34-b591-7afc6de0a88c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 993.615638] env[62070]: DEBUG nova.compute.manager [req-46c555f7-0c4b-4a77-ab9c-8834eedcda69 req-cf152bea-65f4-40fd-98fb-63405c6aa2c7 service nova] [instance: e4cf42ff-8440-42bc-b629-4b712fd94e99] Detach interface failed, port_id=7b9b5e67-62e1-441d-b99d-381c1233f050, reason: Instance e4cf42ff-8440-42bc-b629-4b712fd94e99 could not be found. {{(pid=62070) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 993.718251] env[62070]: DEBUG oslo_vmware.api [None req-10516eed-24ef-4548-bde8-566a3d8bb59d tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52e27e80-c0ec-405c-e638-5b9566a0b0c9, 'name': SearchDatastore_Task, 'duration_secs': 0.020471} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 993.718522] env[62070]: DEBUG oslo_concurrency.lockutils [None req-10516eed-24ef-4548-bde8-566a3d8bb59d tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Releasing lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 993.718785] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-10516eed-24ef-4548-bde8-566a3d8bb59d tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore1] f810eab8-1c8c-4f7a-8acd-f46ac5e6d31f/f810eab8-1c8c-4f7a-8acd-f46ac5e6d31f.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 993.719116] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e077783d-5bf7-43bd-8516-4958e808816f {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.726527] env[62070]: DEBUG oslo_vmware.api [None req-10516eed-24ef-4548-bde8-566a3d8bb59d tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Waiting for the task: (returnval){ [ 993.726527] env[62070]: value = "task-1122214" [ 993.726527] env[62070]: _type = "Task" [ 993.726527] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 993.734752] env[62070]: DEBUG oslo_vmware.api [None req-10516eed-24ef-4548-bde8-566a3d8bb59d tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Task: {'id': task-1122214, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 993.772761] env[62070]: DEBUG oslo_vmware.api [None req-ca3baa9f-e172-43d5-a6ee-07c9fa3a6821 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Task: {'id': task-1122212, 'name': PowerOnVM_Task, 'duration_secs': 1.054571} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 993.773056] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-ca3baa9f-e172-43d5-a6ee-07c9fa3a6821 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 6cba961f-f9f9-4d3c-853a-049a014c9dbb] Powered on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 993.773273] env[62070]: INFO nova.compute.manager [None req-ca3baa9f-e172-43d5-a6ee-07c9fa3a6821 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 6cba961f-f9f9-4d3c-853a-049a014c9dbb] Took 8.61 seconds to spawn the instance on the hypervisor. 
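
The records above acquire, hold, and then release a lock named after the cached image vmdk under devstack-image-cache_base, which is what serializes concurrent spawns against the same cached image. Below is a minimal sketch of that acquire/release pattern, assuming only that oslo.concurrency is available (its lockutils DEBUG lines appear throughout this log); the function and its body are placeholders, not Nova's _fetch_image_if_missing.

    # Minimal sketch of the acquire/release pattern recorded above, using
    # oslo_concurrency.lockutils. The lock name mirrors the cached-image path
    # from the log; the body is a placeholder.
    from oslo_concurrency import lockutils

    IMAGE_ID = "43ea607c-7ece-4601-9b11-75c6a16aa7dd"
    CACHE_LOCK = f"[datastore1] devstack-image-cache_base/{IMAGE_ID}/{IMAGE_ID}.vmdk"

    def fetch_image_if_missing(copy_image):
        # lockutils.lock() emits "Acquiring"/"Acquired"/"Releasing" DEBUG lines
        # like the ones above when debug logging is enabled.
        with lockutils.lock(CACHE_LOCK):
            copy_image()  # only one request at a time touches the cached vmdk

    fetch_image_if_missing(lambda: None)
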
[ 993.773459] env[62070]: DEBUG nova.compute.manager [None req-ca3baa9f-e172-43d5-a6ee-07c9fa3a6821 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 6cba961f-f9f9-4d3c-853a-049a014c9dbb] Checking state {{(pid=62070) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 993.774299] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93d22fc0-144b-409c-8de3-6caef49900cd {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.835920] env[62070]: INFO nova.compute.resource_tracker [None req-4c71a514-0b63-4572-bfc9-c9bc2441840c tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 71aead12-a182-40a7-b5a9-91c01271b800] Updating resource usage from migration e930f8db-6e00-40a3-baf3-db252b53e586 [ 993.847024] env[62070]: DEBUG nova.compute.manager [req-272f6993-df0e-49e5-a956-c629fea41e5f req-11678c28-71c0-4b57-b35a-2551a85598d4 service nova] [instance: 21bcb1a6-833b-48f3-8ee2-0e49c64a104f] Received event network-vif-deleted-45420f68-e309-4569-8dac-28e16d9417d7 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 993.898327] env[62070]: INFO nova.compute.manager [-] [instance: 21bcb1a6-833b-48f3-8ee2-0e49c64a104f] Took 1.25 seconds to deallocate network for instance. [ 993.929036] env[62070]: DEBUG oslo_concurrency.lockutils [None req-0f7be80c-62fb-418e-8b92-cd4fcab4bae0 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Lock "84c00e4a-20d3-4739-8535-e27076d85a89" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.311s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 994.039502] env[62070]: INFO nova.compute.manager [-] [instance: e4cf42ff-8440-42bc-b629-4b712fd94e99] Took 1.24 seconds to deallocate network for instance. 
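
Several records in this section update instance_info_cache with a network_info list (one entry per Neutron VIF), and the deallocation path above resets it to []. Under the assumption that the cache has exactly the JSON-like shape printed in those records, the hypothetical helper below shows how the fixed and floating addresses could be read back out; it is not Nova's NetworkInfo model.

    # Hypothetical helper (not Nova's NetworkInfo model): reads the fixed and
    # floating IPs out of a network_info list shaped like the instance_info_cache
    # entries printed in this log. An empty list, as after deallocation above,
    # simply yields no addresses.
    def extract_addresses(network_info):
        addresses = []
        for vif in network_info:
            for subnet in vif["network"]["subnets"]:
                for ip in subnet["ips"]:
                    addresses.append((vif["id"], "fixed", ip["address"]))
                    for fip in ip.get("floating_ips", []):
                        addresses.append((vif["id"], "floating", fip["address"]))
        return addresses

    # Trimmed example mirroring one cached entry from this section.
    sample = [{
        "id": "0b063dd9-e920-4bb9-88bf-9f98085170bb",
        "network": {"subnets": [{"ips": [{"address": "192.168.128.7",
                                          "floating_ips": []}]}]},
    }]
    print(extract_addresses(sample))   # [('0b063dd9-...', 'fixed', '192.168.128.7')]
    print(extract_addresses([]))       # [] after deallocation
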
[ 994.100303] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df533e23-0387-4068-afd7-d895cebe4f3e {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.103994] env[62070]: DEBUG oslo_concurrency.lockutils [None req-5af751bb-55de-4464-945f-4333bd2b85cc tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Releasing lock "refresh_cache-b101c79a-abfd-4104-aaed-096995fb2337" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 994.113070] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0ba5fa7-b945-40cc-a263-cbe8841d250c {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.149152] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4ddaff9-c91e-47f1-912a-1825b264f025 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.160537] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3a9a23c-11d2-4771-a8a2-a8324f61af73 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.179706] env[62070]: DEBUG nova.compute.provider_tree [None req-4c71a514-0b63-4572-bfc9-c9bc2441840c tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Updating inventory in ProviderTree for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 994.239160] env[62070]: DEBUG oslo_vmware.api [None req-10516eed-24ef-4548-bde8-566a3d8bb59d tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Task: {'id': task-1122214, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 994.292844] env[62070]: INFO nova.compute.manager [None req-ca3baa9f-e172-43d5-a6ee-07c9fa3a6821 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 6cba961f-f9f9-4d3c-853a-049a014c9dbb] Took 22.78 seconds to build instance. 
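
The ProviderTree update above lists the resource provider's inventory per resource class: total, reserved, min/max unit, step size, and allocation ratio. Placement's effective schedulable capacity for a class is (total - reserved) * allocation_ratio; the short sketch below evaluates that formula for the inventory printed in the log (the helper name is illustrative, not a Placement API).

    # Effective capacity per resource class, computed as
    # (total - reserved) * allocation_ratio, for the inventory logged above.
    # effective_capacity is an illustrative helper, not part of Placement.
    INVENTORY = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }

    def effective_capacity(inventory):
        return {rc: (inv['total'] - inv['reserved']) * inv['allocation_ratio']
                for rc, inv in inventory.items()}

    # {'VCPU': 192.0, 'MEMORY_MB': 196078.0, 'DISK_GB': 400.0}
    print(effective_capacity(INVENTORY))
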
[ 994.354057] env[62070]: INFO nova.compute.manager [None req-636c2657-6edc-4f64-8b0d-e6d14b26af48 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 84c00e4a-20d3-4739-8535-e27076d85a89] Rebuilding instance [ 994.398828] env[62070]: DEBUG nova.compute.manager [None req-636c2657-6edc-4f64-8b0d-e6d14b26af48 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 84c00e4a-20d3-4739-8535-e27076d85a89] Checking state {{(pid=62070) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 994.399707] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70cf6b47-fa7a-40ad-a447-e50e52b9042c {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.405443] env[62070]: DEBUG oslo_concurrency.lockutils [None req-910bde70-333a-4387-adb0-10c731f0cbeb tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 994.546644] env[62070]: DEBUG oslo_concurrency.lockutils [None req-63c53e37-78c7-4c69-b978-95d3e246ef28 tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 994.710549] env[62070]: DEBUG nova.scheduler.client.report [None req-4c71a514-0b63-4572-bfc9-c9bc2441840c tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Updated inventory for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 with generation 134 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 994.710841] env[62070]: DEBUG nova.compute.provider_tree [None req-4c71a514-0b63-4572-bfc9-c9bc2441840c tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Updating resource provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 generation from 134 to 135 during operation: update_inventory {{(pid=62070) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 994.711040] env[62070]: DEBUG nova.compute.provider_tree [None req-4c71a514-0b63-4572-bfc9-c9bc2441840c tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Updating inventory in ProviderTree for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 
'allocation_ratio': 1.0}} {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 994.740553] env[62070]: DEBUG oslo_vmware.api [None req-10516eed-24ef-4548-bde8-566a3d8bb59d tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Task: {'id': task-1122214, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.556939} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 994.740839] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-10516eed-24ef-4548-bde8-566a3d8bb59d tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore1] f810eab8-1c8c-4f7a-8acd-f46ac5e6d31f/f810eab8-1c8c-4f7a-8acd-f46ac5e6d31f.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 994.740912] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-10516eed-24ef-4548-bde8-566a3d8bb59d tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: f810eab8-1c8c-4f7a-8acd-f46ac5e6d31f] Extending root virtual disk to 1048576 {{(pid=62070) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 994.741289] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-235c8d54-f14b-4cc9-9f8d-422759d986f7 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.750631] env[62070]: DEBUG oslo_vmware.api [None req-10516eed-24ef-4548-bde8-566a3d8bb59d tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Waiting for the task: (returnval){ [ 994.750631] env[62070]: value = "task-1122215" [ 994.750631] env[62070]: _type = "Task" [ 994.750631] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 994.762601] env[62070]: DEBUG oslo_vmware.api [None req-10516eed-24ef-4548-bde8-566a3d8bb59d tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Task: {'id': task-1122215, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 994.794678] env[62070]: DEBUG oslo_concurrency.lockutils [None req-ca3baa9f-e172-43d5-a6ee-07c9fa3a6821 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Lock "6cba961f-f9f9-4d3c-853a-049a014c9dbb" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 24.301s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 994.912372] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-636c2657-6edc-4f64-8b0d-e6d14b26af48 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 84c00e4a-20d3-4739-8535-e27076d85a89] Powering off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 994.912681] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-51442491-b5de-4894-bfe7-b755f0961bf9 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.920519] env[62070]: DEBUG oslo_vmware.api [None req-636c2657-6edc-4f64-8b0d-e6d14b26af48 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Waiting for the task: (returnval){ [ 994.920519] env[62070]: value = "task-1122216" [ 994.920519] env[62070]: _type = "Task" [ 994.920519] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 994.929798] env[62070]: DEBUG oslo_vmware.api [None req-636c2657-6edc-4f64-8b0d-e6d14b26af48 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': task-1122216, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 995.216739] env[62070]: DEBUG oslo_concurrency.lockutils [None req-4c71a514-0b63-4572-bfc9-c9bc2441840c tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.403s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 995.216960] env[62070]: INFO nova.compute.manager [None req-4c71a514-0b63-4572-bfc9-c9bc2441840c tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 71aead12-a182-40a7-b5a9-91c01271b800] Migrating [ 995.223336] env[62070]: DEBUG oslo_concurrency.lockutils [None req-93767b52-6617-4bde-8e1b-f022a96d60f4 tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 8.680s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 995.224739] env[62070]: INFO nova.compute.claims [None req-93767b52-6617-4bde-8e1b-f022a96d60f4 tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] [instance: b9e5a798-9ce9-4e36-8c32-bc32b0dc1eae] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 995.263452] env[62070]: DEBUG oslo_vmware.api [None req-10516eed-24ef-4548-bde8-566a3d8bb59d tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Task: {'id': task-1122215, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.078847} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 995.263744] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-10516eed-24ef-4548-bde8-566a3d8bb59d tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: f810eab8-1c8c-4f7a-8acd-f46ac5e6d31f] Extended root virtual disk {{(pid=62070) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 995.264579] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-291c00e1-2002-4192-8b0d-c83f10442449 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.271391] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59acff23-e1a4-4dd7-b091-bd86488d6449 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.292242] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-10516eed-24ef-4548-bde8-566a3d8bb59d tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: f810eab8-1c8c-4f7a-8acd-f46ac5e6d31f] Reconfiguring VM instance instance-0000005d to attach disk [datastore1] f810eab8-1c8c-4f7a-8acd-f46ac5e6d31f/f810eab8-1c8c-4f7a-8acd-f46ac5e6d31f.vmdk or device None with type sparse {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 995.293195] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3e911375-9c34-4ad7-bfb6-a86eddfded79 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.308938] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-de2db677-4068-4610-95ef-4b162f5f21e6 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 6cba961f-f9f9-4d3c-853a-049a014c9dbb] Suspending the VM {{(pid=62070) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1163}} [ 995.309571] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-a78f021e-a6cf-44c3-9b57-313c1c77797a {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.314597] env[62070]: DEBUG oslo_vmware.api [None req-10516eed-24ef-4548-bde8-566a3d8bb59d tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Waiting for the task: (returnval){ [ 995.314597] env[62070]: value = "task-1122217" [ 995.314597] env[62070]: _type = "Task" [ 995.314597] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 995.318682] env[62070]: DEBUG oslo_vmware.api [None req-de2db677-4068-4610-95ef-4b162f5f21e6 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Waiting for the task: (returnval){ [ 995.318682] env[62070]: value = "task-1122218" [ 995.318682] env[62070]: _type = "Task" [ 995.318682] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 995.324885] env[62070]: DEBUG oslo_vmware.api [None req-10516eed-24ef-4548-bde8-566a3d8bb59d tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Task: {'id': task-1122217, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 995.329975] env[62070]: DEBUG oslo_vmware.api [None req-de2db677-4068-4610-95ef-4b162f5f21e6 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Task: {'id': task-1122218, 'name': SuspendVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 995.433755] env[62070]: DEBUG oslo_vmware.api [None req-636c2657-6edc-4f64-8b0d-e6d14b26af48 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': task-1122216, 'name': PowerOffVM_Task} progress is 100%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 995.619493] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb4109e6-13d1-4537-bc88-f4d0320da157 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.642489] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-5af751bb-55de-4464-945f-4333bd2b85cc tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: b101c79a-abfd-4104-aaed-096995fb2337] Updating instance 'b101c79a-abfd-4104-aaed-096995fb2337' progress to 0 {{(pid=62070) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 995.737021] env[62070]: DEBUG oslo_concurrency.lockutils [None req-4c71a514-0b63-4572-bfc9-c9bc2441840c tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Acquiring lock "refresh_cache-71aead12-a182-40a7-b5a9-91c01271b800" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 995.737021] env[62070]: DEBUG oslo_concurrency.lockutils [None req-4c71a514-0b63-4572-bfc9-c9bc2441840c tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Acquired lock "refresh_cache-71aead12-a182-40a7-b5a9-91c01271b800" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 995.737021] env[62070]: DEBUG nova.network.neutron [None req-4c71a514-0b63-4572-bfc9-c9bc2441840c tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 71aead12-a182-40a7-b5a9-91c01271b800] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 995.826367] env[62070]: DEBUG oslo_vmware.api [None req-10516eed-24ef-4548-bde8-566a3d8bb59d tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Task: {'id': task-1122217, 'name': ReconfigVM_Task, 'duration_secs': 0.482753} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 995.827124] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-10516eed-24ef-4548-bde8-566a3d8bb59d tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: f810eab8-1c8c-4f7a-8acd-f46ac5e6d31f] Reconfigured VM instance instance-0000005d to attach disk [datastore1] f810eab8-1c8c-4f7a-8acd-f46ac5e6d31f/f810eab8-1c8c-4f7a-8acd-f46ac5e6d31f.vmdk or device None with type sparse {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 995.827873] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-36e69179-4009-4c6d-b666-a8c7153d3aa2 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.832496] env[62070]: DEBUG oslo_vmware.api [None req-de2db677-4068-4610-95ef-4b162f5f21e6 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Task: {'id': task-1122218, 'name': SuspendVM_Task} progress is 66%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 995.838066] env[62070]: DEBUG oslo_vmware.api [None req-10516eed-24ef-4548-bde8-566a3d8bb59d tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Waiting for the task: (returnval){ [ 995.838066] env[62070]: value = "task-1122219" [ 995.838066] env[62070]: _type = "Task" [ 995.838066] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 995.846105] env[62070]: DEBUG oslo_vmware.api [None req-10516eed-24ef-4548-bde8-566a3d8bb59d tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Task: {'id': task-1122219, 'name': Rename_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 995.932686] env[62070]: DEBUG oslo_vmware.api [None req-636c2657-6edc-4f64-8b0d-e6d14b26af48 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': task-1122216, 'name': PowerOffVM_Task} progress is 100%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 996.149472] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-5af751bb-55de-4464-945f-4333bd2b85cc tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: b101c79a-abfd-4104-aaed-096995fb2337] Powering off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 996.149795] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-105dbf8c-b7f4-48f1-8a0b-a920537348ce {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.157600] env[62070]: DEBUG oslo_vmware.api [None req-5af751bb-55de-4464-945f-4333bd2b85cc tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Waiting for the task: (returnval){ [ 996.157600] env[62070]: value = "task-1122220" [ 996.157600] env[62070]: _type = "Task" [ 996.157600] env[62070]: } to complete. 
{{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 996.170288] env[62070]: DEBUG oslo_vmware.api [None req-5af751bb-55de-4464-945f-4333bd2b85cc tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Task: {'id': task-1122220, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 996.329888] env[62070]: DEBUG oslo_vmware.api [None req-de2db677-4068-4610-95ef-4b162f5f21e6 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Task: {'id': task-1122218, 'name': SuspendVM_Task, 'duration_secs': 0.807593} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 996.334599] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-de2db677-4068-4610-95ef-4b162f5f21e6 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 6cba961f-f9f9-4d3c-853a-049a014c9dbb] Suspended the VM {{(pid=62070) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1167}} [ 996.334821] env[62070]: DEBUG nova.compute.manager [None req-de2db677-4068-4610-95ef-4b162f5f21e6 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 6cba961f-f9f9-4d3c-853a-049a014c9dbb] Checking state {{(pid=62070) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 996.335848] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90519685-034d-41d1-b45b-f66e0b46a110 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.353507] env[62070]: DEBUG oslo_vmware.api [None req-10516eed-24ef-4548-bde8-566a3d8bb59d tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Task: {'id': task-1122219, 'name': Rename_Task, 'duration_secs': 0.162263} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 996.354464] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-10516eed-24ef-4548-bde8-566a3d8bb59d tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: f810eab8-1c8c-4f7a-8acd-f46ac5e6d31f] Powering on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 996.354464] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4c285602-0cea-4974-b5af-2ef8783f0edb {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.361226] env[62070]: DEBUG oslo_vmware.api [None req-10516eed-24ef-4548-bde8-566a3d8bb59d tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Waiting for the task: (returnval){ [ 996.361226] env[62070]: value = "task-1122221" [ 996.361226] env[62070]: _type = "Task" [ 996.361226] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 996.373420] env[62070]: DEBUG oslo_vmware.api [None req-10516eed-24ef-4548-bde8-566a3d8bb59d tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Task: {'id': task-1122221, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 996.441610] env[62070]: DEBUG oslo_vmware.api [None req-636c2657-6edc-4f64-8b0d-e6d14b26af48 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': task-1122216, 'name': PowerOffVM_Task, 'duration_secs': 1.192107} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 996.441610] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-636c2657-6edc-4f64-8b0d-e6d14b26af48 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 84c00e4a-20d3-4739-8535-e27076d85a89] Powered off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 996.518956] env[62070]: INFO nova.compute.manager [None req-636c2657-6edc-4f64-8b0d-e6d14b26af48 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 84c00e4a-20d3-4739-8535-e27076d85a89] Detaching volume 773d984d-4185-4716-a44d-6527016a9c86 [ 996.536539] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc9cf521-2dea-4d98-863a-64619aadb894 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.544754] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f38054a-55b8-4d9c-95de-39b22c75f73a {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.549777] env[62070]: DEBUG nova.network.neutron [None req-4c71a514-0b63-4572-bfc9-c9bc2441840c tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 71aead12-a182-40a7-b5a9-91c01271b800] Updating instance_info_cache with network_info: [{"id": "a3ed0957-14c2-4144-8d45-f4a0e5cb45ab", "address": "fa:16:3e:3c:6a:3d", "network": {"id": "1ca75409-ae3f-4837-a2b6-ed2445253336", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1883034081-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.208", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f29ac48ab6544ec0bd1d210aec05dbc5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1559ce49-7345-443f-bf02-4bfeb88356ef", "external-id": "nsx-vlan-transportzone-670", "segmentation_id": 670, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa3ed0957-14", "ovs_interfaceid": "a3ed0957-14c2-4144-8d45-f4a0e5cb45ab", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 996.578544] env[62070]: INFO nova.virt.block_device [None req-636c2657-6edc-4f64-8b0d-e6d14b26af48 tempest-ServerActionsTestOtherA-1868654732 
tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 84c00e4a-20d3-4739-8535-e27076d85a89] Attempting to driver detach volume 773d984d-4185-4716-a44d-6527016a9c86 from mountpoint /dev/sdb [ 996.578787] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-636c2657-6edc-4f64-8b0d-e6d14b26af48 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 84c00e4a-20d3-4739-8535-e27076d85a89] Volume detach. Driver type: vmdk {{(pid=62070) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 996.579014] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-636c2657-6edc-4f64-8b0d-e6d14b26af48 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 84c00e4a-20d3-4739-8535-e27076d85a89] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-245480', 'volume_id': '773d984d-4185-4716-a44d-6527016a9c86', 'name': 'volume-773d984d-4185-4716-a44d-6527016a9c86', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '84c00e4a-20d3-4739-8535-e27076d85a89', 'attached_at': '', 'detached_at': '', 'volume_id': '773d984d-4185-4716-a44d-6527016a9c86', 'serial': '773d984d-4185-4716-a44d-6527016a9c86'} {{(pid=62070) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 996.579797] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6c48c5f-d210-4441-bd1d-f7d854c8f74e {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.583042] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7458002e-e4d6-41da-b044-07e861d9d5ac {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.606536] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72657eef-0d6a-403b-89d5-0a17bfaee380 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.611153] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-486c6a34-a973-4e4e-9bba-02f406019746 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.623642] env[62070]: DEBUG nova.compute.provider_tree [None req-93767b52-6617-4bde-8e1b-f022a96d60f4 tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 996.626944] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9fc8717-5f44-4efe-a8fc-8c0d8d7275db {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.647901] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-123e860a-7366-4e8c-871c-aa6944ca19e2 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.664435] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None 
req-636c2657-6edc-4f64-8b0d-e6d14b26af48 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] The volume has not been displaced from its original location: [datastore1] volume-773d984d-4185-4716-a44d-6527016a9c86/volume-773d984d-4185-4716-a44d-6527016a9c86.vmdk. No consolidation needed. {{(pid=62070) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 996.670722] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-636c2657-6edc-4f64-8b0d-e6d14b26af48 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 84c00e4a-20d3-4739-8535-e27076d85a89] Reconfiguring VM instance instance-00000053 to detach disk 2001 {{(pid=62070) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 996.673831] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8bc3dc89-bb82-400d-9fb9-5f7d05c5448f {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.693460] env[62070]: DEBUG oslo_vmware.api [None req-5af751bb-55de-4464-945f-4333bd2b85cc tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Task: {'id': task-1122220, 'name': PowerOffVM_Task, 'duration_secs': 0.188365} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 996.694847] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-5af751bb-55de-4464-945f-4333bd2b85cc tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: b101c79a-abfd-4104-aaed-096995fb2337] Powered off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 996.695061] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-5af751bb-55de-4464-945f-4333bd2b85cc tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: b101c79a-abfd-4104-aaed-096995fb2337] Updating instance 'b101c79a-abfd-4104-aaed-096995fb2337' progress to 17 {{(pid=62070) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 996.698452] env[62070]: DEBUG oslo_vmware.api [None req-636c2657-6edc-4f64-8b0d-e6d14b26af48 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Waiting for the task: (returnval){ [ 996.698452] env[62070]: value = "task-1122222" [ 996.698452] env[62070]: _type = "Task" [ 996.698452] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 996.710201] env[62070]: DEBUG oslo_vmware.api [None req-636c2657-6edc-4f64-8b0d-e6d14b26af48 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': task-1122222, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 996.871621] env[62070]: DEBUG oslo_vmware.api [None req-10516eed-24ef-4548-bde8-566a3d8bb59d tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Task: {'id': task-1122221, 'name': PowerOnVM_Task} progress is 94%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 996.941074] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6634cfa8-faf4-49b4-9551-5727bdb6dbd8 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Acquiring lock "7dc27fe6-495f-498d-88fe-a99ddc19a21c" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 996.941367] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6634cfa8-faf4-49b4-9551-5727bdb6dbd8 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Lock "7dc27fe6-495f-498d-88fe-a99ddc19a21c" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 997.078527] env[62070]: DEBUG oslo_concurrency.lockutils [None req-4c71a514-0b63-4572-bfc9-c9bc2441840c tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Releasing lock "refresh_cache-71aead12-a182-40a7-b5a9-91c01271b800" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 997.128315] env[62070]: DEBUG nova.scheduler.client.report [None req-93767b52-6617-4bde-8e1b-f022a96d60f4 tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 997.207188] env[62070]: DEBUG nova.virt.hardware [None req-5af751bb-55de-4464-945f-4333bd2b85cc tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T09:21:37Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=43ea607c-7ece-4601-9b11-75c6a16aa7dd,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 997.208295] env[62070]: DEBUG nova.virt.hardware [None req-5af751bb-55de-4464-945f-4333bd2b85cc tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Flavor limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 997.208295] env[62070]: DEBUG nova.virt.hardware [None req-5af751bb-55de-4464-945f-4333bd2b85cc tempest-ServerDiskConfigTestJSON-1824612993 
tempest-ServerDiskConfigTestJSON-1824612993-project-member] Image limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 997.208295] env[62070]: DEBUG nova.virt.hardware [None req-5af751bb-55de-4464-945f-4333bd2b85cc tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Flavor pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 997.208295] env[62070]: DEBUG nova.virt.hardware [None req-5af751bb-55de-4464-945f-4333bd2b85cc tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Image pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 997.208295] env[62070]: DEBUG nova.virt.hardware [None req-5af751bb-55de-4464-945f-4333bd2b85cc tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 997.208836] env[62070]: DEBUG nova.virt.hardware [None req-5af751bb-55de-4464-945f-4333bd2b85cc tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 997.209358] env[62070]: DEBUG nova.virt.hardware [None req-5af751bb-55de-4464-945f-4333bd2b85cc tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 997.209358] env[62070]: DEBUG nova.virt.hardware [None req-5af751bb-55de-4464-945f-4333bd2b85cc tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Got 1 possible topologies {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 997.209358] env[62070]: DEBUG nova.virt.hardware [None req-5af751bb-55de-4464-945f-4333bd2b85cc tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 997.209526] env[62070]: DEBUG nova.virt.hardware [None req-5af751bb-55de-4464-945f-4333bd2b85cc tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 997.218658] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8c84ba13-cb7a-4b16-aeb4-4edda05e0b89 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.229157] env[62070]: DEBUG oslo_vmware.api [None req-636c2657-6edc-4f64-8b0d-e6d14b26af48 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': task-1122222, 'name': ReconfigVM_Task, 'duration_secs': 0.496453} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 997.230286] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-636c2657-6edc-4f64-8b0d-e6d14b26af48 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 84c00e4a-20d3-4739-8535-e27076d85a89] Reconfigured VM instance instance-00000053 to detach disk 2001 {{(pid=62070) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 997.235509] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-57893c51-0164-47e3-b688-876bd0f38583 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.247518] env[62070]: DEBUG oslo_vmware.api [None req-5af751bb-55de-4464-945f-4333bd2b85cc tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Waiting for the task: (returnval){ [ 997.247518] env[62070]: value = "task-1122223" [ 997.247518] env[62070]: _type = "Task" [ 997.247518] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 997.253547] env[62070]: DEBUG oslo_vmware.api [None req-636c2657-6edc-4f64-8b0d-e6d14b26af48 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Waiting for the task: (returnval){ [ 997.253547] env[62070]: value = "task-1122224" [ 997.253547] env[62070]: _type = "Task" [ 997.253547] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 997.259874] env[62070]: DEBUG oslo_vmware.api [None req-5af751bb-55de-4464-945f-4333bd2b85cc tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Task: {'id': task-1122223, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 997.267603] env[62070]: DEBUG oslo_vmware.api [None req-636c2657-6edc-4f64-8b0d-e6d14b26af48 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': task-1122224, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 997.373042] env[62070]: DEBUG oslo_vmware.api [None req-10516eed-24ef-4548-bde8-566a3d8bb59d tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Task: {'id': task-1122221, 'name': PowerOnVM_Task, 'duration_secs': 0.554293} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 997.373042] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-10516eed-24ef-4548-bde8-566a3d8bb59d tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: f810eab8-1c8c-4f7a-8acd-f46ac5e6d31f] Powered on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 997.373483] env[62070]: INFO nova.compute.manager [None req-10516eed-24ef-4548-bde8-566a3d8bb59d tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: f810eab8-1c8c-4f7a-8acd-f46ac5e6d31f] Took 7.88 seconds to spawn the instance on the hypervisor. 
[ 997.373483] env[62070]: DEBUG nova.compute.manager [None req-10516eed-24ef-4548-bde8-566a3d8bb59d tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: f810eab8-1c8c-4f7a-8acd-f46ac5e6d31f] Checking state {{(pid=62070) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 997.374411] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da535d4a-5b0d-4f54-8433-62fbb96e6e5b {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.444575] env[62070]: INFO nova.compute.manager [None req-6634cfa8-faf4-49b4-9551-5727bdb6dbd8 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] [instance: 7dc27fe6-495f-498d-88fe-a99ddc19a21c] Detaching volume c400637e-5e41-43a1-91df-a0f4d9c4e253 [ 997.479912] env[62070]: INFO nova.virt.block_device [None req-6634cfa8-faf4-49b4-9551-5727bdb6dbd8 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] [instance: 7dc27fe6-495f-498d-88fe-a99ddc19a21c] Attempting to driver detach volume c400637e-5e41-43a1-91df-a0f4d9c4e253 from mountpoint /dev/sdb [ 997.480203] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-6634cfa8-faf4-49b4-9551-5727bdb6dbd8 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] [instance: 7dc27fe6-495f-498d-88fe-a99ddc19a21c] Volume detach. Driver type: vmdk {{(pid=62070) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 997.480404] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-6634cfa8-faf4-49b4-9551-5727bdb6dbd8 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] [instance: 7dc27fe6-495f-498d-88fe-a99ddc19a21c] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-245461', 'volume_id': 'c400637e-5e41-43a1-91df-a0f4d9c4e253', 'name': 'volume-c400637e-5e41-43a1-91df-a0f4d9c4e253', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '7dc27fe6-495f-498d-88fe-a99ddc19a21c', 'attached_at': '', 'detached_at': '', 'volume_id': 'c400637e-5e41-43a1-91df-a0f4d9c4e253', 'serial': 'c400637e-5e41-43a1-91df-a0f4d9c4e253'} {{(pid=62070) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 997.481567] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bca97a64-2009-4aa1-b12a-e5e8f357f64e {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.504522] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40972fae-a109-4318-90a8-9405d1c62f08 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.511345] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7b1437b-a97a-45b5-a62a-3261cab54227 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.532107] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22263cc1-0366-4378-8460-d4ebbc46af88 {{(pid=62070) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.547103] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-6634cfa8-faf4-49b4-9551-5727bdb6dbd8 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] The volume has not been displaced from its original location: [datastore2] volume-c400637e-5e41-43a1-91df-a0f4d9c4e253/volume-c400637e-5e41-43a1-91df-a0f4d9c4e253.vmdk. No consolidation needed. {{(pid=62070) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 997.552440] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-6634cfa8-faf4-49b4-9551-5727bdb6dbd8 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] [instance: 7dc27fe6-495f-498d-88fe-a99ddc19a21c] Reconfiguring VM instance instance-0000004e to detach disk 2001 {{(pid=62070) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 997.552750] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1e93c59c-394b-4b2b-9b87-e0b4902194ab {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.572399] env[62070]: DEBUG oslo_vmware.api [None req-6634cfa8-faf4-49b4-9551-5727bdb6dbd8 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Waiting for the task: (returnval){ [ 997.572399] env[62070]: value = "task-1122225" [ 997.572399] env[62070]: _type = "Task" [ 997.572399] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 997.580176] env[62070]: DEBUG oslo_vmware.api [None req-6634cfa8-faf4-49b4-9551-5727bdb6dbd8 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Task: {'id': task-1122225, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 997.634397] env[62070]: DEBUG oslo_concurrency.lockutils [None req-93767b52-6617-4bde-8e1b-f022a96d60f4 tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.411s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 997.634940] env[62070]: DEBUG nova.compute.manager [None req-93767b52-6617-4bde-8e1b-f022a96d60f4 tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] [instance: b9e5a798-9ce9-4e36-8c32-bc32b0dc1eae] Start building networks asynchronously for instance. 
{{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 997.637484] env[62070]: DEBUG oslo_concurrency.lockutils [None req-9026fc09-39c2-473e-a4e2-4fc03ac7d742 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 8.229s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 997.639129] env[62070]: INFO nova.compute.claims [None req-9026fc09-39c2-473e-a4e2-4fc03ac7d742 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] [instance: 5cccd79d-d243-49db-8581-718dd594f3b3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 997.700586] env[62070]: DEBUG oslo_concurrency.lockutils [None req-55bec81b-9bf5-490f-96d6-8638d78a6285 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Acquiring lock "6cba961f-f9f9-4d3c-853a-049a014c9dbb" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 997.700849] env[62070]: DEBUG oslo_concurrency.lockutils [None req-55bec81b-9bf5-490f-96d6-8638d78a6285 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Lock "6cba961f-f9f9-4d3c-853a-049a014c9dbb" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 997.701114] env[62070]: DEBUG oslo_concurrency.lockutils [None req-55bec81b-9bf5-490f-96d6-8638d78a6285 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Acquiring lock "6cba961f-f9f9-4d3c-853a-049a014c9dbb-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 997.701322] env[62070]: DEBUG oslo_concurrency.lockutils [None req-55bec81b-9bf5-490f-96d6-8638d78a6285 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Lock "6cba961f-f9f9-4d3c-853a-049a014c9dbb-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 997.701519] env[62070]: DEBUG oslo_concurrency.lockutils [None req-55bec81b-9bf5-490f-96d6-8638d78a6285 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Lock "6cba961f-f9f9-4d3c-853a-049a014c9dbb-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 997.703570] env[62070]: INFO nova.compute.manager [None req-55bec81b-9bf5-490f-96d6-8638d78a6285 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 6cba961f-f9f9-4d3c-853a-049a014c9dbb] Terminating instance [ 997.705357] env[62070]: DEBUG nova.compute.manager [None req-55bec81b-9bf5-490f-96d6-8638d78a6285 tempest-DeleteServersTestJSON-2112665073 
tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 6cba961f-f9f9-4d3c-853a-049a014c9dbb] Start destroying the instance on the hypervisor. {{(pid=62070) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 997.705556] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-55bec81b-9bf5-490f-96d6-8638d78a6285 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 6cba961f-f9f9-4d3c-853a-049a014c9dbb] Destroying instance {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 997.706506] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44abf79f-932d-417b-97ec-d3a7c62cf133 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.715012] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-55bec81b-9bf5-490f-96d6-8638d78a6285 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 6cba961f-f9f9-4d3c-853a-049a014c9dbb] Unregistering the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 997.715255] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-91ff60ff-a9b4-466c-920b-13fe18618d00 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.757197] env[62070]: DEBUG oslo_vmware.api [None req-5af751bb-55de-4464-945f-4333bd2b85cc tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Task: {'id': task-1122223, 'name': ReconfigVM_Task, 'duration_secs': 0.233963} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 997.760265] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-5af751bb-55de-4464-945f-4333bd2b85cc tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: b101c79a-abfd-4104-aaed-096995fb2337] Updating instance 'b101c79a-abfd-4104-aaed-096995fb2337' progress to 33 {{(pid=62070) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 997.768289] env[62070]: DEBUG oslo_vmware.api [None req-636c2657-6edc-4f64-8b0d-e6d14b26af48 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': task-1122224, 'name': ReconfigVM_Task, 'duration_secs': 0.22735} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 997.768600] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-636c2657-6edc-4f64-8b0d-e6d14b26af48 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 84c00e4a-20d3-4739-8535-e27076d85a89] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-245480', 'volume_id': '773d984d-4185-4716-a44d-6527016a9c86', 'name': 'volume-773d984d-4185-4716-a44d-6527016a9c86', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '84c00e4a-20d3-4739-8535-e27076d85a89', 'attached_at': '', 'detached_at': '', 'volume_id': '773d984d-4185-4716-a44d-6527016a9c86', 'serial': '773d984d-4185-4716-a44d-6527016a9c86'} {{(pid=62070) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 997.892084] env[62070]: INFO nova.compute.manager [None req-10516eed-24ef-4548-bde8-566a3d8bb59d tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: f810eab8-1c8c-4f7a-8acd-f46ac5e6d31f] Took 19.59 seconds to build instance. [ 998.084250] env[62070]: DEBUG oslo_vmware.api [None req-6634cfa8-faf4-49b4-9551-5727bdb6dbd8 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Task: {'id': task-1122225, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 998.143613] env[62070]: DEBUG nova.compute.utils [None req-93767b52-6617-4bde-8e1b-f022a96d60f4 tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] Using /dev/sd instead of None {{(pid=62070) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 998.148276] env[62070]: DEBUG nova.compute.manager [None req-93767b52-6617-4bde-8e1b-f022a96d60f4 tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] [instance: b9e5a798-9ce9-4e36-8c32-bc32b0dc1eae] Allocating IP information in the background. 
{{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 998.148392] env[62070]: DEBUG nova.network.neutron [None req-93767b52-6617-4bde-8e1b-f022a96d60f4 tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] [instance: b9e5a798-9ce9-4e36-8c32-bc32b0dc1eae] allocate_for_instance() {{(pid=62070) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 998.196402] env[62070]: DEBUG nova.policy [None req-93767b52-6617-4bde-8e1b-f022a96d60f4 tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '20349b8cb1f24d5588d6109b09a335de', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e1960779e94c4e119497a0c1117f54fc', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62070) authorize /opt/stack/nova/nova/policy.py:201}} [ 998.266335] env[62070]: DEBUG nova.virt.hardware [None req-5af751bb-55de-4464-945f-4333bd2b85cc tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T09:21:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=43ea607c-7ece-4601-9b11-75c6a16aa7dd,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 998.266569] env[62070]: DEBUG nova.virt.hardware [None req-5af751bb-55de-4464-945f-4333bd2b85cc tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Flavor limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 998.266727] env[62070]: DEBUG nova.virt.hardware [None req-5af751bb-55de-4464-945f-4333bd2b85cc tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Image limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 998.266906] env[62070]: DEBUG nova.virt.hardware [None req-5af751bb-55de-4464-945f-4333bd2b85cc tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Flavor pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 998.267064] env[62070]: DEBUG nova.virt.hardware [None req-5af751bb-55de-4464-945f-4333bd2b85cc tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Image pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 998.267213] env[62070]: DEBUG nova.virt.hardware [None req-5af751bb-55de-4464-945f-4333bd2b85cc tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Chose sockets=0, 
cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 998.267407] env[62070]: DEBUG nova.virt.hardware [None req-5af751bb-55de-4464-945f-4333bd2b85cc tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 998.267562] env[62070]: DEBUG nova.virt.hardware [None req-5af751bb-55de-4464-945f-4333bd2b85cc tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 998.267722] env[62070]: DEBUG nova.virt.hardware [None req-5af751bb-55de-4464-945f-4333bd2b85cc tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Got 1 possible topologies {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 998.268472] env[62070]: DEBUG nova.virt.hardware [None req-5af751bb-55de-4464-945f-4333bd2b85cc tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 998.268472] env[62070]: DEBUG nova.virt.hardware [None req-5af751bb-55de-4464-945f-4333bd2b85cc tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 998.273333] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-5af751bb-55de-4464-945f-4333bd2b85cc tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: b101c79a-abfd-4104-aaed-096995fb2337] Reconfiguring VM instance instance-0000005a to detach disk 2000 {{(pid=62070) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 998.273614] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8b272fa3-9c4c-4431-b9ec-4fd50c31cca8 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.294451] env[62070]: DEBUG oslo_vmware.api [None req-5af751bb-55de-4464-945f-4333bd2b85cc tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Waiting for the task: (returnval){ [ 998.294451] env[62070]: value = "task-1122227" [ 998.294451] env[62070]: _type = "Task" [ 998.294451] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 998.303484] env[62070]: DEBUG oslo_vmware.api [None req-5af751bb-55de-4464-945f-4333bd2b85cc tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Task: {'id': task-1122227, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 998.394283] env[62070]: DEBUG oslo_concurrency.lockutils [None req-10516eed-24ef-4548-bde8-566a3d8bb59d tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Lock "f810eab8-1c8c-4f7a-8acd-f46ac5e6d31f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 21.105s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 998.579394] env[62070]: DEBUG nova.network.neutron [None req-93767b52-6617-4bde-8e1b-f022a96d60f4 tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] [instance: b9e5a798-9ce9-4e36-8c32-bc32b0dc1eae] Successfully created port: f30abb0e-6245-49cc-912a-4685dac5186b {{(pid=62070) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 998.592165] env[62070]: DEBUG oslo_vmware.api [None req-6634cfa8-faf4-49b4-9551-5727bdb6dbd8 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Task: {'id': task-1122225, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 998.593457] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfd67261-f7ea-4ff4-a6e6-a48cd957a669 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.611549] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-4c71a514-0b63-4572-bfc9-c9bc2441840c tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 71aead12-a182-40a7-b5a9-91c01271b800] Updating instance '71aead12-a182-40a7-b5a9-91c01271b800' progress to 0 {{(pid=62070) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 998.649452] env[62070]: DEBUG nova.compute.manager [None req-93767b52-6617-4bde-8e1b-f022a96d60f4 tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] [instance: b9e5a798-9ce9-4e36-8c32-bc32b0dc1eae] Start building block device mappings for instance. 
{{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 998.676875] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-55bec81b-9bf5-490f-96d6-8638d78a6285 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 6cba961f-f9f9-4d3c-853a-049a014c9dbb] Unregistered the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 998.680422] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-55bec81b-9bf5-490f-96d6-8638d78a6285 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 6cba961f-f9f9-4d3c-853a-049a014c9dbb] Deleting contents of the VM from datastore datastore1 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 998.680422] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-55bec81b-9bf5-490f-96d6-8638d78a6285 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Deleting the datastore file [datastore1] 6cba961f-f9f9-4d3c-853a-049a014c9dbb {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 998.680422] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-22928dfb-0bf5-4e6d-ba7c-2991d792c3ab {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.687114] env[62070]: DEBUG oslo_vmware.api [None req-55bec81b-9bf5-490f-96d6-8638d78a6285 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Waiting for the task: (returnval){ [ 998.687114] env[62070]: value = "task-1122228" [ 998.687114] env[62070]: _type = "Task" [ 998.687114] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 998.697494] env[62070]: DEBUG oslo_vmware.api [None req-55bec81b-9bf5-490f-96d6-8638d78a6285 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Task: {'id': task-1122228, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 998.805225] env[62070]: DEBUG oslo_vmware.api [None req-5af751bb-55de-4464-945f-4333bd2b85cc tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Task: {'id': task-1122227, 'name': ReconfigVM_Task, 'duration_secs': 0.163294} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 998.805475] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-5af751bb-55de-4464-945f-4333bd2b85cc tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: b101c79a-abfd-4104-aaed-096995fb2337] Reconfigured VM instance instance-0000005a to detach disk 2000 {{(pid=62070) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 998.806280] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59a5fe2f-de2a-445a-b10f-2f86a1f05147 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.835122] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-5af751bb-55de-4464-945f-4333bd2b85cc tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: b101c79a-abfd-4104-aaed-096995fb2337] Reconfiguring VM instance instance-0000005a to attach disk [datastore2] b101c79a-abfd-4104-aaed-096995fb2337/b101c79a-abfd-4104-aaed-096995fb2337.vmdk or device None with type thin {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 998.838593] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-39fdf413-c62e-4db8-8d14-94b33ab442e3 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.852922] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-636c2657-6edc-4f64-8b0d-e6d14b26af48 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 84c00e4a-20d3-4739-8535-e27076d85a89] Powering off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 998.852986] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a98eedc5-4562-4973-937d-c08175298f8a {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.859976] env[62070]: DEBUG oslo_vmware.api [None req-5af751bb-55de-4464-945f-4333bd2b85cc tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Waiting for the task: (returnval){ [ 998.859976] env[62070]: value = "task-1122230" [ 998.859976] env[62070]: _type = "Task" [ 998.859976] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 998.861351] env[62070]: DEBUG oslo_vmware.api [None req-636c2657-6edc-4f64-8b0d-e6d14b26af48 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Waiting for the task: (returnval){ [ 998.861351] env[62070]: value = "task-1122229" [ 998.861351] env[62070]: _type = "Task" [ 998.861351] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 998.875279] env[62070]: DEBUG oslo_vmware.api [None req-5af751bb-55de-4464-945f-4333bd2b85cc tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Task: {'id': task-1122230, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 998.879081] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-636c2657-6edc-4f64-8b0d-e6d14b26af48 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 84c00e4a-20d3-4739-8535-e27076d85a89] VM already powered off {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1509}} [ 998.879419] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-636c2657-6edc-4f64-8b0d-e6d14b26af48 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 84c00e4a-20d3-4739-8535-e27076d85a89] Volume detach. Driver type: vmdk {{(pid=62070) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 998.879606] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-636c2657-6edc-4f64-8b0d-e6d14b26af48 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 84c00e4a-20d3-4739-8535-e27076d85a89] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-245480', 'volume_id': '773d984d-4185-4716-a44d-6527016a9c86', 'name': 'volume-773d984d-4185-4716-a44d-6527016a9c86', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '84c00e4a-20d3-4739-8535-e27076d85a89', 'attached_at': '', 'detached_at': '', 'volume_id': '773d984d-4185-4716-a44d-6527016a9c86', 'serial': '773d984d-4185-4716-a44d-6527016a9c86'} {{(pid=62070) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 998.880551] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56e5fcb0-6ca0-4b77-ae61-1cb872cfba59 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.905494] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09bcf47a-1e98-4312-8aca-038698ba7988 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.912296] env[62070]: WARNING nova.virt.vmwareapi.driver [None req-636c2657-6edc-4f64-8b0d-e6d14b26af48 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 84c00e4a-20d3-4739-8535-e27076d85a89] The volume None does not exist!: nova.exception.DiskNotFound: Unable to find volume [ 998.912635] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-636c2657-6edc-4f64-8b0d-e6d14b26af48 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 84c00e4a-20d3-4739-8535-e27076d85a89] Destroying instance {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 998.913402] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-030e7dce-f906-4f27-a6ad-f87a239b5e9f {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.922478] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-636c2657-6edc-4f64-8b0d-e6d14b26af48 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 84c00e4a-20d3-4739-8535-e27076d85a89] Unregistering the VM {{(pid=62070) _destroy_instance 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 998.922478] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-72638049-0172-4ec2-b3c8-7be63c800273 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.973076] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0334de28-33f3-41f4-9c30-02a2f9bd2b03 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.982461] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25c3f7e6-e0cf-4b77-a4cf-6e00a2600020 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.987589] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-636c2657-6edc-4f64-8b0d-e6d14b26af48 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 84c00e4a-20d3-4739-8535-e27076d85a89] Unregistered the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 998.987968] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-636c2657-6edc-4f64-8b0d-e6d14b26af48 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 84c00e4a-20d3-4739-8535-e27076d85a89] Deleting contents of the VM from datastore datastore2 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 998.988240] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-636c2657-6edc-4f64-8b0d-e6d14b26af48 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Deleting the datastore file [datastore2] 84c00e4a-20d3-4739-8535-e27076d85a89 {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 998.989027] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-12cfbfe5-3cb6-4810-96dc-62de88e0caa3 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.021859] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c718242-8c18-4f0b-ba60-2918495222bc {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.024830] env[62070]: DEBUG oslo_vmware.api [None req-636c2657-6edc-4f64-8b0d-e6d14b26af48 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Waiting for the task: (returnval){ [ 999.024830] env[62070]: value = "task-1122232" [ 999.024830] env[62070]: _type = "Task" [ 999.024830] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 999.033961] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc2f5e02-65af-4cfd-aac6-c95def5941ac {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.041468] env[62070]: DEBUG oslo_vmware.api [None req-636c2657-6edc-4f64-8b0d-e6d14b26af48 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': task-1122232, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 999.051932] env[62070]: DEBUG nova.compute.provider_tree [None req-9026fc09-39c2-473e-a4e2-4fc03ac7d742 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 999.082988] env[62070]: DEBUG oslo_vmware.api [None req-6634cfa8-faf4-49b4-9551-5727bdb6dbd8 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Task: {'id': task-1122225, 'name': ReconfigVM_Task, 'duration_secs': 1.42865} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 999.083344] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-6634cfa8-faf4-49b4-9551-5727bdb6dbd8 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] [instance: 7dc27fe6-495f-498d-88fe-a99ddc19a21c] Reconfigured VM instance instance-0000004e to detach disk 2001 {{(pid=62070) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 999.088816] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f620940f-a6e9-4b4c-9ac9-0e52a65a64c7 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.103932] env[62070]: DEBUG oslo_vmware.api [None req-6634cfa8-faf4-49b4-9551-5727bdb6dbd8 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Waiting for the task: (returnval){ [ 999.103932] env[62070]: value = "task-1122233" [ 999.103932] env[62070]: _type = "Task" [ 999.103932] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 999.111891] env[62070]: DEBUG oslo_vmware.api [None req-6634cfa8-faf4-49b4-9551-5727bdb6dbd8 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Task: {'id': task-1122233, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 999.117778] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-4c71a514-0b63-4572-bfc9-c9bc2441840c tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 71aead12-a182-40a7-b5a9-91c01271b800] Powering off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 999.118410] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-17c8b013-1d31-4bad-8150-5168847ab3b5 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.125570] env[62070]: DEBUG oslo_vmware.api [None req-4c71a514-0b63-4572-bfc9-c9bc2441840c tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Waiting for the task: (returnval){ [ 999.125570] env[62070]: value = "task-1122234" [ 999.125570] env[62070]: _type = "Task" [ 999.125570] env[62070]: } to complete. 
{{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 999.128982] env[62070]: DEBUG oslo_concurrency.lockutils [None req-70c22416-1d5b-4f27-873a-b05d521d4159 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Acquiring lock "f810eab8-1c8c-4f7a-8acd-f46ac5e6d31f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 999.129383] env[62070]: DEBUG oslo_concurrency.lockutils [None req-70c22416-1d5b-4f27-873a-b05d521d4159 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Lock "f810eab8-1c8c-4f7a-8acd-f46ac5e6d31f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 999.129977] env[62070]: DEBUG oslo_concurrency.lockutils [None req-70c22416-1d5b-4f27-873a-b05d521d4159 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Acquiring lock "f810eab8-1c8c-4f7a-8acd-f46ac5e6d31f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 999.130118] env[62070]: DEBUG oslo_concurrency.lockutils [None req-70c22416-1d5b-4f27-873a-b05d521d4159 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Lock "f810eab8-1c8c-4f7a-8acd-f46ac5e6d31f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 999.130330] env[62070]: DEBUG oslo_concurrency.lockutils [None req-70c22416-1d5b-4f27-873a-b05d521d4159 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Lock "f810eab8-1c8c-4f7a-8acd-f46ac5e6d31f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 999.132896] env[62070]: INFO nova.compute.manager [None req-70c22416-1d5b-4f27-873a-b05d521d4159 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: f810eab8-1c8c-4f7a-8acd-f46ac5e6d31f] Terminating instance [ 999.137854] env[62070]: DEBUG oslo_vmware.api [None req-4c71a514-0b63-4572-bfc9-c9bc2441840c tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Task: {'id': task-1122234, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 999.138585] env[62070]: DEBUG nova.compute.manager [None req-70c22416-1d5b-4f27-873a-b05d521d4159 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: f810eab8-1c8c-4f7a-8acd-f46ac5e6d31f] Start destroying the instance on the hypervisor. 
{{(pid=62070) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 999.138868] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-70c22416-1d5b-4f27-873a-b05d521d4159 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: f810eab8-1c8c-4f7a-8acd-f46ac5e6d31f] Destroying instance {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 999.139723] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72411efb-84ca-4805-91b0-11d0210b86bc {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.146942] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-70c22416-1d5b-4f27-873a-b05d521d4159 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: f810eab8-1c8c-4f7a-8acd-f46ac5e6d31f] Powering off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 999.147254] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5c00b0d2-9991-409a-b2bf-521b0dbc554b {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.159096] env[62070]: DEBUG oslo_vmware.api [None req-70c22416-1d5b-4f27-873a-b05d521d4159 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Waiting for the task: (returnval){ [ 999.159096] env[62070]: value = "task-1122235" [ 999.159096] env[62070]: _type = "Task" [ 999.159096] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 999.168143] env[62070]: DEBUG oslo_vmware.api [None req-70c22416-1d5b-4f27-873a-b05d521d4159 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Task: {'id': task-1122235, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 999.198814] env[62070]: DEBUG oslo_vmware.api [None req-55bec81b-9bf5-490f-96d6-8638d78a6285 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Task: {'id': task-1122228, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.219697} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 999.199105] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-55bec81b-9bf5-490f-96d6-8638d78a6285 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Deleted the datastore file {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 999.199574] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-55bec81b-9bf5-490f-96d6-8638d78a6285 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 6cba961f-f9f9-4d3c-853a-049a014c9dbb] Deleted contents of the VM from datastore datastore1 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 999.199574] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-55bec81b-9bf5-490f-96d6-8638d78a6285 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 6cba961f-f9f9-4d3c-853a-049a014c9dbb] Instance destroyed {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 999.199741] env[62070]: INFO nova.compute.manager [None req-55bec81b-9bf5-490f-96d6-8638d78a6285 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 6cba961f-f9f9-4d3c-853a-049a014c9dbb] Took 1.49 seconds to destroy the instance on the hypervisor. [ 999.199884] env[62070]: DEBUG oslo.service.loopingcall [None req-55bec81b-9bf5-490f-96d6-8638d78a6285 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 999.200114] env[62070]: DEBUG nova.compute.manager [-] [instance: 6cba961f-f9f9-4d3c-853a-049a014c9dbb] Deallocating network for instance {{(pid=62070) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 999.200205] env[62070]: DEBUG nova.network.neutron [-] [instance: 6cba961f-f9f9-4d3c-853a-049a014c9dbb] deallocate_for_instance() {{(pid=62070) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 999.371389] env[62070]: DEBUG oslo_vmware.api [None req-5af751bb-55de-4464-945f-4333bd2b85cc tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Task: {'id': task-1122230, 'name': ReconfigVM_Task, 'duration_secs': 0.266686} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 999.371721] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-5af751bb-55de-4464-945f-4333bd2b85cc tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: b101c79a-abfd-4104-aaed-096995fb2337] Reconfigured VM instance instance-0000005a to attach disk [datastore2] b101c79a-abfd-4104-aaed-096995fb2337/b101c79a-abfd-4104-aaed-096995fb2337.vmdk or device None with type thin {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 999.372017] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-5af751bb-55de-4464-945f-4333bd2b85cc tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: b101c79a-abfd-4104-aaed-096995fb2337] Updating instance 'b101c79a-abfd-4104-aaed-096995fb2337' progress to 50 {{(pid=62070) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 999.451614] env[62070]: DEBUG nova.compute.manager [req-6257f81c-0d95-436e-add0-3d69fac2885a req-661b3221-59ef-45d7-8c38-5233d8482ddc service nova] [instance: 6cba961f-f9f9-4d3c-853a-049a014c9dbb] Received event network-vif-deleted-2c284e8d-6670-4b5c-b9b9-b279c81efea8 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 999.451845] env[62070]: INFO nova.compute.manager [req-6257f81c-0d95-436e-add0-3d69fac2885a req-661b3221-59ef-45d7-8c38-5233d8482ddc service nova] [instance: 6cba961f-f9f9-4d3c-853a-049a014c9dbb] Neutron deleted interface 2c284e8d-6670-4b5c-b9b9-b279c81efea8; detaching it from the instance and deleting it from the info cache [ 999.452015] env[62070]: DEBUG nova.network.neutron [req-6257f81c-0d95-436e-add0-3d69fac2885a req-661b3221-59ef-45d7-8c38-5233d8482ddc service nova] [instance: 6cba961f-f9f9-4d3c-853a-049a014c9dbb] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 999.534977] env[62070]: DEBUG oslo_vmware.api [None req-636c2657-6edc-4f64-8b0d-e6d14b26af48 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': task-1122232, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.161325} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 999.535263] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-636c2657-6edc-4f64-8b0d-e6d14b26af48 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Deleted the datastore file {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 999.535454] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-636c2657-6edc-4f64-8b0d-e6d14b26af48 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 84c00e4a-20d3-4739-8535-e27076d85a89] Deleted contents of the VM from datastore datastore2 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 999.535635] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-636c2657-6edc-4f64-8b0d-e6d14b26af48 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 84c00e4a-20d3-4739-8535-e27076d85a89] Instance destroyed {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 999.554403] env[62070]: DEBUG nova.scheduler.client.report [None req-9026fc09-39c2-473e-a4e2-4fc03ac7d742 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 999.615526] env[62070]: DEBUG oslo_vmware.api [None req-6634cfa8-faf4-49b4-9551-5727bdb6dbd8 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Task: {'id': task-1122233, 'name': ReconfigVM_Task, 'duration_secs': 0.179312} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 999.615983] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-6634cfa8-faf4-49b4-9551-5727bdb6dbd8 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] [instance: 7dc27fe6-495f-498d-88fe-a99ddc19a21c] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-245461', 'volume_id': 'c400637e-5e41-43a1-91df-a0f4d9c4e253', 'name': 'volume-c400637e-5e41-43a1-91df-a0f4d9c4e253', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '7dc27fe6-495f-498d-88fe-a99ddc19a21c', 'attached_at': '', 'detached_at': '', 'volume_id': 'c400637e-5e41-43a1-91df-a0f4d9c4e253', 'serial': 'c400637e-5e41-43a1-91df-a0f4d9c4e253'} {{(pid=62070) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 999.634032] env[62070]: DEBUG oslo_vmware.api [None req-4c71a514-0b63-4572-bfc9-c9bc2441840c tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Task: {'id': task-1122234, 'name': PowerOffVM_Task, 'duration_secs': 0.178628} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 999.634312] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-4c71a514-0b63-4572-bfc9-c9bc2441840c tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 71aead12-a182-40a7-b5a9-91c01271b800] Powered off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 999.634843] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-4c71a514-0b63-4572-bfc9-c9bc2441840c tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 71aead12-a182-40a7-b5a9-91c01271b800] Updating instance '71aead12-a182-40a7-b5a9-91c01271b800' progress to 17 {{(pid=62070) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 999.661761] env[62070]: DEBUG nova.compute.manager [None req-93767b52-6617-4bde-8e1b-f022a96d60f4 tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] [instance: b9e5a798-9ce9-4e36-8c32-bc32b0dc1eae] Start spawning the instance on the hypervisor. {{(pid=62070) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 999.674049] env[62070]: DEBUG oslo_vmware.api [None req-70c22416-1d5b-4f27-873a-b05d521d4159 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Task: {'id': task-1122235, 'name': PowerOffVM_Task, 'duration_secs': 0.170616} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 999.674049] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-70c22416-1d5b-4f27-873a-b05d521d4159 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: f810eab8-1c8c-4f7a-8acd-f46ac5e6d31f] Powered off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 999.674217] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-70c22416-1d5b-4f27-873a-b05d521d4159 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: f810eab8-1c8c-4f7a-8acd-f46ac5e6d31f] Unregistering the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 999.674400] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-51879d36-017b-49ba-ab2a-85757e8add98 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.687623] env[62070]: DEBUG nova.virt.hardware [None req-93767b52-6617-4bde-8e1b-f022a96d60f4 tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T09:21:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta 
ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T09:21:17Z,direct_url=,disk_format='vmdk',id=43ea607c-7ece-4601-9b11-75c6a16aa7dd,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9d42cb2bbadf40d6b35f237f71234611',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T09:21:18Z,virtual_size=,visibility=), allow threads: False {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 999.687917] env[62070]: DEBUG nova.virt.hardware [None req-93767b52-6617-4bde-8e1b-f022a96d60f4 tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] Flavor limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 999.688106] env[62070]: DEBUG nova.virt.hardware [None req-93767b52-6617-4bde-8e1b-f022a96d60f4 tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] Image limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 999.688300] env[62070]: DEBUG nova.virt.hardware [None req-93767b52-6617-4bde-8e1b-f022a96d60f4 tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] Flavor pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 999.688451] env[62070]: DEBUG nova.virt.hardware [None req-93767b52-6617-4bde-8e1b-f022a96d60f4 tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] Image pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 999.688601] env[62070]: DEBUG nova.virt.hardware [None req-93767b52-6617-4bde-8e1b-f022a96d60f4 tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 999.688810] env[62070]: DEBUG nova.virt.hardware [None req-93767b52-6617-4bde-8e1b-f022a96d60f4 tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 999.689007] env[62070]: DEBUG nova.virt.hardware [None req-93767b52-6617-4bde-8e1b-f022a96d60f4 tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 999.689204] env[62070]: DEBUG nova.virt.hardware [None req-93767b52-6617-4bde-8e1b-f022a96d60f4 tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] Got 1 possible topologies {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 999.689375] env[62070]: DEBUG nova.virt.hardware [None req-93767b52-6617-4bde-8e1b-f022a96d60f4 tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 999.689570] env[62070]: 
DEBUG nova.virt.hardware [None req-93767b52-6617-4bde-8e1b-f022a96d60f4 tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 999.690613] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04e3e9b0-1cbe-431b-978e-aa3b8de8664b {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.698114] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04c98a26-4548-49c5-9ab2-dfccae6137c5 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.764482] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-70c22416-1d5b-4f27-873a-b05d521d4159 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: f810eab8-1c8c-4f7a-8acd-f46ac5e6d31f] Unregistered the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 999.764724] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-70c22416-1d5b-4f27-873a-b05d521d4159 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: f810eab8-1c8c-4f7a-8acd-f46ac5e6d31f] Deleting contents of the VM from datastore datastore1 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 999.764947] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-70c22416-1d5b-4f27-873a-b05d521d4159 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Deleting the datastore file [datastore1] f810eab8-1c8c-4f7a-8acd-f46ac5e6d31f {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 999.765235] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-96cf4e4d-2fad-4304-825c-fc27819462af {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.771286] env[62070]: DEBUG oslo_vmware.api [None req-70c22416-1d5b-4f27-873a-b05d521d4159 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Waiting for the task: (returnval){ [ 999.771286] env[62070]: value = "task-1122237" [ 999.771286] env[62070]: _type = "Task" [ 999.771286] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 999.779224] env[62070]: DEBUG oslo_vmware.api [None req-70c22416-1d5b-4f27-873a-b05d521d4159 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Task: {'id': task-1122237, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 999.878435] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3440b1b9-6a03-49a6-b114-f21d819aeeed {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.897305] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a99c4e2e-d4fe-43be-8975-6b8d7ed4ef0a {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.915074] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-5af751bb-55de-4464-945f-4333bd2b85cc tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: b101c79a-abfd-4104-aaed-096995fb2337] Updating instance 'b101c79a-abfd-4104-aaed-096995fb2337' progress to 67 {{(pid=62070) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 999.929311] env[62070]: DEBUG nova.network.neutron [-] [instance: 6cba961f-f9f9-4d3c-853a-049a014c9dbb] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 999.954997] env[62070]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-36a1ad31-e705-44a9-ac10-d6187a911ab0 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.964378] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3e6a7fd-0364-44fc-80aa-c9975a85d932 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.998229] env[62070]: DEBUG nova.compute.manager [req-6257f81c-0d95-436e-add0-3d69fac2885a req-661b3221-59ef-45d7-8c38-5233d8482ddc service nova] [instance: 6cba961f-f9f9-4d3c-853a-049a014c9dbb] Detach interface failed, port_id=2c284e8d-6670-4b5c-b9b9-b279c81efea8, reason: Instance 6cba961f-f9f9-4d3c-853a-049a014c9dbb could not be found. {{(pid=62070) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1000.040172] env[62070]: INFO nova.virt.block_device [None req-636c2657-6edc-4f64-8b0d-e6d14b26af48 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 84c00e4a-20d3-4739-8535-e27076d85a89] Booting with volume 773d984d-4185-4716-a44d-6527016a9c86 at /dev/sdb [ 1000.059174] env[62070]: DEBUG oslo_concurrency.lockutils [None req-9026fc09-39c2-473e-a4e2-4fc03ac7d742 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.422s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1000.059714] env[62070]: DEBUG nova.compute.manager [None req-9026fc09-39c2-473e-a4e2-4fc03ac7d742 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] [instance: 5cccd79d-d243-49db-8581-718dd594f3b3] Start building networks asynchronously for instance. 
{{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1000.064024] env[62070]: DEBUG oslo_concurrency.lockutils [None req-910bde70-333a-4387-adb0-10c731f0cbeb tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 5.657s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1000.064024] env[62070]: DEBUG nova.objects.instance [None req-910bde70-333a-4387-adb0-10c731f0cbeb tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Lazy-loading 'resources' on Instance uuid 21bcb1a6-833b-48f3-8ee2-0e49c64a104f {{(pid=62070) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1000.091662] env[62070]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-61b8ba2c-9be8-4cee-9662-cb332219bb02 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.102385] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b856b80f-2ed0-47d7-b4f1-3a27c5c8aa4b {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.131454] env[62070]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-fb9748f0-889d-47ea-8be8-63ae9df5959b {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.139394] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72fa9d35-fda8-4981-9336-c7f4a4f28a99 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.153638] env[62070]: DEBUG nova.virt.hardware [None req-4c71a514-0b63-4572-bfc9-c9bc2441840c tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T09:21:37Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=43ea607c-7ece-4601-9b11-75c6a16aa7dd,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1000.154046] env[62070]: DEBUG nova.virt.hardware [None req-4c71a514-0b63-4572-bfc9-c9bc2441840c tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Flavor limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1000.154272] env[62070]: DEBUG nova.virt.hardware [None req-4c71a514-0b63-4572-bfc9-c9bc2441840c tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Image limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1000.154477] env[62070]: DEBUG nova.virt.hardware [None req-4c71a514-0b63-4572-bfc9-c9bc2441840c 
tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Flavor pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1000.154636] env[62070]: DEBUG nova.virt.hardware [None req-4c71a514-0b63-4572-bfc9-c9bc2441840c tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Image pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1000.154784] env[62070]: DEBUG nova.virt.hardware [None req-4c71a514-0b63-4572-bfc9-c9bc2441840c tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1000.154992] env[62070]: DEBUG nova.virt.hardware [None req-4c71a514-0b63-4572-bfc9-c9bc2441840c tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1000.155173] env[62070]: DEBUG nova.virt.hardware [None req-4c71a514-0b63-4572-bfc9-c9bc2441840c tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1000.155341] env[62070]: DEBUG nova.virt.hardware [None req-4c71a514-0b63-4572-bfc9-c9bc2441840c tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Got 1 possible topologies {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1000.155509] env[62070]: DEBUG nova.virt.hardware [None req-4c71a514-0b63-4572-bfc9-c9bc2441840c tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1000.155727] env[62070]: DEBUG nova.virt.hardware [None req-4c71a514-0b63-4572-bfc9-c9bc2441840c tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1000.161157] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-35cfb2b5-150c-426d-97d9-ec4124c80612 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.182576] env[62070]: DEBUG nova.objects.instance [None req-6634cfa8-faf4-49b4-9551-5727bdb6dbd8 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Lazy-loading 'flavor' on Instance uuid 7dc27fe6-495f-498d-88fe-a99ddc19a21c {{(pid=62070) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1000.195111] env[62070]: DEBUG oslo_vmware.api [None req-4c71a514-0b63-4572-bfc9-c9bc2441840c tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Waiting for the task: (returnval){ [ 1000.195111] env[62070]: value = "task-1122238" [ 1000.195111] env[62070]: _type = "Task" [ 
1000.195111] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1000.196041] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b274203-53c2-4302-96d4-98330f8c73ed {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.218577] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9dac2ab1-5c38-4b04-ba1b-0a5ad3aef489 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.221069] env[62070]: DEBUG oslo_vmware.api [None req-4c71a514-0b63-4572-bfc9-c9bc2441840c tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Task: {'id': task-1122238, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1000.223640] env[62070]: DEBUG nova.compute.manager [req-48527ce2-253d-45d3-9119-abb1183afc8e req-d527147b-c7af-49cc-b353-4ba29f70f323 service nova] [instance: b9e5a798-9ce9-4e36-8c32-bc32b0dc1eae] Received event network-vif-plugged-f30abb0e-6245-49cc-912a-4685dac5186b {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1000.223698] env[62070]: DEBUG oslo_concurrency.lockutils [req-48527ce2-253d-45d3-9119-abb1183afc8e req-d527147b-c7af-49cc-b353-4ba29f70f323 service nova] Acquiring lock "b9e5a798-9ce9-4e36-8c32-bc32b0dc1eae-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1000.223907] env[62070]: DEBUG oslo_concurrency.lockutils [req-48527ce2-253d-45d3-9119-abb1183afc8e req-d527147b-c7af-49cc-b353-4ba29f70f323 service nova] Lock "b9e5a798-9ce9-4e36-8c32-bc32b0dc1eae-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1000.224110] env[62070]: DEBUG oslo_concurrency.lockutils [req-48527ce2-253d-45d3-9119-abb1183afc8e req-d527147b-c7af-49cc-b353-4ba29f70f323 service nova] Lock "b9e5a798-9ce9-4e36-8c32-bc32b0dc1eae-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1000.224308] env[62070]: DEBUG nova.compute.manager [req-48527ce2-253d-45d3-9119-abb1183afc8e req-d527147b-c7af-49cc-b353-4ba29f70f323 service nova] [instance: b9e5a798-9ce9-4e36-8c32-bc32b0dc1eae] No waiting events found dispatching network-vif-plugged-f30abb0e-6245-49cc-912a-4685dac5186b {{(pid=62070) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1000.224544] env[62070]: WARNING nova.compute.manager [req-48527ce2-253d-45d3-9119-abb1183afc8e req-d527147b-c7af-49cc-b353-4ba29f70f323 service nova] [instance: b9e5a798-9ce9-4e36-8c32-bc32b0dc1eae] Received unexpected event network-vif-plugged-f30abb0e-6245-49cc-912a-4685dac5186b for instance with vm_state building and task_state spawning. 
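
The "Acquiring lock ... / acquired ... / released" triplets above (for example the per-instance "b9e5a798-...-events" lock taken while popping the network-vif-plugged event) all come from oslo.concurrency's lock wrapper. A minimal sketch of that pattern is below; the lock name, the helper name pop_instance_event and the timing printout are illustrative placeholders, only lockutils.lock() itself is the real oslo.concurrency API.

    # Sketch only: reproduces the acquire/release pattern seen in the log,
    # not Nova's actual InstanceEvents code.
    import time

    from oslo_concurrency import lockutils


    def pop_instance_event(instance_uuid, event_name):
        lock_name = f"{instance_uuid}-events"   # hypothetical lock name format
        start = time.monotonic()
        # lockutils.lock() is a context manager; the DEBUG "Acquiring lock /
        # acquired / released" lines in the log are emitted by its internal
        # wrapper around acquire and release.
        with lockutils.lock(lock_name):
            waited = time.monotonic() - start
            print(f'Lock "{lock_name}" acquired after waiting {waited:.3f}s')
            # ... look up and pop the waiter registered for event_name here ...
        print(f'Lock "{lock_name}" released')


    if __name__ == "__main__":
        pop_instance_event("b9e5a798-9ce9-4e36-8c32-bc32b0dc1eae",
                           "network-vif-plugged")
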
[ 1000.233930] env[62070]: DEBUG nova.virt.block_device [None req-636c2657-6edc-4f64-8b0d-e6d14b26af48 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 84c00e4a-20d3-4739-8535-e27076d85a89] Updating existing volume attachment record: 804dca2e-f44c-407c-bc2d-1dfe0713264e {{(pid=62070) _volume_attach /opt/stack/nova/nova/virt/block_device.py:679}} [ 1000.282820] env[62070]: DEBUG oslo_vmware.api [None req-70c22416-1d5b-4f27-873a-b05d521d4159 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Task: {'id': task-1122237, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.183064} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1000.283173] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-70c22416-1d5b-4f27-873a-b05d521d4159 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Deleted the datastore file {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1000.283415] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-70c22416-1d5b-4f27-873a-b05d521d4159 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: f810eab8-1c8c-4f7a-8acd-f46ac5e6d31f] Deleted contents of the VM from datastore datastore1 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1000.283647] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-70c22416-1d5b-4f27-873a-b05d521d4159 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: f810eab8-1c8c-4f7a-8acd-f46ac5e6d31f] Instance destroyed {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1000.283874] env[62070]: INFO nova.compute.manager [None req-70c22416-1d5b-4f27-873a-b05d521d4159 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: f810eab8-1c8c-4f7a-8acd-f46ac5e6d31f] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1000.284250] env[62070]: DEBUG oslo.service.loopingcall [None req-70c22416-1d5b-4f27-873a-b05d521d4159 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1000.284511] env[62070]: DEBUG nova.compute.manager [-] [instance: f810eab8-1c8c-4f7a-8acd-f46ac5e6d31f] Deallocating network for instance {{(pid=62070) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1000.284613] env[62070]: DEBUG nova.network.neutron [-] [instance: f810eab8-1c8c-4f7a-8acd-f46ac5e6d31f] deallocate_for_instance() {{(pid=62070) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1000.293726] env[62070]: DEBUG nova.network.neutron [None req-93767b52-6617-4bde-8e1b-f022a96d60f4 tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] [instance: b9e5a798-9ce9-4e36-8c32-bc32b0dc1eae] Successfully updated port: f30abb0e-6245-49cc-912a-4685dac5186b {{(pid=62070) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1000.431659] env[62070]: INFO nova.compute.manager [-] [instance: 6cba961f-f9f9-4d3c-853a-049a014c9dbb] Took 1.23 seconds to deallocate network for instance. 
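
The entries above trace the hypervisor-side destroy of instance f810eab8: PowerOffVM_Task, UnregisterVM, DeleteDatastoreFile_Task, with each returned task polled until "completed successfully". The sketch below shows that call order and a generic polling loop under stated assumptions: the session object, its invoke/get_task_info helpers and wait_for_task here are placeholders standing in for oslo.vmware's VMwareAPISession and its task-wait machinery; only the sequence of vSphere operations is taken from the log.

    # Rough sketch of the destroy sequence recorded above (assumed helper names).
    import time


    def destroy_on_hypervisor(session, vm_ref, datastore_path):
        # 1. Power off the VM and wait for the vCenter task to finish.
        power_task = session.invoke("PowerOffVM_Task", vm_ref)
        wait_for_task(session, power_task)

        # 2. Unregister the VM (no task object is returned for this call).
        session.invoke("UnregisterVM", vm_ref)

        # 3. Delete the instance directory from the datastore and wait again.
        delete_task = session.invoke("DeleteDatastoreFile_Task",
                                     session.file_manager,
                                     name=datastore_path)
        wait_for_task(session, delete_task)


    def wait_for_task(session, task_ref, poll_interval=0.5):
        """Poll task info until a terminal state is reached (sketch only)."""
        while True:
            info = session.get_task_info(task_ref)   # hypothetical helper
            if info.state == "success":
                return info.result
            if info.state == "error":
                raise RuntimeError(info.error)
            time.sleep(poll_interval)
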
[ 1000.457503] env[62070]: DEBUG nova.network.neutron [None req-5af751bb-55de-4464-945f-4333bd2b85cc tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: b101c79a-abfd-4104-aaed-096995fb2337] Port 5b226cbf-df38-4b34-b591-7afc6de0a88c binding to destination host cpu-1 is already ACTIVE {{(pid=62070) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3171}} [ 1000.565941] env[62070]: DEBUG nova.compute.utils [None req-9026fc09-39c2-473e-a4e2-4fc03ac7d742 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Using /dev/sd instead of None {{(pid=62070) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1000.570057] env[62070]: DEBUG nova.compute.manager [None req-9026fc09-39c2-473e-a4e2-4fc03ac7d742 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] [instance: 5cccd79d-d243-49db-8581-718dd594f3b3] Allocating IP information in the background. {{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1000.570251] env[62070]: DEBUG nova.network.neutron [None req-9026fc09-39c2-473e-a4e2-4fc03ac7d742 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] [instance: 5cccd79d-d243-49db-8581-718dd594f3b3] allocate_for_instance() {{(pid=62070) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1000.613674] env[62070]: DEBUG nova.policy [None req-9026fc09-39c2-473e-a4e2-4fc03ac7d742 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '764054cb62c34dfba758826f43a6fdaa', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e85c0cc8e0f544bfbb76970d3123fbb7', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62070) authorize /opt/stack/nova/nova/policy.py:201}} [ 1000.707932] env[62070]: DEBUG oslo_vmware.api [None req-4c71a514-0b63-4572-bfc9-c9bc2441840c tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Task: {'id': task-1122238, 'name': ReconfigVM_Task, 'duration_secs': 0.265936} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1000.710866] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-4c71a514-0b63-4572-bfc9-c9bc2441840c tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 71aead12-a182-40a7-b5a9-91c01271b800] Updating instance '71aead12-a182-40a7-b5a9-91c01271b800' progress to 33 {{(pid=62070) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 1000.797333] env[62070]: DEBUG oslo_concurrency.lockutils [None req-93767b52-6617-4bde-8e1b-f022a96d60f4 tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] Acquiring lock "refresh_cache-b9e5a798-9ce9-4e36-8c32-bc32b0dc1eae" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1000.797478] env[62070]: DEBUG oslo_concurrency.lockutils [None req-93767b52-6617-4bde-8e1b-f022a96d60f4 tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] Acquired lock "refresh_cache-b9e5a798-9ce9-4e36-8c32-bc32b0dc1eae" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1000.797626] env[62070]: DEBUG nova.network.neutron [None req-93767b52-6617-4bde-8e1b-f022a96d60f4 tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] [instance: b9e5a798-9ce9-4e36-8c32-bc32b0dc1eae] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1000.833789] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d401484c-f3f5-42f6-a591-0b9bce93fab6 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.841687] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3806166-edd2-4320-9d9e-eb8aaca1986e {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.873226] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e23c271-4845-4252-a15a-6c5fd88eabb9 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.881145] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8311d811-3d71-435d-9960-a16355770759 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.894407] env[62070]: DEBUG nova.compute.provider_tree [None req-910bde70-333a-4387-adb0-10c731f0cbeb tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1000.915945] env[62070]: DEBUG nova.network.neutron [None req-9026fc09-39c2-473e-a4e2-4fc03ac7d742 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] [instance: 5cccd79d-d243-49db-8581-718dd594f3b3] Successfully created port: a423c3f9-0b61-4375-9131-a98f082c1193 {{(pid=62070) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1000.938257] env[62070]: DEBUG 
oslo_concurrency.lockutils [None req-55bec81b-9bf5-490f-96d6-8638d78a6285 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1001.005244] env[62070]: DEBUG nova.network.neutron [-] [instance: f810eab8-1c8c-4f7a-8acd-f46ac5e6d31f] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1001.071825] env[62070]: DEBUG nova.compute.manager [None req-9026fc09-39c2-473e-a4e2-4fc03ac7d742 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] [instance: 5cccd79d-d243-49db-8581-718dd594f3b3] Start building block device mappings for instance. {{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1001.203745] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6634cfa8-faf4-49b4-9551-5727bdb6dbd8 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Lock "7dc27fe6-495f-498d-88fe-a99ddc19a21c" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 4.262s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1001.217649] env[62070]: DEBUG nova.virt.hardware [None req-4c71a514-0b63-4572-bfc9-c9bc2441840c tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T09:21:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=43ea607c-7ece-4601-9b11-75c6a16aa7dd,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1001.217962] env[62070]: DEBUG nova.virt.hardware [None req-4c71a514-0b63-4572-bfc9-c9bc2441840c tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Flavor limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1001.218155] env[62070]: DEBUG nova.virt.hardware [None req-4c71a514-0b63-4572-bfc9-c9bc2441840c tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Image limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1001.218349] env[62070]: DEBUG nova.virt.hardware [None req-4c71a514-0b63-4572-bfc9-c9bc2441840c tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Flavor pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1001.218501] env[62070]: DEBUG nova.virt.hardware [None req-4c71a514-0b63-4572-bfc9-c9bc2441840c tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Image pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:392}} [ 1001.218650] env[62070]: DEBUG nova.virt.hardware [None req-4c71a514-0b63-4572-bfc9-c9bc2441840c tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1001.218906] env[62070]: DEBUG nova.virt.hardware [None req-4c71a514-0b63-4572-bfc9-c9bc2441840c tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1001.219409] env[62070]: DEBUG nova.virt.hardware [None req-4c71a514-0b63-4572-bfc9-c9bc2441840c tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1001.219639] env[62070]: DEBUG nova.virt.hardware [None req-4c71a514-0b63-4572-bfc9-c9bc2441840c tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Got 1 possible topologies {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1001.219818] env[62070]: DEBUG nova.virt.hardware [None req-4c71a514-0b63-4572-bfc9-c9bc2441840c tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1001.220041] env[62070]: DEBUG nova.virt.hardware [None req-4c71a514-0b63-4572-bfc9-c9bc2441840c tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1001.225272] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-4c71a514-0b63-4572-bfc9-c9bc2441840c tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 71aead12-a182-40a7-b5a9-91c01271b800] Reconfiguring VM instance instance-00000022 to detach disk 2000 {{(pid=62070) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1001.226140] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-16e9587f-f272-4cc4-a0ad-a8b228daeecf {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.246877] env[62070]: DEBUG oslo_vmware.api [None req-4c71a514-0b63-4572-bfc9-c9bc2441840c tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Waiting for the task: (returnval){ [ 1001.246877] env[62070]: value = "task-1122239" [ 1001.246877] env[62070]: _type = "Task" [ 1001.246877] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1001.256022] env[62070]: DEBUG oslo_vmware.api [None req-4c71a514-0b63-4572-bfc9-c9bc2441840c tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Task: {'id': task-1122239, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1001.340988] env[62070]: DEBUG nova.network.neutron [None req-93767b52-6617-4bde-8e1b-f022a96d60f4 tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] [instance: b9e5a798-9ce9-4e36-8c32-bc32b0dc1eae] Instance cache missing network info. {{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1001.397240] env[62070]: DEBUG nova.scheduler.client.report [None req-910bde70-333a-4387-adb0-10c731f0cbeb tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1001.409079] env[62070]: DEBUG oslo_concurrency.lockutils [None req-3a47a147-5701-4ad4-a769-76d1ae8c53df tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Acquiring lock "7dc27fe6-495f-498d-88fe-a99ddc19a21c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1001.409362] env[62070]: DEBUG oslo_concurrency.lockutils [None req-3a47a147-5701-4ad4-a769-76d1ae8c53df tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Lock "7dc27fe6-495f-498d-88fe-a99ddc19a21c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1001.409577] env[62070]: DEBUG oslo_concurrency.lockutils [None req-3a47a147-5701-4ad4-a769-76d1ae8c53df tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Acquiring lock "7dc27fe6-495f-498d-88fe-a99ddc19a21c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1001.409767] env[62070]: DEBUG oslo_concurrency.lockutils [None req-3a47a147-5701-4ad4-a769-76d1ae8c53df tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Lock "7dc27fe6-495f-498d-88fe-a99ddc19a21c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1001.409937] env[62070]: DEBUG oslo_concurrency.lockutils [None req-3a47a147-5701-4ad4-a769-76d1ae8c53df tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Lock "7dc27fe6-495f-498d-88fe-a99ddc19a21c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62070) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1001.412133] env[62070]: INFO nova.compute.manager [None req-3a47a147-5701-4ad4-a769-76d1ae8c53df tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] [instance: 7dc27fe6-495f-498d-88fe-a99ddc19a21c] Terminating instance [ 1001.414533] env[62070]: DEBUG nova.compute.manager [None req-3a47a147-5701-4ad4-a769-76d1ae8c53df tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] [instance: 7dc27fe6-495f-498d-88fe-a99ddc19a21c] Start destroying the instance on the hypervisor. {{(pid=62070) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1001.414755] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-3a47a147-5701-4ad4-a769-76d1ae8c53df tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] [instance: 7dc27fe6-495f-498d-88fe-a99ddc19a21c] Destroying instance {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1001.415750] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4e8579f-2150-4a04-aed9-133eac4e34c3 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.423474] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-3a47a147-5701-4ad4-a769-76d1ae8c53df tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] [instance: 7dc27fe6-495f-498d-88fe-a99ddc19a21c] Powering off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1001.423704] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f9afb751-172a-49dd-bcf5-0369ec20f209 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.429687] env[62070]: DEBUG oslo_vmware.api [None req-3a47a147-5701-4ad4-a769-76d1ae8c53df tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Waiting for the task: (returnval){ [ 1001.429687] env[62070]: value = "task-1122240" [ 1001.429687] env[62070]: _type = "Task" [ 1001.429687] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1001.440649] env[62070]: DEBUG oslo_vmware.api [None req-3a47a147-5701-4ad4-a769-76d1ae8c53df tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Task: {'id': task-1122240, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1001.476321] env[62070]: DEBUG nova.network.neutron [None req-93767b52-6617-4bde-8e1b-f022a96d60f4 tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] [instance: b9e5a798-9ce9-4e36-8c32-bc32b0dc1eae] Updating instance_info_cache with network_info: [{"id": "f30abb0e-6245-49cc-912a-4685dac5186b", "address": "fa:16:3e:bb:06:6b", "network": {"id": "fd8b220c-f20c-489e-9c20-28b886709536", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-452165452-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e1960779e94c4e119497a0c1117f54fc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7bcd9d2d-25c8-41ad-9a4a-93b9029ba993", "external-id": "nsx-vlan-transportzone-734", "segmentation_id": 734, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf30abb0e-62", "ovs_interfaceid": "f30abb0e-6245-49cc-912a-4685dac5186b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1001.481365] env[62070]: DEBUG nova.compute.manager [req-bca4c3fc-f4ff-4588-bedc-6a2090b88c14 req-618422e5-46cc-4aa2-91a4-133de40494cb service nova] [instance: f810eab8-1c8c-4f7a-8acd-f46ac5e6d31f] Received event network-vif-deleted-0b063dd9-e920-4bb9-88bf-9f98085170bb {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1001.485533] env[62070]: DEBUG oslo_concurrency.lockutils [None req-5af751bb-55de-4464-945f-4333bd2b85cc tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Acquiring lock "b101c79a-abfd-4104-aaed-096995fb2337-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1001.485810] env[62070]: DEBUG oslo_concurrency.lockutils [None req-5af751bb-55de-4464-945f-4333bd2b85cc tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Lock "b101c79a-abfd-4104-aaed-096995fb2337-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1001.486021] env[62070]: DEBUG oslo_concurrency.lockutils [None req-5af751bb-55de-4464-945f-4333bd2b85cc tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Lock "b101c79a-abfd-4104-aaed-096995fb2337-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1001.507637] env[62070]: INFO nova.compute.manager [-] [instance: f810eab8-1c8c-4f7a-8acd-f46ac5e6d31f] Took 
1.22 seconds to deallocate network for instance. [ 1001.759114] env[62070]: DEBUG oslo_vmware.api [None req-4c71a514-0b63-4572-bfc9-c9bc2441840c tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Task: {'id': task-1122239, 'name': ReconfigVM_Task, 'duration_secs': 0.295198} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1001.759114] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-4c71a514-0b63-4572-bfc9-c9bc2441840c tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 71aead12-a182-40a7-b5a9-91c01271b800] Reconfigured VM instance instance-00000022 to detach disk 2000 {{(pid=62070) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1001.759527] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-312c5e93-3a7b-4376-8591-9acef2cfd7c7 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.783079] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-4c71a514-0b63-4572-bfc9-c9bc2441840c tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 71aead12-a182-40a7-b5a9-91c01271b800] Reconfiguring VM instance instance-00000022 to attach disk [datastore2] 71aead12-a182-40a7-b5a9-91c01271b800/71aead12-a182-40a7-b5a9-91c01271b800.vmdk or device None with type thin {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1001.783402] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-381d6395-473d-4866-a48b-85316967eaa5 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.803472] env[62070]: DEBUG oslo_vmware.api [None req-4c71a514-0b63-4572-bfc9-c9bc2441840c tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Waiting for the task: (returnval){ [ 1001.803472] env[62070]: value = "task-1122241" [ 1001.803472] env[62070]: _type = "Task" [ 1001.803472] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1001.812240] env[62070]: DEBUG oslo_vmware.api [None req-4c71a514-0b63-4572-bfc9-c9bc2441840c tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Task: {'id': task-1122241, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1001.905012] env[62070]: DEBUG oslo_concurrency.lockutils [None req-910bde70-333a-4387-adb0-10c731f0cbeb tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.842s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1001.907598] env[62070]: DEBUG oslo_concurrency.lockutils [None req-63c53e37-78c7-4c69-b978-95d3e246ef28 tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 7.361s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1001.907924] env[62070]: DEBUG nova.objects.instance [None req-63c53e37-78c7-4c69-b978-95d3e246ef28 tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Lazy-loading 'resources' on Instance uuid e4cf42ff-8440-42bc-b629-4b712fd94e99 {{(pid=62070) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1001.927611] env[62070]: INFO nova.scheduler.client.report [None req-910bde70-333a-4387-adb0-10c731f0cbeb tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Deleted allocations for instance 21bcb1a6-833b-48f3-8ee2-0e49c64a104f [ 1001.942511] env[62070]: DEBUG oslo_vmware.api [None req-3a47a147-5701-4ad4-a769-76d1ae8c53df tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Task: {'id': task-1122240, 'name': PowerOffVM_Task, 'duration_secs': 0.202165} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1001.942776] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-3a47a147-5701-4ad4-a769-76d1ae8c53df tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] [instance: 7dc27fe6-495f-498d-88fe-a99ddc19a21c] Powered off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1001.942953] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-3a47a147-5701-4ad4-a769-76d1ae8c53df tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] [instance: 7dc27fe6-495f-498d-88fe-a99ddc19a21c] Unregistering the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1001.943226] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e4c9b5d8-aebe-4b56-8d23-13a9b9df647a {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.985804] env[62070]: DEBUG oslo_concurrency.lockutils [None req-93767b52-6617-4bde-8e1b-f022a96d60f4 tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] Releasing lock "refresh_cache-b9e5a798-9ce9-4e36-8c32-bc32b0dc1eae" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1001.986196] env[62070]: DEBUG nova.compute.manager [None req-93767b52-6617-4bde-8e1b-f022a96d60f4 tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] [instance: b9e5a798-9ce9-4e36-8c32-bc32b0dc1eae] Instance network_info: |[{"id": "f30abb0e-6245-49cc-912a-4685dac5186b", "address": "fa:16:3e:bb:06:6b", "network": {"id": "fd8b220c-f20c-489e-9c20-28b886709536", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-452165452-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e1960779e94c4e119497a0c1117f54fc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7bcd9d2d-25c8-41ad-9a4a-93b9029ba993", "external-id": "nsx-vlan-transportzone-734", "segmentation_id": 734, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf30abb0e-62", "ovs_interfaceid": "f30abb0e-6245-49cc-912a-4685dac5186b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1001.986629] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-93767b52-6617-4bde-8e1b-f022a96d60f4 tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] [instance: b9e5a798-9ce9-4e36-8c32-bc32b0dc1eae] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:bb:06:6b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7bcd9d2d-25c8-41ad-9a4a-93b9029ba993', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 
'f30abb0e-6245-49cc-912a-4685dac5186b', 'vif_model': 'vmxnet3'}] {{(pid=62070) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1001.994440] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-93767b52-6617-4bde-8e1b-f022a96d60f4 tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] Creating folder: Project (e1960779e94c4e119497a0c1117f54fc). Parent ref: group-v245319. {{(pid=62070) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1001.998233] env[62070]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3e902837-9271-41c2-915a-cfba89f22b69 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.014716] env[62070]: DEBUG oslo_concurrency.lockutils [None req-70c22416-1d5b-4f27-873a-b05d521d4159 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1002.015151] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-3a47a147-5701-4ad4-a769-76d1ae8c53df tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] [instance: 7dc27fe6-495f-498d-88fe-a99ddc19a21c] Unregistered the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1002.015498] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-3a47a147-5701-4ad4-a769-76d1ae8c53df tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] [instance: 7dc27fe6-495f-498d-88fe-a99ddc19a21c] Deleting contents of the VM from datastore datastore2 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1002.015815] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-3a47a147-5701-4ad4-a769-76d1ae8c53df tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Deleting the datastore file [datastore2] 7dc27fe6-495f-498d-88fe-a99ddc19a21c {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1002.017704] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f476734a-5e34-4f9e-ab25-6cc14c6b0c63 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.019800] env[62070]: INFO nova.virt.vmwareapi.vm_util [None req-93767b52-6617-4bde-8e1b-f022a96d60f4 tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] Created folder: Project (e1960779e94c4e119497a0c1117f54fc) in parent group-v245319. [ 1002.019986] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-93767b52-6617-4bde-8e1b-f022a96d60f4 tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] Creating folder: Instances. Parent ref: group-v245483. 
{{(pid=62070) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1002.020295] env[62070]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8fef789e-9ce7-49dd-a8d5-b6c68d3a6929 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.027068] env[62070]: DEBUG oslo_vmware.api [None req-3a47a147-5701-4ad4-a769-76d1ae8c53df tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Waiting for the task: (returnval){ [ 1002.027068] env[62070]: value = "task-1122244" [ 1002.027068] env[62070]: _type = "Task" [ 1002.027068] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1002.036803] env[62070]: DEBUG oslo_vmware.api [None req-3a47a147-5701-4ad4-a769-76d1ae8c53df tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Task: {'id': task-1122244, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1002.037698] env[62070]: INFO nova.virt.vmwareapi.vm_util [None req-93767b52-6617-4bde-8e1b-f022a96d60f4 tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] Created folder: Instances in parent group-v245483. [ 1002.037983] env[62070]: DEBUG oslo.service.loopingcall [None req-93767b52-6617-4bde-8e1b-f022a96d60f4 tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1002.038197] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b9e5a798-9ce9-4e36-8c32-bc32b0dc1eae] Creating VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1002.038407] env[62070]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-952e0916-49c1-4588-8f7f-cdb8ec1628fb {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.060240] env[62070]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1002.060240] env[62070]: value = "task-1122246" [ 1002.060240] env[62070]: _type = "Task" [ 1002.060240] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1002.070990] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1122246, 'name': CreateVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1002.090061] env[62070]: DEBUG nova.compute.manager [None req-9026fc09-39c2-473e-a4e2-4fc03ac7d742 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] [instance: 5cccd79d-d243-49db-8581-718dd594f3b3] Start spawning the instance on the hypervisor. 
{{(pid=62070) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1002.120723] env[62070]: DEBUG nova.virt.hardware [None req-9026fc09-39c2-473e-a4e2-4fc03ac7d742 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T09:21:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T09:21:17Z,direct_url=,disk_format='vmdk',id=43ea607c-7ece-4601-9b11-75c6a16aa7dd,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9d42cb2bbadf40d6b35f237f71234611',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T09:21:18Z,virtual_size=,visibility=), allow threads: False {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1002.121070] env[62070]: DEBUG nova.virt.hardware [None req-9026fc09-39c2-473e-a4e2-4fc03ac7d742 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Flavor limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1002.121309] env[62070]: DEBUG nova.virt.hardware [None req-9026fc09-39c2-473e-a4e2-4fc03ac7d742 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Image limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1002.121611] env[62070]: DEBUG nova.virt.hardware [None req-9026fc09-39c2-473e-a4e2-4fc03ac7d742 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Flavor pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1002.121763] env[62070]: DEBUG nova.virt.hardware [None req-9026fc09-39c2-473e-a4e2-4fc03ac7d742 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Image pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1002.122014] env[62070]: DEBUG nova.virt.hardware [None req-9026fc09-39c2-473e-a4e2-4fc03ac7d742 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1002.122320] env[62070]: DEBUG nova.virt.hardware [None req-9026fc09-39c2-473e-a4e2-4fc03ac7d742 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1002.122544] env[62070]: DEBUG nova.virt.hardware [None req-9026fc09-39c2-473e-a4e2-4fc03ac7d742 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1002.122783] env[62070]: DEBUG nova.virt.hardware [None 
req-9026fc09-39c2-473e-a4e2-4fc03ac7d742 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Got 1 possible topologies {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1002.123034] env[62070]: DEBUG nova.virt.hardware [None req-9026fc09-39c2-473e-a4e2-4fc03ac7d742 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1002.123464] env[62070]: DEBUG nova.virt.hardware [None req-9026fc09-39c2-473e-a4e2-4fc03ac7d742 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1002.124653] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5f08f95-cc60-4afb-86af-98dcd04f4444 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.133512] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4def6b74-d3b3-465d-9b55-888163169800 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.316701] env[62070]: DEBUG oslo_vmware.api [None req-4c71a514-0b63-4572-bfc9-c9bc2441840c tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Task: {'id': task-1122241, 'name': ReconfigVM_Task, 'duration_secs': 0.301601} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1002.317156] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-4c71a514-0b63-4572-bfc9-c9bc2441840c tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 71aead12-a182-40a7-b5a9-91c01271b800] Reconfigured VM instance instance-00000022 to attach disk [datastore2] 71aead12-a182-40a7-b5a9-91c01271b800/71aead12-a182-40a7-b5a9-91c01271b800.vmdk or device None with type thin {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1002.317602] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-4c71a514-0b63-4572-bfc9-c9bc2441840c tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 71aead12-a182-40a7-b5a9-91c01271b800] Updating instance '71aead12-a182-40a7-b5a9-91c01271b800' progress to 50 {{(pid=62070) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 1002.330770] env[62070]: DEBUG nova.compute.manager [req-b967efdf-519f-46d7-a224-c385b765a219 req-af8c41b2-0968-4b75-ab40-4cc3d95331b5 service nova] [instance: b9e5a798-9ce9-4e36-8c32-bc32b0dc1eae] Received event network-changed-f30abb0e-6245-49cc-912a-4685dac5186b {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1002.330971] env[62070]: DEBUG nova.compute.manager [req-b967efdf-519f-46d7-a224-c385b765a219 req-af8c41b2-0968-4b75-ab40-4cc3d95331b5 service nova] [instance: b9e5a798-9ce9-4e36-8c32-bc32b0dc1eae] Refreshing instance network info cache due to event network-changed-f30abb0e-6245-49cc-912a-4685dac5186b. 
{{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1002.331223] env[62070]: DEBUG oslo_concurrency.lockutils [req-b967efdf-519f-46d7-a224-c385b765a219 req-af8c41b2-0968-4b75-ab40-4cc3d95331b5 service nova] Acquiring lock "refresh_cache-b9e5a798-9ce9-4e36-8c32-bc32b0dc1eae" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1002.331374] env[62070]: DEBUG oslo_concurrency.lockutils [req-b967efdf-519f-46d7-a224-c385b765a219 req-af8c41b2-0968-4b75-ab40-4cc3d95331b5 service nova] Acquired lock "refresh_cache-b9e5a798-9ce9-4e36-8c32-bc32b0dc1eae" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1002.331545] env[62070]: DEBUG nova.network.neutron [req-b967efdf-519f-46d7-a224-c385b765a219 req-af8c41b2-0968-4b75-ab40-4cc3d95331b5 service nova] [instance: b9e5a798-9ce9-4e36-8c32-bc32b0dc1eae] Refreshing network info cache for port f30abb0e-6245-49cc-912a-4685dac5186b {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1002.350213] env[62070]: DEBUG nova.virt.hardware [None req-636c2657-6edc-4f64-8b0d-e6d14b26af48 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T09:21:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T09:21:17Z,direct_url=,disk_format='vmdk',id=43ea607c-7ece-4601-9b11-75c6a16aa7dd,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9d42cb2bbadf40d6b35f237f71234611',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T09:21:18Z,virtual_size=,visibility=), allow threads: False {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1002.350595] env[62070]: DEBUG nova.virt.hardware [None req-636c2657-6edc-4f64-8b0d-e6d14b26af48 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Flavor limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1002.350842] env[62070]: DEBUG nova.virt.hardware [None req-636c2657-6edc-4f64-8b0d-e6d14b26af48 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Image limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1002.351187] env[62070]: DEBUG nova.virt.hardware [None req-636c2657-6edc-4f64-8b0d-e6d14b26af48 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Flavor pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1002.351363] env[62070]: DEBUG nova.virt.hardware [None req-636c2657-6edc-4f64-8b0d-e6d14b26af48 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Image pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1002.353549] env[62070]: DEBUG nova.virt.hardware [None req-636c2657-6edc-4f64-8b0d-e6d14b26af48 
tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1002.353549] env[62070]: DEBUG nova.virt.hardware [None req-636c2657-6edc-4f64-8b0d-e6d14b26af48 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1002.353549] env[62070]: DEBUG nova.virt.hardware [None req-636c2657-6edc-4f64-8b0d-e6d14b26af48 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1002.353549] env[62070]: DEBUG nova.virt.hardware [None req-636c2657-6edc-4f64-8b0d-e6d14b26af48 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Got 1 possible topologies {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1002.353549] env[62070]: DEBUG nova.virt.hardware [None req-636c2657-6edc-4f64-8b0d-e6d14b26af48 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1002.353549] env[62070]: DEBUG nova.virt.hardware [None req-636c2657-6edc-4f64-8b0d-e6d14b26af48 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1002.354075] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80f7db2b-6726-42e8-b6cd-29dc0012f728 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.363414] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-204e7817-68c7-4f29-b194-73da1d080513 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.378510] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-636c2657-6edc-4f64-8b0d-e6d14b26af48 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 84c00e4a-20d3-4739-8535-e27076d85a89] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:48:de:00', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'cf5bfbae-a882-4d34-be33-b31e274b3077', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0e90b544-5a90-4009-8f52-635e393cf106', 'vif_model': 'vmxnet3'}] {{(pid=62070) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1002.386119] env[62070]: DEBUG oslo.service.loopingcall [None req-636c2657-6edc-4f64-8b0d-e6d14b26af48 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1002.387196] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 84c00e4a-20d3-4739-8535-e27076d85a89] Creating VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1002.387441] env[62070]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9a70e108-f08f-4253-9da7-9475cfd15e9e {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.408336] env[62070]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1002.408336] env[62070]: value = "task-1122247" [ 1002.408336] env[62070]: _type = "Task" [ 1002.408336] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1002.420829] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1122247, 'name': CreateVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1002.442108] env[62070]: DEBUG oslo_concurrency.lockutils [None req-910bde70-333a-4387-adb0-10c731f0cbeb tempest-AttachInterfacesTestJSON-183791509 tempest-AttachInterfacesTestJSON-183791509-project-member] Lock "21bcb1a6-833b-48f3-8ee2-0e49c64a104f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 10.933s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1002.544244] env[62070]: DEBUG oslo_vmware.api [None req-3a47a147-5701-4ad4-a769-76d1ae8c53df tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Task: {'id': task-1122244, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.219439} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1002.544608] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-3a47a147-5701-4ad4-a769-76d1ae8c53df tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Deleted the datastore file {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1002.544806] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-3a47a147-5701-4ad4-a769-76d1ae8c53df tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] [instance: 7dc27fe6-495f-498d-88fe-a99ddc19a21c] Deleted contents of the VM from datastore datastore2 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1002.545073] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-3a47a147-5701-4ad4-a769-76d1ae8c53df tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] [instance: 7dc27fe6-495f-498d-88fe-a99ddc19a21c] Instance destroyed {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1002.545189] env[62070]: INFO nova.compute.manager [None req-3a47a147-5701-4ad4-a769-76d1ae8c53df tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] [instance: 7dc27fe6-495f-498d-88fe-a99ddc19a21c] Took 1.13 seconds to destroy the instance on the hypervisor. 
[ 1002.545449] env[62070]: DEBUG oslo.service.loopingcall [None req-3a47a147-5701-4ad4-a769-76d1ae8c53df tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1002.545650] env[62070]: DEBUG nova.compute.manager [-] [instance: 7dc27fe6-495f-498d-88fe-a99ddc19a21c] Deallocating network for instance {{(pid=62070) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1002.545748] env[62070]: DEBUG nova.network.neutron [-] [instance: 7dc27fe6-495f-498d-88fe-a99ddc19a21c] deallocate_for_instance() {{(pid=62070) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1002.574303] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1122246, 'name': CreateVM_Task, 'duration_secs': 0.392179} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1002.577813] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b9e5a798-9ce9-4e36-8c32-bc32b0dc1eae] Created VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1002.579182] env[62070]: DEBUG oslo_concurrency.lockutils [None req-5af751bb-55de-4464-945f-4333bd2b85cc tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Acquiring lock "refresh_cache-b101c79a-abfd-4104-aaed-096995fb2337" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1002.579364] env[62070]: DEBUG oslo_concurrency.lockutils [None req-5af751bb-55de-4464-945f-4333bd2b85cc tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Acquired lock "refresh_cache-b101c79a-abfd-4104-aaed-096995fb2337" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1002.579856] env[62070]: DEBUG nova.network.neutron [None req-5af751bb-55de-4464-945f-4333bd2b85cc tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: b101c79a-abfd-4104-aaed-096995fb2337] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1002.582091] env[62070]: DEBUG oslo_concurrency.lockutils [None req-93767b52-6617-4bde-8e1b-f022a96d60f4 tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1002.582284] env[62070]: DEBUG oslo_concurrency.lockutils [None req-93767b52-6617-4bde-8e1b-f022a96d60f4 tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] Acquired lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1002.582611] env[62070]: DEBUG oslo_concurrency.lockutils [None req-93767b52-6617-4bde-8e1b-f022a96d60f4 tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] Acquired external semaphore 
"[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1002.583544] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cdc351cd-93aa-4a59-aa81-d30aa9b4729e {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.591151] env[62070]: DEBUG oslo_vmware.api [None req-93767b52-6617-4bde-8e1b-f022a96d60f4 tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] Waiting for the task: (returnval){ [ 1002.591151] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]52a95a02-c04b-519f-1dd4-df460fe08c35" [ 1002.591151] env[62070]: _type = "Task" [ 1002.591151] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1002.604817] env[62070]: DEBUG oslo_vmware.api [None req-93767b52-6617-4bde-8e1b-f022a96d60f4 tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52a95a02-c04b-519f-1dd4-df460fe08c35, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1002.638926] env[62070]: DEBUG nova.network.neutron [None req-9026fc09-39c2-473e-a4e2-4fc03ac7d742 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] [instance: 5cccd79d-d243-49db-8581-718dd594f3b3] Successfully updated port: a423c3f9-0b61-4375-9131-a98f082c1193 {{(pid=62070) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1002.720263] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dab81ef4-723a-45ab-b285-7044e0107e30 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.729398] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-593a0ed4-1a8c-42d0-9b90-06bcc5f2bfac {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.762468] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fbd9f44-5ec6-4783-ab5d-b48ff03c5fc8 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.771879] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c828a12-0354-4639-bb65-c45af1815c7e {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.786202] env[62070]: DEBUG nova.compute.provider_tree [None req-63c53e37-78c7-4c69-b978-95d3e246ef28 tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1002.830990] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62dae329-02f8-4da9-b950-5767eb04718a {{(pid=62070) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.854260] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b84ab358-5871-4021-b2c3-d4467582abd0 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.874197] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-4c71a514-0b63-4572-bfc9-c9bc2441840c tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 71aead12-a182-40a7-b5a9-91c01271b800] Updating instance '71aead12-a182-40a7-b5a9-91c01271b800' progress to 67 {{(pid=62070) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 1002.921915] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1122247, 'name': CreateVM_Task} progress is 25%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1003.101548] env[62070]: DEBUG oslo_vmware.api [None req-93767b52-6617-4bde-8e1b-f022a96d60f4 tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52a95a02-c04b-519f-1dd4-df460fe08c35, 'name': SearchDatastore_Task, 'duration_secs': 0.014275} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1003.102046] env[62070]: DEBUG oslo_concurrency.lockutils [None req-93767b52-6617-4bde-8e1b-f022a96d60f4 tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] Releasing lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1003.102964] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-93767b52-6617-4bde-8e1b-f022a96d60f4 tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] [instance: b9e5a798-9ce9-4e36-8c32-bc32b0dc1eae] Processing image 43ea607c-7ece-4601-9b11-75c6a16aa7dd {{(pid=62070) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1003.103657] env[62070]: DEBUG oslo_concurrency.lockutils [None req-93767b52-6617-4bde-8e1b-f022a96d60f4 tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1003.103867] env[62070]: DEBUG oslo_concurrency.lockutils [None req-93767b52-6617-4bde-8e1b-f022a96d60f4 tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] Acquired lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1003.104124] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-93767b52-6617-4bde-8e1b-f022a96d60f4 tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1003.104457] env[62070]: DEBUG 
oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b09da79f-1947-4f26-ac56-c6ab6f0837e3 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.129140] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-93767b52-6617-4bde-8e1b-f022a96d60f4 tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1003.129399] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-93767b52-6617-4bde-8e1b-f022a96d60f4 tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62070) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1003.130467] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3fac4849-e2f7-4548-89e1-06e3fa414ebd {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.141612] env[62070]: DEBUG oslo_vmware.api [None req-93767b52-6617-4bde-8e1b-f022a96d60f4 tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] Waiting for the task: (returnval){ [ 1003.141612] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]5297f4a2-d958-7cab-59a2-2a4848d94be0" [ 1003.141612] env[62070]: _type = "Task" [ 1003.141612] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1003.150559] env[62070]: DEBUG oslo_concurrency.lockutils [None req-9026fc09-39c2-473e-a4e2-4fc03ac7d742 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Acquiring lock "refresh_cache-5cccd79d-d243-49db-8581-718dd594f3b3" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1003.150679] env[62070]: DEBUG oslo_concurrency.lockutils [None req-9026fc09-39c2-473e-a4e2-4fc03ac7d742 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Acquired lock "refresh_cache-5cccd79d-d243-49db-8581-718dd594f3b3" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1003.150790] env[62070]: DEBUG nova.network.neutron [None req-9026fc09-39c2-473e-a4e2-4fc03ac7d742 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] [instance: 5cccd79d-d243-49db-8581-718dd594f3b3] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1003.163086] env[62070]: DEBUG oslo_vmware.api [None req-93767b52-6617-4bde-8e1b-f022a96d60f4 tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]5297f4a2-d958-7cab-59a2-2a4848d94be0, 'name': SearchDatastore_Task, 'duration_secs': 0.01302} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1003.163748] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4e9c1eb1-b4ae-4ca6-a5c3-7760055bdfae {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.170718] env[62070]: DEBUG oslo_vmware.api [None req-93767b52-6617-4bde-8e1b-f022a96d60f4 tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] Waiting for the task: (returnval){ [ 1003.170718] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]523e1b33-782b-7584-98a5-f6b58a44d50d" [ 1003.170718] env[62070]: _type = "Task" [ 1003.170718] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1003.180604] env[62070]: DEBUG oslo_vmware.api [None req-93767b52-6617-4bde-8e1b-f022a96d60f4 tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]523e1b33-782b-7584-98a5-f6b58a44d50d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1003.240246] env[62070]: DEBUG nova.network.neutron [req-b967efdf-519f-46d7-a224-c385b765a219 req-af8c41b2-0968-4b75-ab40-4cc3d95331b5 service nova] [instance: b9e5a798-9ce9-4e36-8c32-bc32b0dc1eae] Updated VIF entry in instance network info cache for port f30abb0e-6245-49cc-912a-4685dac5186b. {{(pid=62070) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1003.240800] env[62070]: DEBUG nova.network.neutron [req-b967efdf-519f-46d7-a224-c385b765a219 req-af8c41b2-0968-4b75-ab40-4cc3d95331b5 service nova] [instance: b9e5a798-9ce9-4e36-8c32-bc32b0dc1eae] Updating instance_info_cache with network_info: [{"id": "f30abb0e-6245-49cc-912a-4685dac5186b", "address": "fa:16:3e:bb:06:6b", "network": {"id": "fd8b220c-f20c-489e-9c20-28b886709536", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-452165452-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e1960779e94c4e119497a0c1117f54fc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7bcd9d2d-25c8-41ad-9a4a-93b9029ba993", "external-id": "nsx-vlan-transportzone-734", "segmentation_id": 734, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf30abb0e-62", "ovs_interfaceid": "f30abb0e-6245-49cc-912a-4685dac5186b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1003.290388] env[62070]: DEBUG nova.scheduler.client.report [None req-63c53e37-78c7-4c69-b978-95d3e246ef28 tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on 
inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1003.421739] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1122247, 'name': CreateVM_Task, 'duration_secs': 0.763846} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1003.421918] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 84c00e4a-20d3-4739-8535-e27076d85a89] Created VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1003.422618] env[62070]: DEBUG oslo_concurrency.lockutils [None req-636c2657-6edc-4f64-8b0d-e6d14b26af48 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1003.422920] env[62070]: DEBUG oslo_concurrency.lockutils [None req-636c2657-6edc-4f64-8b0d-e6d14b26af48 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Acquired lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1003.423319] env[62070]: DEBUG oslo_concurrency.lockutils [None req-636c2657-6edc-4f64-8b0d-e6d14b26af48 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1003.423631] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cd9b89c9-4822-4335-941e-13940c17dd1d {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.425964] env[62070]: DEBUG nova.network.neutron [None req-4c71a514-0b63-4572-bfc9-c9bc2441840c tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 71aead12-a182-40a7-b5a9-91c01271b800] Port a3ed0957-14c2-4144-8d45-f4a0e5cb45ab binding to destination host cpu-1 is already ACTIVE {{(pid=62070) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3171}} [ 1003.431179] env[62070]: DEBUG oslo_vmware.api [None req-636c2657-6edc-4f64-8b0d-e6d14b26af48 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Waiting for the task: (returnval){ [ 1003.431179] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]5215bf5e-4cfb-b520-ae9a-08dbc7636213" [ 1003.431179] env[62070]: _type = "Task" [ 1003.431179] env[62070]: } to complete. 
{{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1003.439793] env[62070]: DEBUG oslo_vmware.api [None req-636c2657-6edc-4f64-8b0d-e6d14b26af48 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]5215bf5e-4cfb-b520-ae9a-08dbc7636213, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1003.450141] env[62070]: DEBUG nova.network.neutron [None req-5af751bb-55de-4464-945f-4333bd2b85cc tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: b101c79a-abfd-4104-aaed-096995fb2337] Updating instance_info_cache with network_info: [{"id": "5b226cbf-df38-4b34-b591-7afc6de0a88c", "address": "fa:16:3e:a3:e3:84", "network": {"id": "4888f989-958d-49ff-bf5a-06873e4cc624", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-906255456-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d079c0ef3ed745fcaf69dc728dca4466", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7e0240aa-a694-48fc-a0f9-6f2d3e71aa12", "external-id": "nsx-vlan-transportzone-249", "segmentation_id": 249, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5b226cbf-df", "ovs_interfaceid": "5b226cbf-df38-4b34-b591-7afc6de0a88c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1003.502236] env[62070]: DEBUG nova.compute.manager [req-7fb1dbf9-4437-4174-850e-a565c952815f req-5b27c113-7ceb-4208-a6c9-5d65f84ba570 service nova] [instance: 5cccd79d-d243-49db-8581-718dd594f3b3] Received event network-vif-plugged-a423c3f9-0b61-4375-9131-a98f082c1193 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1003.502461] env[62070]: DEBUG oslo_concurrency.lockutils [req-7fb1dbf9-4437-4174-850e-a565c952815f req-5b27c113-7ceb-4208-a6c9-5d65f84ba570 service nova] Acquiring lock "5cccd79d-d243-49db-8581-718dd594f3b3-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1003.502672] env[62070]: DEBUG oslo_concurrency.lockutils [req-7fb1dbf9-4437-4174-850e-a565c952815f req-5b27c113-7ceb-4208-a6c9-5d65f84ba570 service nova] Lock "5cccd79d-d243-49db-8581-718dd594f3b3-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1003.502840] env[62070]: DEBUG oslo_concurrency.lockutils [req-7fb1dbf9-4437-4174-850e-a565c952815f req-5b27c113-7ceb-4208-a6c9-5d65f84ba570 service nova] Lock "5cccd79d-d243-49db-8581-718dd594f3b3-events" "released" by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1003.503190] env[62070]: DEBUG nova.compute.manager [req-7fb1dbf9-4437-4174-850e-a565c952815f req-5b27c113-7ceb-4208-a6c9-5d65f84ba570 service nova] [instance: 5cccd79d-d243-49db-8581-718dd594f3b3] No waiting events found dispatching network-vif-plugged-a423c3f9-0b61-4375-9131-a98f082c1193 {{(pid=62070) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1003.503407] env[62070]: WARNING nova.compute.manager [req-7fb1dbf9-4437-4174-850e-a565c952815f req-5b27c113-7ceb-4208-a6c9-5d65f84ba570 service nova] [instance: 5cccd79d-d243-49db-8581-718dd594f3b3] Received unexpected event network-vif-plugged-a423c3f9-0b61-4375-9131-a98f082c1193 for instance with vm_state building and task_state spawning. [ 1003.503580] env[62070]: DEBUG nova.compute.manager [req-7fb1dbf9-4437-4174-850e-a565c952815f req-5b27c113-7ceb-4208-a6c9-5d65f84ba570 service nova] [instance: 5cccd79d-d243-49db-8581-718dd594f3b3] Received event network-changed-a423c3f9-0b61-4375-9131-a98f082c1193 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1003.503736] env[62070]: DEBUG nova.compute.manager [req-7fb1dbf9-4437-4174-850e-a565c952815f req-5b27c113-7ceb-4208-a6c9-5d65f84ba570 service nova] [instance: 5cccd79d-d243-49db-8581-718dd594f3b3] Refreshing instance network info cache due to event network-changed-a423c3f9-0b61-4375-9131-a98f082c1193. {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1003.503915] env[62070]: DEBUG oslo_concurrency.lockutils [req-7fb1dbf9-4437-4174-850e-a565c952815f req-5b27c113-7ceb-4208-a6c9-5d65f84ba570 service nova] Acquiring lock "refresh_cache-5cccd79d-d243-49db-8581-718dd594f3b3" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1003.685662] env[62070]: DEBUG oslo_vmware.api [None req-93767b52-6617-4bde-8e1b-f022a96d60f4 tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]523e1b33-782b-7584-98a5-f6b58a44d50d, 'name': SearchDatastore_Task, 'duration_secs': 0.009456} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1003.685978] env[62070]: DEBUG oslo_concurrency.lockutils [None req-93767b52-6617-4bde-8e1b-f022a96d60f4 tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] Releasing lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1003.686473] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-93767b52-6617-4bde-8e1b-f022a96d60f4 tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore2] b9e5a798-9ce9-4e36-8c32-bc32b0dc1eae/b9e5a798-9ce9-4e36-8c32-bc32b0dc1eae.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1003.686763] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-15610bbd-fb22-4d27-b5fb-42f02502b028 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.689404] env[62070]: DEBUG nova.network.neutron [None req-9026fc09-39c2-473e-a4e2-4fc03ac7d742 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] [instance: 5cccd79d-d243-49db-8581-718dd594f3b3] Instance cache missing network info. {{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1003.697512] env[62070]: DEBUG oslo_vmware.api [None req-93767b52-6617-4bde-8e1b-f022a96d60f4 tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] Waiting for the task: (returnval){ [ 1003.697512] env[62070]: value = "task-1122248" [ 1003.697512] env[62070]: _type = "Task" [ 1003.697512] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1003.707113] env[62070]: DEBUG oslo_vmware.api [None req-93767b52-6617-4bde-8e1b-f022a96d60f4 tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] Task: {'id': task-1122248, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1003.747766] env[62070]: DEBUG oslo_concurrency.lockutils [req-b967efdf-519f-46d7-a224-c385b765a219 req-af8c41b2-0968-4b75-ab40-4cc3d95331b5 service nova] Releasing lock "refresh_cache-b9e5a798-9ce9-4e36-8c32-bc32b0dc1eae" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1003.795851] env[62070]: DEBUG oslo_concurrency.lockutils [None req-63c53e37-78c7-4c69-b978-95d3e246ef28 tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.888s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1003.799281] env[62070]: DEBUG oslo_concurrency.lockutils [None req-55bec81b-9bf5-490f-96d6-8638d78a6285 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.861s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1003.799747] env[62070]: DEBUG nova.objects.instance [None req-55bec81b-9bf5-490f-96d6-8638d78a6285 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Lazy-loading 'resources' on Instance uuid 6cba961f-f9f9-4d3c-853a-049a014c9dbb {{(pid=62070) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1003.823807] env[62070]: INFO nova.scheduler.client.report [None req-63c53e37-78c7-4c69-b978-95d3e246ef28 tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Deleted allocations for instance e4cf42ff-8440-42bc-b629-4b712fd94e99 [ 1003.858502] env[62070]: DEBUG nova.network.neutron [None req-9026fc09-39c2-473e-a4e2-4fc03ac7d742 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] [instance: 5cccd79d-d243-49db-8581-718dd594f3b3] Updating instance_info_cache with network_info: [{"id": "a423c3f9-0b61-4375-9131-a98f082c1193", "address": "fa:16:3e:15:82:ce", "network": {"id": "443d2d62-bcef-44b2-814a-3e5dc50abc04", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-772061432-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e85c0cc8e0f544bfbb76970d3123fbb7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5e1049e8-c06b-4c93-a9e1-2cbb530f3f95", "external-id": "nsx-vlan-transportzone-966", "segmentation_id": 966, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa423c3f9-0b", "ovs_interfaceid": "a423c3f9-0b61-4375-9131-a98f082c1193", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1003.947280] env[62070]: DEBUG oslo_vmware.api [None 
req-636c2657-6edc-4f64-8b0d-e6d14b26af48 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]5215bf5e-4cfb-b520-ae9a-08dbc7636213, 'name': SearchDatastore_Task, 'duration_secs': 0.009821} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1003.947738] env[62070]: DEBUG oslo_concurrency.lockutils [None req-636c2657-6edc-4f64-8b0d-e6d14b26af48 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Releasing lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1003.948173] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-636c2657-6edc-4f64-8b0d-e6d14b26af48 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 84c00e4a-20d3-4739-8535-e27076d85a89] Processing image 43ea607c-7ece-4601-9b11-75c6a16aa7dd {{(pid=62070) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1003.948550] env[62070]: DEBUG oslo_concurrency.lockutils [None req-636c2657-6edc-4f64-8b0d-e6d14b26af48 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1003.948814] env[62070]: DEBUG oslo_concurrency.lockutils [None req-636c2657-6edc-4f64-8b0d-e6d14b26af48 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Acquired lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1003.949085] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-636c2657-6edc-4f64-8b0d-e6d14b26af48 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1003.949866] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6252c01b-5ae1-4562-9028-3c351c0ed1f3 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.953046] env[62070]: DEBUG oslo_concurrency.lockutils [None req-5af751bb-55de-4464-945f-4333bd2b85cc tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Releasing lock "refresh_cache-b101c79a-abfd-4104-aaed-096995fb2337" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1003.974369] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-636c2657-6edc-4f64-8b0d-e6d14b26af48 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1003.974581] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None 
req-636c2657-6edc-4f64-8b0d-e6d14b26af48 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62070) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1003.975448] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-55a33bc8-5ce6-494b-bed3-c255fd676329 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.978631] env[62070]: DEBUG nova.network.neutron [-] [instance: 7dc27fe6-495f-498d-88fe-a99ddc19a21c] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1003.985340] env[62070]: DEBUG oslo_vmware.api [None req-636c2657-6edc-4f64-8b0d-e6d14b26af48 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Waiting for the task: (returnval){ [ 1003.985340] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]52505774-29ac-0218-6a64-b07836947294" [ 1003.985340] env[62070]: _type = "Task" [ 1003.985340] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1003.996000] env[62070]: DEBUG oslo_vmware.api [None req-636c2657-6edc-4f64-8b0d-e6d14b26af48 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52505774-29ac-0218-6a64-b07836947294, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1004.209731] env[62070]: DEBUG oslo_vmware.api [None req-93767b52-6617-4bde-8e1b-f022a96d60f4 tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] Task: {'id': task-1122248, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.450395} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1004.209912] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-93767b52-6617-4bde-8e1b-f022a96d60f4 tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore2] b9e5a798-9ce9-4e36-8c32-bc32b0dc1eae/b9e5a798-9ce9-4e36-8c32-bc32b0dc1eae.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 1004.210155] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-93767b52-6617-4bde-8e1b-f022a96d60f4 tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] [instance: b9e5a798-9ce9-4e36-8c32-bc32b0dc1eae] Extending root virtual disk to 1048576 {{(pid=62070) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1004.210417] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6eb1b751-a3ec-4f35-a343-63f0792f87bf {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.219249] env[62070]: DEBUG oslo_vmware.api [None req-93767b52-6617-4bde-8e1b-f022a96d60f4 tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] Waiting for the task: (returnval){ [ 1004.219249] env[62070]: value = "task-1122249" [ 1004.219249] env[62070]: _type = "Task" [ 1004.219249] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1004.226567] env[62070]: DEBUG oslo_vmware.api [None req-93767b52-6617-4bde-8e1b-f022a96d60f4 tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] Task: {'id': task-1122249, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1004.332033] env[62070]: DEBUG oslo_concurrency.lockutils [None req-63c53e37-78c7-4c69-b978-95d3e246ef28 tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Lock "e4cf42ff-8440-42bc-b629-4b712fd94e99" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 13.084s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1004.359966] env[62070]: DEBUG nova.compute.manager [req-c677f2e7-b58c-4b84-9a65-b3ab348cead2 req-958695a7-331e-474e-8375-6cea5905d138 service nova] [instance: 7dc27fe6-495f-498d-88fe-a99ddc19a21c] Received event network-vif-deleted-cac26624-11c7-45a9-acb3-3e86b7232ab2 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1004.360676] env[62070]: DEBUG oslo_concurrency.lockutils [None req-9026fc09-39c2-473e-a4e2-4fc03ac7d742 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Releasing lock "refresh_cache-5cccd79d-d243-49db-8581-718dd594f3b3" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1004.361609] env[62070]: DEBUG nova.compute.manager [None req-9026fc09-39c2-473e-a4e2-4fc03ac7d742 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] [instance: 5cccd79d-d243-49db-8581-718dd594f3b3] Instance network_info: |[{"id": "a423c3f9-0b61-4375-9131-a98f082c1193", "address": "fa:16:3e:15:82:ce", "network": {"id": "443d2d62-bcef-44b2-814a-3e5dc50abc04", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-772061432-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e85c0cc8e0f544bfbb76970d3123fbb7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5e1049e8-c06b-4c93-a9e1-2cbb530f3f95", "external-id": "nsx-vlan-transportzone-966", "segmentation_id": 966, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa423c3f9-0b", "ovs_interfaceid": "a423c3f9-0b61-4375-9131-a98f082c1193", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1004.362781] env[62070]: DEBUG oslo_concurrency.lockutils [req-7fb1dbf9-4437-4174-850e-a565c952815f req-5b27c113-7ceb-4208-a6c9-5d65f84ba570 service nova] Acquired lock "refresh_cache-5cccd79d-d243-49db-8581-718dd594f3b3" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1004.362981] env[62070]: DEBUG nova.network.neutron [req-7fb1dbf9-4437-4174-850e-a565c952815f req-5b27c113-7ceb-4208-a6c9-5d65f84ba570 service nova] [instance: 5cccd79d-d243-49db-8581-718dd594f3b3] Refreshing network info cache for port a423c3f9-0b61-4375-9131-a98f082c1193 {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1004.367125] env[62070]: 
DEBUG nova.virt.vmwareapi.vmops [None req-9026fc09-39c2-473e-a4e2-4fc03ac7d742 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] [instance: 5cccd79d-d243-49db-8581-718dd594f3b3] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:15:82:ce', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '5e1049e8-c06b-4c93-a9e1-2cbb530f3f95', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a423c3f9-0b61-4375-9131-a98f082c1193', 'vif_model': 'vmxnet3'}] {{(pid=62070) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1004.375653] env[62070]: DEBUG oslo.service.loopingcall [None req-9026fc09-39c2-473e-a4e2-4fc03ac7d742 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1004.379523] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5cccd79d-d243-49db-8581-718dd594f3b3] Creating VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1004.380016] env[62070]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9f615078-efe4-40e0-a87b-3b215dc5c9fa {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.408652] env[62070]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1004.408652] env[62070]: value = "task-1122250" [ 1004.408652] env[62070]: _type = "Task" [ 1004.408652] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1004.425494] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1122250, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1004.457981] env[62070]: DEBUG oslo_concurrency.lockutils [None req-4c71a514-0b63-4572-bfc9-c9bc2441840c tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Acquiring lock "71aead12-a182-40a7-b5a9-91c01271b800-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1004.458342] env[62070]: DEBUG oslo_concurrency.lockutils [None req-4c71a514-0b63-4572-bfc9-c9bc2441840c tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Lock "71aead12-a182-40a7-b5a9-91c01271b800-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1004.458509] env[62070]: DEBUG oslo_concurrency.lockutils [None req-4c71a514-0b63-4572-bfc9-c9bc2441840c tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Lock "71aead12-a182-40a7-b5a9-91c01271b800-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1004.479320] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5f7fe67-6764-459d-96e1-14522ee72c60 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.486018] env[62070]: INFO nova.compute.manager [-] [instance: 7dc27fe6-495f-498d-88fe-a99ddc19a21c] Took 1.94 seconds to deallocate network for instance. [ 1004.512326] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bf29e14-8ce6-4bfd-b335-e19604588fef {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.521703] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-5af751bb-55de-4464-945f-4333bd2b85cc tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: b101c79a-abfd-4104-aaed-096995fb2337] Updating instance 'b101c79a-abfd-4104-aaed-096995fb2337' progress to 83 {{(pid=62070) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 1004.533022] env[62070]: DEBUG oslo_vmware.api [None req-636c2657-6edc-4f64-8b0d-e6d14b26af48 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52505774-29ac-0218-6a64-b07836947294, 'name': SearchDatastore_Task, 'duration_secs': 0.058944} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1004.533022] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f7d4ba2c-c7c0-4bf3-9fad-1b61a96acf83 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.537672] env[62070]: DEBUG oslo_vmware.api [None req-636c2657-6edc-4f64-8b0d-e6d14b26af48 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Waiting for the task: (returnval){ [ 1004.537672] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]527eb3e9-81c1-faae-3c7c-cb94f8477e56" [ 1004.537672] env[62070]: _type = "Task" [ 1004.537672] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1004.549153] env[62070]: DEBUG oslo_vmware.api [None req-636c2657-6edc-4f64-8b0d-e6d14b26af48 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]527eb3e9-81c1-faae-3c7c-cb94f8477e56, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1004.620822] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c701d06f-1575-440d-a6ce-8cf1c86e337c {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.628542] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6030bf0-c0ad-4e51-b3ff-99c955c730d0 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.662808] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46f8deee-6393-4c33-af5a-a73ff1ecbb3a {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.672413] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-110f1fd9-f119-4869-b7f4-4cb8732fd53d {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.686071] env[62070]: DEBUG nova.compute.provider_tree [None req-55bec81b-9bf5-490f-96d6-8638d78a6285 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1004.728462] env[62070]: DEBUG oslo_vmware.api [None req-93767b52-6617-4bde-8e1b-f022a96d60f4 tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] Task: {'id': task-1122249, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.06344} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1004.728743] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-93767b52-6617-4bde-8e1b-f022a96d60f4 tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] [instance: b9e5a798-9ce9-4e36-8c32-bc32b0dc1eae] Extended root virtual disk {{(pid=62070) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1004.729543] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8dae980-fb04-4716-be91-6176f225cd92 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.753042] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-93767b52-6617-4bde-8e1b-f022a96d60f4 tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] [instance: b9e5a798-9ce9-4e36-8c32-bc32b0dc1eae] Reconfiguring VM instance instance-0000005e to attach disk [datastore2] b9e5a798-9ce9-4e36-8c32-bc32b0dc1eae/b9e5a798-9ce9-4e36-8c32-bc32b0dc1eae.vmdk or device None with type sparse {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1004.753382] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8162fc8c-f1fa-4c47-a8c1-76fb2611c711 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.776310] env[62070]: DEBUG oslo_vmware.api [None req-93767b52-6617-4bde-8e1b-f022a96d60f4 tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] Waiting for the task: (returnval){ [ 1004.776310] env[62070]: value = "task-1122251" [ 1004.776310] env[62070]: _type = "Task" [ 1004.776310] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1004.785487] env[62070]: DEBUG oslo_vmware.api [None req-93767b52-6617-4bde-8e1b-f022a96d60f4 tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] Task: {'id': task-1122251, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1004.922843] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1122250, 'name': CreateVM_Task, 'duration_secs': 0.399987} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1004.926039] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5cccd79d-d243-49db-8581-718dd594f3b3] Created VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1004.926039] env[62070]: DEBUG oslo_concurrency.lockutils [None req-9026fc09-39c2-473e-a4e2-4fc03ac7d742 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1004.926039] env[62070]: DEBUG oslo_concurrency.lockutils [None req-9026fc09-39c2-473e-a4e2-4fc03ac7d742 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Acquired lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1004.926039] env[62070]: DEBUG oslo_concurrency.lockutils [None req-9026fc09-39c2-473e-a4e2-4fc03ac7d742 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1004.926039] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a52161d3-4a1b-49c5-bb7b-4d62e940a254 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.930163] env[62070]: DEBUG oslo_vmware.api [None req-9026fc09-39c2-473e-a4e2-4fc03ac7d742 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Waiting for the task: (returnval){ [ 1004.930163] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]527f896f-330e-1b8a-d8ac-737dbbe60381" [ 1004.930163] env[62070]: _type = "Task" [ 1004.930163] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1004.940534] env[62070]: DEBUG oslo_vmware.api [None req-9026fc09-39c2-473e-a4e2-4fc03ac7d742 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]527f896f-330e-1b8a-d8ac-737dbbe60381, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1005.018632] env[62070]: DEBUG oslo_concurrency.lockutils [None req-3a47a147-5701-4ad4-a769-76d1ae8c53df tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1005.034021] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-5af751bb-55de-4464-945f-4333bd2b85cc tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: b101c79a-abfd-4104-aaed-096995fb2337] Powering on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1005.034021] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-54fa1a6e-5302-472c-8143-b3323a7adbde {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.045758] env[62070]: DEBUG oslo_vmware.api [None req-5af751bb-55de-4464-945f-4333bd2b85cc tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Waiting for the task: (returnval){ [ 1005.045758] env[62070]: value = "task-1122252" [ 1005.045758] env[62070]: _type = "Task" [ 1005.045758] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1005.054360] env[62070]: DEBUG oslo_vmware.api [None req-636c2657-6edc-4f64-8b0d-e6d14b26af48 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]527eb3e9-81c1-faae-3c7c-cb94f8477e56, 'name': SearchDatastore_Task, 'duration_secs': 0.011691} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1005.054360] env[62070]: DEBUG oslo_concurrency.lockutils [None req-636c2657-6edc-4f64-8b0d-e6d14b26af48 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Releasing lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1005.054622] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-636c2657-6edc-4f64-8b0d-e6d14b26af48 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore2] 84c00e4a-20d3-4739-8535-e27076d85a89/84c00e4a-20d3-4739-8535-e27076d85a89.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1005.054938] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e93eed81-7a21-4bb4-a6a1-a045aae47144 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.060701] env[62070]: DEBUG oslo_vmware.api [None req-5af751bb-55de-4464-945f-4333bd2b85cc tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Task: {'id': task-1122252, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1005.066523] env[62070]: DEBUG oslo_vmware.api [None req-636c2657-6edc-4f64-8b0d-e6d14b26af48 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Waiting for the task: (returnval){ [ 1005.066523] env[62070]: value = "task-1122253" [ 1005.066523] env[62070]: _type = "Task" [ 1005.066523] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1005.076560] env[62070]: DEBUG oslo_vmware.api [None req-636c2657-6edc-4f64-8b0d-e6d14b26af48 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': task-1122253, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1005.172373] env[62070]: DEBUG nova.network.neutron [req-7fb1dbf9-4437-4174-850e-a565c952815f req-5b27c113-7ceb-4208-a6c9-5d65f84ba570 service nova] [instance: 5cccd79d-d243-49db-8581-718dd594f3b3] Updated VIF entry in instance network info cache for port a423c3f9-0b61-4375-9131-a98f082c1193. 
{{(pid=62070) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1005.172764] env[62070]: DEBUG nova.network.neutron [req-7fb1dbf9-4437-4174-850e-a565c952815f req-5b27c113-7ceb-4208-a6c9-5d65f84ba570 service nova] [instance: 5cccd79d-d243-49db-8581-718dd594f3b3] Updating instance_info_cache with network_info: [{"id": "a423c3f9-0b61-4375-9131-a98f082c1193", "address": "fa:16:3e:15:82:ce", "network": {"id": "443d2d62-bcef-44b2-814a-3e5dc50abc04", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-772061432-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e85c0cc8e0f544bfbb76970d3123fbb7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5e1049e8-c06b-4c93-a9e1-2cbb530f3f95", "external-id": "nsx-vlan-transportzone-966", "segmentation_id": 966, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa423c3f9-0b", "ovs_interfaceid": "a423c3f9-0b61-4375-9131-a98f082c1193", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1005.188764] env[62070]: DEBUG nova.scheduler.client.report [None req-55bec81b-9bf5-490f-96d6-8638d78a6285 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1005.286872] env[62070]: DEBUG oslo_vmware.api [None req-93767b52-6617-4bde-8e1b-f022a96d60f4 tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] Task: {'id': task-1122251, 'name': ReconfigVM_Task, 'duration_secs': 0.375643} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1005.287228] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-93767b52-6617-4bde-8e1b-f022a96d60f4 tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] [instance: b9e5a798-9ce9-4e36-8c32-bc32b0dc1eae] Reconfigured VM instance instance-0000005e to attach disk [datastore2] b9e5a798-9ce9-4e36-8c32-bc32b0dc1eae/b9e5a798-9ce9-4e36-8c32-bc32b0dc1eae.vmdk or device None with type sparse {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1005.287953] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-aa7147e1-2a91-49ad-9aab-0371e4dc0003 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.295533] env[62070]: DEBUG oslo_vmware.api [None req-93767b52-6617-4bde-8e1b-f022a96d60f4 tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] Waiting for the task: (returnval){ [ 1005.295533] env[62070]: value = "task-1122254" [ 1005.295533] env[62070]: _type = "Task" [ 1005.295533] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1005.303980] env[62070]: DEBUG oslo_vmware.api [None req-93767b52-6617-4bde-8e1b-f022a96d60f4 tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] Task: {'id': task-1122254, 'name': Rename_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1005.445307] env[62070]: DEBUG oslo_vmware.api [None req-9026fc09-39c2-473e-a4e2-4fc03ac7d742 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]527f896f-330e-1b8a-d8ac-737dbbe60381, 'name': SearchDatastore_Task, 'duration_secs': 0.010315} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1005.445779] env[62070]: DEBUG oslo_concurrency.lockutils [None req-9026fc09-39c2-473e-a4e2-4fc03ac7d742 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Releasing lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1005.446840] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-9026fc09-39c2-473e-a4e2-4fc03ac7d742 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] [instance: 5cccd79d-d243-49db-8581-718dd594f3b3] Processing image 43ea607c-7ece-4601-9b11-75c6a16aa7dd {{(pid=62070) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1005.447328] env[62070]: DEBUG oslo_concurrency.lockutils [None req-9026fc09-39c2-473e-a4e2-4fc03ac7d742 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1005.447768] env[62070]: DEBUG oslo_concurrency.lockutils [None req-9026fc09-39c2-473e-a4e2-4fc03ac7d742 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Acquired lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1005.448139] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-9026fc09-39c2-473e-a4e2-4fc03ac7d742 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1005.448573] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-980d447c-7f8f-4ce3-a79f-db95291b3914 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.470936] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-9026fc09-39c2-473e-a4e2-4fc03ac7d742 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1005.471247] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-9026fc09-39c2-473e-a4e2-4fc03ac7d742 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62070) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1005.472793] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0451720d-faaa-4aa5-8fa1-c8f70f3b423f {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.481286] env[62070]: DEBUG oslo_vmware.api [None req-9026fc09-39c2-473e-a4e2-4fc03ac7d742 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Waiting for the task: (returnval){ [ 1005.481286] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]528f60e7-c8e0-626a-ac86-1ee1413cb0c1" [ 1005.481286] env[62070]: _type = "Task" [ 1005.481286] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1005.489660] env[62070]: DEBUG oslo_vmware.api [None req-9026fc09-39c2-473e-a4e2-4fc03ac7d742 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]528f60e7-c8e0-626a-ac86-1ee1413cb0c1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1005.505391] env[62070]: DEBUG oslo_concurrency.lockutils [None req-4c71a514-0b63-4572-bfc9-c9bc2441840c tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Acquiring lock "refresh_cache-71aead12-a182-40a7-b5a9-91c01271b800" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1005.505747] env[62070]: DEBUG oslo_concurrency.lockutils [None req-4c71a514-0b63-4572-bfc9-c9bc2441840c tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Acquired lock "refresh_cache-71aead12-a182-40a7-b5a9-91c01271b800" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1005.505885] env[62070]: DEBUG nova.network.neutron [None req-4c71a514-0b63-4572-bfc9-c9bc2441840c tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 71aead12-a182-40a7-b5a9-91c01271b800] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1005.559102] env[62070]: DEBUG oslo_vmware.api [None req-5af751bb-55de-4464-945f-4333bd2b85cc tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Task: {'id': task-1122252, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1005.576902] env[62070]: DEBUG oslo_vmware.api [None req-636c2657-6edc-4f64-8b0d-e6d14b26af48 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': task-1122253, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1005.676136] env[62070]: DEBUG oslo_concurrency.lockutils [req-7fb1dbf9-4437-4174-850e-a565c952815f req-5b27c113-7ceb-4208-a6c9-5d65f84ba570 service nova] Releasing lock "refresh_cache-5cccd79d-d243-49db-8581-718dd594f3b3" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1005.694360] env[62070]: DEBUG oslo_concurrency.lockutils [None req-55bec81b-9bf5-490f-96d6-8638d78a6285 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.895s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1005.696795] env[62070]: DEBUG oslo_concurrency.lockutils [None req-70c22416-1d5b-4f27-873a-b05d521d4159 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.682s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1005.697066] env[62070]: DEBUG nova.objects.instance [None req-70c22416-1d5b-4f27-873a-b05d521d4159 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Lazy-loading 'resources' on Instance uuid f810eab8-1c8c-4f7a-8acd-f46ac5e6d31f {{(pid=62070) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1005.725890] env[62070]: INFO nova.scheduler.client.report [None req-55bec81b-9bf5-490f-96d6-8638d78a6285 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Deleted allocations for instance 6cba961f-f9f9-4d3c-853a-049a014c9dbb [ 1005.806834] env[62070]: DEBUG oslo_vmware.api [None req-93767b52-6617-4bde-8e1b-f022a96d60f4 tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] Task: {'id': task-1122254, 'name': Rename_Task, 'duration_secs': 0.273877} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1005.807079] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-93767b52-6617-4bde-8e1b-f022a96d60f4 tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] [instance: b9e5a798-9ce9-4e36-8c32-bc32b0dc1eae] Powering on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1005.807344] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-050b02f5-a8bb-4da0-84b0-f727d2baeae6 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.814412] env[62070]: DEBUG oslo_vmware.api [None req-93767b52-6617-4bde-8e1b-f022a96d60f4 tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] Waiting for the task: (returnval){ [ 1005.814412] env[62070]: value = "task-1122255" [ 1005.814412] env[62070]: _type = "Task" [ 1005.814412] env[62070]: } to complete. 
{{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1005.823470] env[62070]: DEBUG oslo_vmware.api [None req-93767b52-6617-4bde-8e1b-f022a96d60f4 tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] Task: {'id': task-1122255, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1005.991722] env[62070]: DEBUG oslo_vmware.api [None req-9026fc09-39c2-473e-a4e2-4fc03ac7d742 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]528f60e7-c8e0-626a-ac86-1ee1413cb0c1, 'name': SearchDatastore_Task, 'duration_secs': 0.059934} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1005.992680] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9b4f42c5-475e-4947-9038-a9486bacce3f {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.998649] env[62070]: DEBUG oslo_vmware.api [None req-9026fc09-39c2-473e-a4e2-4fc03ac7d742 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Waiting for the task: (returnval){ [ 1005.998649] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]522269ab-5619-a3fd-397c-f326ba195a47" [ 1005.998649] env[62070]: _type = "Task" [ 1005.998649] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1006.007461] env[62070]: DEBUG oslo_vmware.api [None req-9026fc09-39c2-473e-a4e2-4fc03ac7d742 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]522269ab-5619-a3fd-397c-f326ba195a47, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1006.057642] env[62070]: DEBUG oslo_vmware.api [None req-5af751bb-55de-4464-945f-4333bd2b85cc tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Task: {'id': task-1122252, 'name': PowerOnVM_Task, 'duration_secs': 0.661303} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1006.058070] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-5af751bb-55de-4464-945f-4333bd2b85cc tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: b101c79a-abfd-4104-aaed-096995fb2337] Powered on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1006.058376] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-5af751bb-55de-4464-945f-4333bd2b85cc tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: b101c79a-abfd-4104-aaed-096995fb2337] Updating instance 'b101c79a-abfd-4104-aaed-096995fb2337' progress to 100 {{(pid=62070) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 1006.085812] env[62070]: DEBUG oslo_vmware.api [None req-636c2657-6edc-4f64-8b0d-e6d14b26af48 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': task-1122253, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.604618} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1006.088553] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-636c2657-6edc-4f64-8b0d-e6d14b26af48 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore2] 84c00e4a-20d3-4739-8535-e27076d85a89/84c00e4a-20d3-4739-8535-e27076d85a89.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 1006.088553] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-636c2657-6edc-4f64-8b0d-e6d14b26af48 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 84c00e4a-20d3-4739-8535-e27076d85a89] Extending root virtual disk to 1048576 {{(pid=62070) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1006.088553] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c87a1ebe-0615-44ea-9664-3611b89919a2 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.094617] env[62070]: DEBUG oslo_vmware.api [None req-636c2657-6edc-4f64-8b0d-e6d14b26af48 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Waiting for the task: (returnval){ [ 1006.094617] env[62070]: value = "task-1122256" [ 1006.094617] env[62070]: _type = "Task" [ 1006.094617] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1006.104951] env[62070]: DEBUG oslo_vmware.api [None req-636c2657-6edc-4f64-8b0d-e6d14b26af48 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': task-1122256, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1006.240602] env[62070]: DEBUG oslo_concurrency.lockutils [None req-55bec81b-9bf5-490f-96d6-8638d78a6285 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Lock "6cba961f-f9f9-4d3c-853a-049a014c9dbb" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.540s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1006.327660] env[62070]: DEBUG oslo_vmware.api [None req-93767b52-6617-4bde-8e1b-f022a96d60f4 tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] Task: {'id': task-1122255, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1006.438225] env[62070]: DEBUG nova.network.neutron [None req-4c71a514-0b63-4572-bfc9-c9bc2441840c tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 71aead12-a182-40a7-b5a9-91c01271b800] Updating instance_info_cache with network_info: [{"id": "a3ed0957-14c2-4144-8d45-f4a0e5cb45ab", "address": "fa:16:3e:3c:6a:3d", "network": {"id": "1ca75409-ae3f-4837-a2b6-ed2445253336", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1883034081-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.208", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f29ac48ab6544ec0bd1d210aec05dbc5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1559ce49-7345-443f-bf02-4bfeb88356ef", "external-id": "nsx-vlan-transportzone-670", "segmentation_id": 670, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa3ed0957-14", "ovs_interfaceid": "a3ed0957-14c2-4144-8d45-f4a0e5cb45ab", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1006.440941] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90c5a275-8946-483a-a873-85c9e426f532 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.448117] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-535eabf6-8a8c-45d9-a741-ebc707c49abb {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.480155] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46ee4213-4f90-416f-b0bd-97bce85049bb {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.489762] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-985ee0fa-6d67-4be8-9bcf-ff155aa8e23c 
{{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.505026] env[62070]: DEBUG nova.compute.provider_tree [None req-70c22416-1d5b-4f27-873a-b05d521d4159 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1006.513352] env[62070]: DEBUG oslo_vmware.api [None req-9026fc09-39c2-473e-a4e2-4fc03ac7d742 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]522269ab-5619-a3fd-397c-f326ba195a47, 'name': SearchDatastore_Task, 'duration_secs': 0.011041} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1006.513627] env[62070]: DEBUG oslo_concurrency.lockutils [None req-9026fc09-39c2-473e-a4e2-4fc03ac7d742 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Releasing lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1006.513860] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-9026fc09-39c2-473e-a4e2-4fc03ac7d742 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore2] 5cccd79d-d243-49db-8581-718dd594f3b3/5cccd79d-d243-49db-8581-718dd594f3b3.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1006.515088] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-28dc6dbe-eee3-4ee3-9bfb-9818dc1936d5 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.520946] env[62070]: DEBUG oslo_vmware.api [None req-9026fc09-39c2-473e-a4e2-4fc03ac7d742 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Waiting for the task: (returnval){ [ 1006.520946] env[62070]: value = "task-1122257" [ 1006.520946] env[62070]: _type = "Task" [ 1006.520946] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1006.531821] env[62070]: DEBUG oslo_vmware.api [None req-9026fc09-39c2-473e-a4e2-4fc03ac7d742 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Task: {'id': task-1122257, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1006.605625] env[62070]: DEBUG oslo_vmware.api [None req-636c2657-6edc-4f64-8b0d-e6d14b26af48 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': task-1122256, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.06917} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1006.605912] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-636c2657-6edc-4f64-8b0d-e6d14b26af48 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 84c00e4a-20d3-4739-8535-e27076d85a89] Extended root virtual disk {{(pid=62070) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1006.606705] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0152ac4-80dd-49ff-9fb2-fac032dd2048 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.630549] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-636c2657-6edc-4f64-8b0d-e6d14b26af48 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 84c00e4a-20d3-4739-8535-e27076d85a89] Reconfiguring VM instance instance-00000053 to attach disk [datastore2] 84c00e4a-20d3-4739-8535-e27076d85a89/84c00e4a-20d3-4739-8535-e27076d85a89.vmdk or device None with type sparse {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1006.630923] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-de8fc059-931f-4bac-b8d3-74f4de16051c {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.650805] env[62070]: DEBUG oslo_vmware.api [None req-636c2657-6edc-4f64-8b0d-e6d14b26af48 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Waiting for the task: (returnval){ [ 1006.650805] env[62070]: value = "task-1122258" [ 1006.650805] env[62070]: _type = "Task" [ 1006.650805] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1006.659615] env[62070]: DEBUG oslo_vmware.api [None req-636c2657-6edc-4f64-8b0d-e6d14b26af48 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': task-1122258, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1006.828173] env[62070]: DEBUG oslo_vmware.api [None req-93767b52-6617-4bde-8e1b-f022a96d60f4 tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] Task: {'id': task-1122255, 'name': PowerOnVM_Task, 'duration_secs': 0.906271} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1006.828521] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-93767b52-6617-4bde-8e1b-f022a96d60f4 tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] [instance: b9e5a798-9ce9-4e36-8c32-bc32b0dc1eae] Powered on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1006.828696] env[62070]: INFO nova.compute.manager [None req-93767b52-6617-4bde-8e1b-f022a96d60f4 tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] [instance: b9e5a798-9ce9-4e36-8c32-bc32b0dc1eae] Took 7.17 seconds to spawn the instance on the hypervisor. 
[ 1006.831021] env[62070]: DEBUG nova.compute.manager [None req-93767b52-6617-4bde-8e1b-f022a96d60f4 tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] [instance: b9e5a798-9ce9-4e36-8c32-bc32b0dc1eae] Checking state {{(pid=62070) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1006.831021] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c81e46f-4c8d-4952-9ecc-2ed7ab0ac994 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.945041] env[62070]: DEBUG oslo_concurrency.lockutils [None req-4c71a514-0b63-4572-bfc9-c9bc2441840c tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Releasing lock "refresh_cache-71aead12-a182-40a7-b5a9-91c01271b800" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1007.009716] env[62070]: DEBUG nova.scheduler.client.report [None req-70c22416-1d5b-4f27-873a-b05d521d4159 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1007.032652] env[62070]: DEBUG oslo_vmware.api [None req-9026fc09-39c2-473e-a4e2-4fc03ac7d742 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Task: {'id': task-1122257, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.480762} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1007.034107] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-9026fc09-39c2-473e-a4e2-4fc03ac7d742 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore2] 5cccd79d-d243-49db-8581-718dd594f3b3/5cccd79d-d243-49db-8581-718dd594f3b3.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 1007.034107] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-9026fc09-39c2-473e-a4e2-4fc03ac7d742 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] [instance: 5cccd79d-d243-49db-8581-718dd594f3b3] Extending root virtual disk to 1048576 {{(pid=62070) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1007.034107] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8b93fd8d-7a1c-4e61-89f2-99eb93f2bb3c {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.040072] env[62070]: DEBUG oslo_vmware.api [None req-9026fc09-39c2-473e-a4e2-4fc03ac7d742 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Waiting for the task: (returnval){ [ 1007.040072] env[62070]: value = "task-1122259" [ 1007.040072] env[62070]: _type = "Task" [ 1007.040072] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1007.053346] env[62070]: DEBUG oslo_vmware.api [None req-9026fc09-39c2-473e-a4e2-4fc03ac7d742 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Task: {'id': task-1122259, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1007.162330] env[62070]: DEBUG oslo_vmware.api [None req-636c2657-6edc-4f64-8b0d-e6d14b26af48 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': task-1122258, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1007.174318] env[62070]: DEBUG oslo_concurrency.lockutils [None req-d3d12465-2651-49ee-be0e-d6920a7c6c57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Acquiring lock "33d04e59-da01-4ba3-ac42-ab93372a332d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1007.174804] env[62070]: DEBUG oslo_concurrency.lockutils [None req-d3d12465-2651-49ee-be0e-d6920a7c6c57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Lock "33d04e59-da01-4ba3-ac42-ab93372a332d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1007.350208] env[62070]: INFO nova.compute.manager [None req-93767b52-6617-4bde-8e1b-f022a96d60f4 tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] [instance: b9e5a798-9ce9-4e36-8c32-bc32b0dc1eae] Took 20.82 seconds to build instance. [ 1007.478163] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d2cbc3c-3836-4446-a6c6-bb17c599b5f5 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.497755] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-897bbd93-7f39-46c8-906d-86cc0219bd97 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.506021] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-4c71a514-0b63-4572-bfc9-c9bc2441840c tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 71aead12-a182-40a7-b5a9-91c01271b800] Updating instance '71aead12-a182-40a7-b5a9-91c01271b800' progress to 83 {{(pid=62070) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 1007.517252] env[62070]: DEBUG oslo_concurrency.lockutils [None req-70c22416-1d5b-4f27-873a-b05d521d4159 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.818s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1007.517721] env[62070]: DEBUG oslo_concurrency.lockutils [None req-3a47a147-5701-4ad4-a769-76d1ae8c53df tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.501s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1007.518223] env[62070]: DEBUG nova.objects.instance [None req-3a47a147-5701-4ad4-a769-76d1ae8c53df tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Lazy-loading 'resources' on Instance uuid 7dc27fe6-495f-498d-88fe-a99ddc19a21c {{(pid=62070) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1007.558994] env[62070]: DEBUG 
oslo_vmware.api [None req-9026fc09-39c2-473e-a4e2-4fc03ac7d742 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Task: {'id': task-1122259, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069637} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1007.560730] env[62070]: INFO nova.scheduler.client.report [None req-70c22416-1d5b-4f27-873a-b05d521d4159 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Deleted allocations for instance f810eab8-1c8c-4f7a-8acd-f46ac5e6d31f [ 1007.561431] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-9026fc09-39c2-473e-a4e2-4fc03ac7d742 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] [instance: 5cccd79d-d243-49db-8581-718dd594f3b3] Extended root virtual disk {{(pid=62070) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1007.564683] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56b9c731-ac7e-407c-a295-5ddba3c30428 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.597306] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-9026fc09-39c2-473e-a4e2-4fc03ac7d742 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] [instance: 5cccd79d-d243-49db-8581-718dd594f3b3] Reconfiguring VM instance instance-0000005f to attach disk [datastore2] 5cccd79d-d243-49db-8581-718dd594f3b3/5cccd79d-d243-49db-8581-718dd594f3b3.vmdk or device None with type sparse {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1007.599945] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ebf3d8a2-bff6-469e-9ae2-41520d5a6f77 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.627218] env[62070]: DEBUG oslo_vmware.api [None req-9026fc09-39c2-473e-a4e2-4fc03ac7d742 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Waiting for the task: (returnval){ [ 1007.627218] env[62070]: value = "task-1122260" [ 1007.627218] env[62070]: _type = "Task" [ 1007.627218] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1007.636267] env[62070]: DEBUG oslo_vmware.api [None req-9026fc09-39c2-473e-a4e2-4fc03ac7d742 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Task: {'id': task-1122260, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1007.662303] env[62070]: DEBUG oslo_vmware.api [None req-636c2657-6edc-4f64-8b0d-e6d14b26af48 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': task-1122258, 'name': ReconfigVM_Task, 'duration_secs': 0.593806} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1007.662792] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-636c2657-6edc-4f64-8b0d-e6d14b26af48 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 84c00e4a-20d3-4739-8535-e27076d85a89] Reconfigured VM instance instance-00000053 to attach disk [datastore2] 84c00e4a-20d3-4739-8535-e27076d85a89/84c00e4a-20d3-4739-8535-e27076d85a89.vmdk or device None with type sparse {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1007.664507] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-636c2657-6edc-4f64-8b0d-e6d14b26af48 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 84c00e4a-20d3-4739-8535-e27076d85a89] Block device information present: {'root_device_name': '/dev/sda', 'image': [{'encryption_format': None, 'guest_format': None, 'size': 0, 'boot_index': 0, 'device_name': '/dev/sda', 'device_type': 'disk', 'encryption_secret_uuid': None, 'disk_bus': None, 'encrypted': False, 'encryption_options': None, 'image_type': None, 'image_id': '43ea607c-7ece-4601-9b11-75c6a16aa7dd'}], 'ephemerals': [], 'block_device_mapping': [{'guest_format': None, 'boot_index': None, 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-245480', 'volume_id': '773d984d-4185-4716-a44d-6527016a9c86', 'name': 'volume-773d984d-4185-4716-a44d-6527016a9c86', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '84c00e4a-20d3-4739-8535-e27076d85a89', 'attached_at': '', 'detached_at': '', 'volume_id': '773d984d-4185-4716-a44d-6527016a9c86', 'serial': '773d984d-4185-4716-a44d-6527016a9c86'}, 'device_type': None, 'mount_device': '/dev/sdb', 'disk_bus': None, 'delete_on_termination': False, 'attachment_id': '804dca2e-f44c-407c-bc2d-1dfe0713264e', 'volume_type': None}], 'swap': None} {{(pid=62070) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 1007.664730] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-636c2657-6edc-4f64-8b0d-e6d14b26af48 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 84c00e4a-20d3-4739-8535-e27076d85a89] Volume attach. 
Driver type: vmdk {{(pid=62070) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1007.664968] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-636c2657-6edc-4f64-8b0d-e6d14b26af48 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 84c00e4a-20d3-4739-8535-e27076d85a89] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-245480', 'volume_id': '773d984d-4185-4716-a44d-6527016a9c86', 'name': 'volume-773d984d-4185-4716-a44d-6527016a9c86', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '84c00e4a-20d3-4739-8535-e27076d85a89', 'attached_at': '', 'detached_at': '', 'volume_id': '773d984d-4185-4716-a44d-6527016a9c86', 'serial': '773d984d-4185-4716-a44d-6527016a9c86'} {{(pid=62070) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1007.666090] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d302208-f88f-4f6e-9cf1-92d00cc1030c {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.682551] env[62070]: DEBUG nova.compute.manager [None req-d3d12465-2651-49ee-be0e-d6920a7c6c57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 33d04e59-da01-4ba3-ac42-ab93372a332d] Starting instance... {{(pid=62070) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1007.688615] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad82b1b2-8199-4e87-9662-135d4d14409e {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.713444] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-636c2657-6edc-4f64-8b0d-e6d14b26af48 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 84c00e4a-20d3-4739-8535-e27076d85a89] Reconfiguring VM instance instance-00000053 to attach disk [datastore1] volume-773d984d-4185-4716-a44d-6527016a9c86/volume-773d984d-4185-4716-a44d-6527016a9c86.vmdk or device None with type thin {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1007.713678] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-aaab5985-08a9-4fcd-9430-e3ee812a491c {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.732028] env[62070]: DEBUG oslo_vmware.api [None req-636c2657-6edc-4f64-8b0d-e6d14b26af48 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Waiting for the task: (returnval){ [ 1007.732028] env[62070]: value = "task-1122261" [ 1007.732028] env[62070]: _type = "Task" [ 1007.732028] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1007.741685] env[62070]: DEBUG oslo_vmware.api [None req-636c2657-6edc-4f64-8b0d-e6d14b26af48 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': task-1122261, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1007.850989] env[62070]: DEBUG oslo_concurrency.lockutils [None req-93767b52-6617-4bde-8e1b-f022a96d60f4 tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] Lock "b9e5a798-9ce9-4e36-8c32-bc32b0dc1eae" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 22.334s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1008.013161] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-4c71a514-0b63-4572-bfc9-c9bc2441840c tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 71aead12-a182-40a7-b5a9-91c01271b800] Powering on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1008.013475] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7560e981-c3a6-4364-96b0-633013891843 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.025538] env[62070]: DEBUG oslo_vmware.api [None req-4c71a514-0b63-4572-bfc9-c9bc2441840c tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Waiting for the task: (returnval){ [ 1008.025538] env[62070]: value = "task-1122262" [ 1008.025538] env[62070]: _type = "Task" [ 1008.025538] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1008.038143] env[62070]: DEBUG nova.compute.manager [req-ce9e7836-0192-4038-9725-3913ef6703df req-92fa3c42-1314-45cd-8ac1-bde8dae34ab2 service nova] [instance: b9e5a798-9ce9-4e36-8c32-bc32b0dc1eae] Received event network-changed-f30abb0e-6245-49cc-912a-4685dac5186b {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1008.038480] env[62070]: DEBUG nova.compute.manager [req-ce9e7836-0192-4038-9725-3913ef6703df req-92fa3c42-1314-45cd-8ac1-bde8dae34ab2 service nova] [instance: b9e5a798-9ce9-4e36-8c32-bc32b0dc1eae] Refreshing instance network info cache due to event network-changed-f30abb0e-6245-49cc-912a-4685dac5186b. 
{{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1008.038954] env[62070]: DEBUG oslo_concurrency.lockutils [req-ce9e7836-0192-4038-9725-3913ef6703df req-92fa3c42-1314-45cd-8ac1-bde8dae34ab2 service nova] Acquiring lock "refresh_cache-b9e5a798-9ce9-4e36-8c32-bc32b0dc1eae" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1008.039117] env[62070]: DEBUG oslo_concurrency.lockutils [req-ce9e7836-0192-4038-9725-3913ef6703df req-92fa3c42-1314-45cd-8ac1-bde8dae34ab2 service nova] Acquired lock "refresh_cache-b9e5a798-9ce9-4e36-8c32-bc32b0dc1eae" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1008.039340] env[62070]: DEBUG nova.network.neutron [req-ce9e7836-0192-4038-9725-3913ef6703df req-92fa3c42-1314-45cd-8ac1-bde8dae34ab2 service nova] [instance: b9e5a798-9ce9-4e36-8c32-bc32b0dc1eae] Refreshing network info cache for port f30abb0e-6245-49cc-912a-4685dac5186b {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1008.050857] env[62070]: DEBUG oslo_vmware.api [None req-4c71a514-0b63-4572-bfc9-c9bc2441840c tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Task: {'id': task-1122262, 'name': PowerOnVM_Task} progress is 33%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1008.075030] env[62070]: DEBUG oslo_concurrency.lockutils [None req-70c22416-1d5b-4f27-873a-b05d521d4159 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Lock "f810eab8-1c8c-4f7a-8acd-f46ac5e6d31f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.945s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1008.144595] env[62070]: DEBUG oslo_vmware.api [None req-9026fc09-39c2-473e-a4e2-4fc03ac7d742 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Task: {'id': task-1122260, 'name': ReconfigVM_Task, 'duration_secs': 0.308935} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1008.145179] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-9026fc09-39c2-473e-a4e2-4fc03ac7d742 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] [instance: 5cccd79d-d243-49db-8581-718dd594f3b3] Reconfigured VM instance instance-0000005f to attach disk [datastore2] 5cccd79d-d243-49db-8581-718dd594f3b3/5cccd79d-d243-49db-8581-718dd594f3b3.vmdk or device None with type sparse {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1008.146059] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-dfb525fa-b3a1-4189-a456-92b584f743fb {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.161429] env[62070]: DEBUG oslo_vmware.api [None req-9026fc09-39c2-473e-a4e2-4fc03ac7d742 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Waiting for the task: (returnval){ [ 1008.161429] env[62070]: value = "task-1122263" [ 1008.161429] env[62070]: _type = "Task" [ 1008.161429] env[62070]: } to complete. 
{{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1008.171120] env[62070]: DEBUG oslo_vmware.api [None req-9026fc09-39c2-473e-a4e2-4fc03ac7d742 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Task: {'id': task-1122263, 'name': Rename_Task} progress is 6%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1008.213863] env[62070]: DEBUG oslo_concurrency.lockutils [None req-d3d12465-2651-49ee-be0e-d6920a7c6c57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1008.244216] env[62070]: DEBUG oslo_vmware.api [None req-636c2657-6edc-4f64-8b0d-e6d14b26af48 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': task-1122261, 'name': ReconfigVM_Task, 'duration_secs': 0.343975} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1008.247696] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-636c2657-6edc-4f64-8b0d-e6d14b26af48 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 84c00e4a-20d3-4739-8535-e27076d85a89] Reconfigured VM instance instance-00000053 to attach disk [datastore1] volume-773d984d-4185-4716-a44d-6527016a9c86/volume-773d984d-4185-4716-a44d-6527016a9c86.vmdk or device None with type thin {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1008.255149] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-03324e4e-cfaf-4902-9941-de77c0e49ee3 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.278450] env[62070]: DEBUG oslo_vmware.api [None req-636c2657-6edc-4f64-8b0d-e6d14b26af48 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Waiting for the task: (returnval){ [ 1008.278450] env[62070]: value = "task-1122264" [ 1008.278450] env[62070]: _type = "Task" [ 1008.278450] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1008.288857] env[62070]: DEBUG oslo_vmware.api [None req-636c2657-6edc-4f64-8b0d-e6d14b26af48 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': task-1122264, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1008.339862] env[62070]: DEBUG oslo_concurrency.lockutils [None req-cbdb285e-928a-4802-98d0-fa21a116be33 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Acquiring lock "1d595bc8-ab51-4443-bf32-079078f3133b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1008.339862] env[62070]: DEBUG oslo_concurrency.lockutils [None req-cbdb285e-928a-4802-98d0-fa21a116be33 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Lock "1d595bc8-ab51-4443-bf32-079078f3133b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.003s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1008.339862] env[62070]: DEBUG oslo_concurrency.lockutils [None req-cbdb285e-928a-4802-98d0-fa21a116be33 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Acquiring lock "1d595bc8-ab51-4443-bf32-079078f3133b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1008.339862] env[62070]: DEBUG oslo_concurrency.lockutils [None req-cbdb285e-928a-4802-98d0-fa21a116be33 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Lock "1d595bc8-ab51-4443-bf32-079078f3133b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1008.340127] env[62070]: DEBUG oslo_concurrency.lockutils [None req-cbdb285e-928a-4802-98d0-fa21a116be33 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Lock "1d595bc8-ab51-4443-bf32-079078f3133b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1008.343504] env[62070]: INFO nova.compute.manager [None req-cbdb285e-928a-4802-98d0-fa21a116be33 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: 1d595bc8-ab51-4443-bf32-079078f3133b] Terminating instance [ 1008.347932] env[62070]: DEBUG nova.compute.manager [None req-cbdb285e-928a-4802-98d0-fa21a116be33 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: 1d595bc8-ab51-4443-bf32-079078f3133b] Start destroying the instance on the hypervisor. 
{{(pid=62070) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1008.348182] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-cbdb285e-928a-4802-98d0-fa21a116be33 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: 1d595bc8-ab51-4443-bf32-079078f3133b] Destroying instance {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1008.349080] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d2dc866-a852-42d3-8e06-acfb8dba244b {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.354915] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe015bd6-726e-4a01-b07b-4a387f8ee639 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.366626] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-cbdb285e-928a-4802-98d0-fa21a116be33 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: 1d595bc8-ab51-4443-bf32-079078f3133b] Powering off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1008.367835] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca8ed737-f965-4ea1-aa91-76c038064444 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.372549] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-bc3fa7b1-f345-4eba-9cc3-1cd774c903df {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.410325] env[62070]: DEBUG oslo_vmware.api [None req-cbdb285e-928a-4802-98d0-fa21a116be33 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Waiting for the task: (returnval){ [ 1008.410325] env[62070]: value = "task-1122265" [ 1008.410325] env[62070]: _type = "Task" [ 1008.410325] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1008.411251] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3c718b0-f2c5-4f22-b7d3-8fc0c847497d {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.425798] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75c4b1b5-5f55-4312-afed-2d43f2a4121c {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.430354] env[62070]: DEBUG oslo_vmware.api [None req-cbdb285e-928a-4802-98d0-fa21a116be33 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Task: {'id': task-1122265, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1008.441573] env[62070]: DEBUG nova.compute.provider_tree [None req-3a47a147-5701-4ad4-a769-76d1ae8c53df tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1008.519432] env[62070]: DEBUG nova.compute.manager [req-3859cd3a-73c6-4a0e-af95-efc46b0eefd7 req-8d18121b-7147-4fbe-ae4a-53c1eae4d86b service nova] [instance: b9e5a798-9ce9-4e36-8c32-bc32b0dc1eae] Received event network-changed-f30abb0e-6245-49cc-912a-4685dac5186b {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1008.519667] env[62070]: DEBUG nova.compute.manager [req-3859cd3a-73c6-4a0e-af95-efc46b0eefd7 req-8d18121b-7147-4fbe-ae4a-53c1eae4d86b service nova] [instance: b9e5a798-9ce9-4e36-8c32-bc32b0dc1eae] Refreshing instance network info cache due to event network-changed-f30abb0e-6245-49cc-912a-4685dac5186b. {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1008.519907] env[62070]: DEBUG oslo_concurrency.lockutils [req-3859cd3a-73c6-4a0e-af95-efc46b0eefd7 req-8d18121b-7147-4fbe-ae4a-53c1eae4d86b service nova] Acquiring lock "refresh_cache-b9e5a798-9ce9-4e36-8c32-bc32b0dc1eae" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1008.544032] env[62070]: DEBUG oslo_vmware.api [None req-4c71a514-0b63-4572-bfc9-c9bc2441840c tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Task: {'id': task-1122262, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1008.673848] env[62070]: DEBUG oslo_vmware.api [None req-9026fc09-39c2-473e-a4e2-4fc03ac7d742 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Task: {'id': task-1122263, 'name': Rename_Task, 'duration_secs': 0.158594} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1008.673983] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-9026fc09-39c2-473e-a4e2-4fc03ac7d742 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] [instance: 5cccd79d-d243-49db-8581-718dd594f3b3] Powering on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1008.674259] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-36ed9b79-9e6b-4096-b0ea-4d895122ff06 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.681435] env[62070]: DEBUG oslo_vmware.api [None req-9026fc09-39c2-473e-a4e2-4fc03ac7d742 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Waiting for the task: (returnval){ [ 1008.681435] env[62070]: value = "task-1122266" [ 1008.681435] env[62070]: _type = "Task" [ 1008.681435] env[62070]: } to complete. 
{{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1008.695435] env[62070]: DEBUG oslo_vmware.api [None req-9026fc09-39c2-473e-a4e2-4fc03ac7d742 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Task: {'id': task-1122266, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1008.789178] env[62070]: DEBUG oslo_vmware.api [None req-636c2657-6edc-4f64-8b0d-e6d14b26af48 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': task-1122264, 'name': ReconfigVM_Task, 'duration_secs': 0.163257} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1008.789607] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-636c2657-6edc-4f64-8b0d-e6d14b26af48 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 84c00e4a-20d3-4739-8535-e27076d85a89] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-245480', 'volume_id': '773d984d-4185-4716-a44d-6527016a9c86', 'name': 'volume-773d984d-4185-4716-a44d-6527016a9c86', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '84c00e4a-20d3-4739-8535-e27076d85a89', 'attached_at': '', 'detached_at': '', 'volume_id': '773d984d-4185-4716-a44d-6527016a9c86', 'serial': '773d984d-4185-4716-a44d-6527016a9c86'} {{(pid=62070) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1008.790217] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a60cdfde-87c5-4ef0-80ae-bb87b8286f55 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.796321] env[62070]: DEBUG oslo_vmware.api [None req-636c2657-6edc-4f64-8b0d-e6d14b26af48 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Waiting for the task: (returnval){ [ 1008.796321] env[62070]: value = "task-1122267" [ 1008.796321] env[62070]: _type = "Task" [ 1008.796321] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1008.804264] env[62070]: DEBUG oslo_vmware.api [None req-636c2657-6edc-4f64-8b0d-e6d14b26af48 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': task-1122267, 'name': Rename_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1008.852170] env[62070]: DEBUG nova.network.neutron [req-ce9e7836-0192-4038-9725-3913ef6703df req-92fa3c42-1314-45cd-8ac1-bde8dae34ab2 service nova] [instance: b9e5a798-9ce9-4e36-8c32-bc32b0dc1eae] Updated VIF entry in instance network info cache for port f30abb0e-6245-49cc-912a-4685dac5186b. 
{{(pid=62070) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1008.852743] env[62070]: DEBUG nova.network.neutron [req-ce9e7836-0192-4038-9725-3913ef6703df req-92fa3c42-1314-45cd-8ac1-bde8dae34ab2 service nova] [instance: b9e5a798-9ce9-4e36-8c32-bc32b0dc1eae] Updating instance_info_cache with network_info: [{"id": "f30abb0e-6245-49cc-912a-4685dac5186b", "address": "fa:16:3e:bb:06:6b", "network": {"id": "fd8b220c-f20c-489e-9c20-28b886709536", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-452165452-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e1960779e94c4e119497a0c1117f54fc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7bcd9d2d-25c8-41ad-9a4a-93b9029ba993", "external-id": "nsx-vlan-transportzone-734", "segmentation_id": 734, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf30abb0e-62", "ovs_interfaceid": "f30abb0e-6245-49cc-912a-4685dac5186b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1008.923631] env[62070]: DEBUG oslo_vmware.api [None req-cbdb285e-928a-4802-98d0-fa21a116be33 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Task: {'id': task-1122265, 'name': PowerOffVM_Task, 'duration_secs': 0.171455} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1008.924202] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-cbdb285e-928a-4802-98d0-fa21a116be33 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: 1d595bc8-ab51-4443-bf32-079078f3133b] Powered off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1008.924202] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-cbdb285e-928a-4802-98d0-fa21a116be33 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: 1d595bc8-ab51-4443-bf32-079078f3133b] Unregistering the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1008.924466] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-dd96a191-068c-4e5e-bb42-0603b295295d {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.946099] env[62070]: DEBUG nova.scheduler.client.report [None req-3a47a147-5701-4ad4-a769-76d1ae8c53df tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1008.998259] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-cbdb285e-928a-4802-98d0-fa21a116be33 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: 1d595bc8-ab51-4443-bf32-079078f3133b] Unregistered the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1008.998259] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-cbdb285e-928a-4802-98d0-fa21a116be33 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: 1d595bc8-ab51-4443-bf32-079078f3133b] Deleting contents of the VM from datastore datastore2 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1008.998259] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-cbdb285e-928a-4802-98d0-fa21a116be33 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Deleting the datastore file [datastore2] 1d595bc8-ab51-4443-bf32-079078f3133b {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1008.998526] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-671dbf9c-efc6-41d0-bfef-a6a6697d0799 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.004513] env[62070]: DEBUG oslo_vmware.api [None req-cbdb285e-928a-4802-98d0-fa21a116be33 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Waiting for the task: (returnval){ [ 1009.004513] env[62070]: value = "task-1122269" [ 1009.004513] env[62070]: _type = "Task" [ 1009.004513] env[62070]: } to complete. 
{{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1009.013263] env[62070]: DEBUG oslo_vmware.api [None req-cbdb285e-928a-4802-98d0-fa21a116be33 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Task: {'id': task-1122269, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1009.038718] env[62070]: DEBUG oslo_vmware.api [None req-4c71a514-0b63-4572-bfc9-c9bc2441840c tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Task: {'id': task-1122262, 'name': PowerOnVM_Task, 'duration_secs': 0.616093} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1009.039074] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-4c71a514-0b63-4572-bfc9-c9bc2441840c tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 71aead12-a182-40a7-b5a9-91c01271b800] Powered on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1009.039295] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-4c71a514-0b63-4572-bfc9-c9bc2441840c tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 71aead12-a182-40a7-b5a9-91c01271b800] Updating instance '71aead12-a182-40a7-b5a9-91c01271b800' progress to 100 {{(pid=62070) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 1009.051274] env[62070]: DEBUG oslo_concurrency.lockutils [None req-1551e19b-6f87-42d6-8898-94554d769890 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Acquiring lock "b101c79a-abfd-4104-aaed-096995fb2337" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1009.051513] env[62070]: DEBUG oslo_concurrency.lockutils [None req-1551e19b-6f87-42d6-8898-94554d769890 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Lock "b101c79a-abfd-4104-aaed-096995fb2337" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1009.051716] env[62070]: DEBUG nova.compute.manager [None req-1551e19b-6f87-42d6-8898-94554d769890 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: b101c79a-abfd-4104-aaed-096995fb2337] Going to confirm migration 3 {{(pid=62070) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:4783}} [ 1009.194969] env[62070]: DEBUG oslo_vmware.api [None req-9026fc09-39c2-473e-a4e2-4fc03ac7d742 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Task: {'id': task-1122266, 'name': PowerOnVM_Task, 'duration_secs': 0.480236} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1009.194969] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-9026fc09-39c2-473e-a4e2-4fc03ac7d742 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] [instance: 5cccd79d-d243-49db-8581-718dd594f3b3] Powered on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1009.194969] env[62070]: INFO nova.compute.manager [None req-9026fc09-39c2-473e-a4e2-4fc03ac7d742 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] [instance: 5cccd79d-d243-49db-8581-718dd594f3b3] Took 7.10 seconds to spawn the instance on the hypervisor. [ 1009.194969] env[62070]: DEBUG nova.compute.manager [None req-9026fc09-39c2-473e-a4e2-4fc03ac7d742 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] [instance: 5cccd79d-d243-49db-8581-718dd594f3b3] Checking state {{(pid=62070) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1009.195143] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aaa4f955-491e-45f7-a6dc-9ec623fba964 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.311028] env[62070]: DEBUG oslo_vmware.api [None req-636c2657-6edc-4f64-8b0d-e6d14b26af48 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': task-1122267, 'name': Rename_Task, 'duration_secs': 0.217677} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1009.311028] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-636c2657-6edc-4f64-8b0d-e6d14b26af48 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 84c00e4a-20d3-4739-8535-e27076d85a89] Powering on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1009.311163] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a1662b1f-2931-4d75-89b8-fe47497df3a5 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.318421] env[62070]: DEBUG oslo_vmware.api [None req-636c2657-6edc-4f64-8b0d-e6d14b26af48 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Waiting for the task: (returnval){ [ 1009.318421] env[62070]: value = "task-1122270" [ 1009.318421] env[62070]: _type = "Task" [ 1009.318421] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1009.329082] env[62070]: DEBUG oslo_vmware.api [None req-636c2657-6edc-4f64-8b0d-e6d14b26af48 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': task-1122270, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1009.356279] env[62070]: DEBUG oslo_concurrency.lockutils [req-ce9e7836-0192-4038-9725-3913ef6703df req-92fa3c42-1314-45cd-8ac1-bde8dae34ab2 service nova] Releasing lock "refresh_cache-b9e5a798-9ce9-4e36-8c32-bc32b0dc1eae" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1009.356771] env[62070]: DEBUG oslo_concurrency.lockutils [req-3859cd3a-73c6-4a0e-af95-efc46b0eefd7 req-8d18121b-7147-4fbe-ae4a-53c1eae4d86b service nova] Acquired lock "refresh_cache-b9e5a798-9ce9-4e36-8c32-bc32b0dc1eae" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1009.357418] env[62070]: DEBUG nova.network.neutron [req-3859cd3a-73c6-4a0e-af95-efc46b0eefd7 req-8d18121b-7147-4fbe-ae4a-53c1eae4d86b service nova] [instance: b9e5a798-9ce9-4e36-8c32-bc32b0dc1eae] Refreshing network info cache for port f30abb0e-6245-49cc-912a-4685dac5186b {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1009.452541] env[62070]: DEBUG oslo_concurrency.lockutils [None req-3a47a147-5701-4ad4-a769-76d1ae8c53df tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.935s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1009.457486] env[62070]: DEBUG oslo_concurrency.lockutils [None req-d3d12465-2651-49ee-be0e-d6920a7c6c57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.242s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1009.457486] env[62070]: INFO nova.compute.claims [None req-d3d12465-2651-49ee-be0e-d6920a7c6c57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 33d04e59-da01-4ba3-ac42-ab93372a332d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1009.480710] env[62070]: INFO nova.scheduler.client.report [None req-3a47a147-5701-4ad4-a769-76d1ae8c53df tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Deleted allocations for instance 7dc27fe6-495f-498d-88fe-a99ddc19a21c [ 1009.516983] env[62070]: DEBUG oslo_vmware.api [None req-cbdb285e-928a-4802-98d0-fa21a116be33 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Task: {'id': task-1122269, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.191604} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1009.517887] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-cbdb285e-928a-4802-98d0-fa21a116be33 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Deleted the datastore file {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1009.518166] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-cbdb285e-928a-4802-98d0-fa21a116be33 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: 1d595bc8-ab51-4443-bf32-079078f3133b] Deleted contents of the VM from datastore datastore2 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1009.518382] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-cbdb285e-928a-4802-98d0-fa21a116be33 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: 1d595bc8-ab51-4443-bf32-079078f3133b] Instance destroyed {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1009.518685] env[62070]: INFO nova.compute.manager [None req-cbdb285e-928a-4802-98d0-fa21a116be33 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: 1d595bc8-ab51-4443-bf32-079078f3133b] Took 1.17 seconds to destroy the instance on the hypervisor. [ 1009.519030] env[62070]: DEBUG oslo.service.loopingcall [None req-cbdb285e-928a-4802-98d0-fa21a116be33 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1009.519262] env[62070]: DEBUG nova.compute.manager [-] [instance: 1d595bc8-ab51-4443-bf32-079078f3133b] Deallocating network for instance {{(pid=62070) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1009.519356] env[62070]: DEBUG nova.network.neutron [-] [instance: 1d595bc8-ab51-4443-bf32-079078f3133b] deallocate_for_instance() {{(pid=62070) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1009.632862] env[62070]: DEBUG oslo_concurrency.lockutils [None req-1551e19b-6f87-42d6-8898-94554d769890 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Acquiring lock "refresh_cache-b101c79a-abfd-4104-aaed-096995fb2337" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1009.633090] env[62070]: DEBUG oslo_concurrency.lockutils [None req-1551e19b-6f87-42d6-8898-94554d769890 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Acquired lock "refresh_cache-b101c79a-abfd-4104-aaed-096995fb2337" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1009.633290] env[62070]: DEBUG nova.network.neutron [None req-1551e19b-6f87-42d6-8898-94554d769890 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: b101c79a-abfd-4104-aaed-096995fb2337] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1009.633494] env[62070]: DEBUG nova.objects.instance [None req-1551e19b-6f87-42d6-8898-94554d769890 
tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Lazy-loading 'info_cache' on Instance uuid b101c79a-abfd-4104-aaed-096995fb2337 {{(pid=62070) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1009.716625] env[62070]: INFO nova.compute.manager [None req-9026fc09-39c2-473e-a4e2-4fc03ac7d742 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] [instance: 5cccd79d-d243-49db-8581-718dd594f3b3] Took 20.32 seconds to build instance. [ 1009.830578] env[62070]: DEBUG oslo_vmware.api [None req-636c2657-6edc-4f64-8b0d-e6d14b26af48 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': task-1122270, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1009.911282] env[62070]: DEBUG oslo_concurrency.lockutils [None req-4b17f16d-ee96-409a-b0d3-8d42aaac32b0 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Acquiring lock "a5cba512-9b50-4ca3-93eb-345be12dc588" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1009.911708] env[62070]: DEBUG oslo_concurrency.lockutils [None req-4b17f16d-ee96-409a-b0d3-8d42aaac32b0 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Lock "a5cba512-9b50-4ca3-93eb-345be12dc588" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1009.990526] env[62070]: DEBUG oslo_concurrency.lockutils [None req-3a47a147-5701-4ad4-a769-76d1ae8c53df tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Lock "7dc27fe6-495f-498d-88fe-a99ddc19a21c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.581s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1010.145160] env[62070]: DEBUG nova.network.neutron [req-3859cd3a-73c6-4a0e-af95-efc46b0eefd7 req-8d18121b-7147-4fbe-ae4a-53c1eae4d86b service nova] [instance: b9e5a798-9ce9-4e36-8c32-bc32b0dc1eae] Updated VIF entry in instance network info cache for port f30abb0e-6245-49cc-912a-4685dac5186b. 
{{(pid=62070) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1010.145160] env[62070]: DEBUG nova.network.neutron [req-3859cd3a-73c6-4a0e-af95-efc46b0eefd7 req-8d18121b-7147-4fbe-ae4a-53c1eae4d86b service nova] [instance: b9e5a798-9ce9-4e36-8c32-bc32b0dc1eae] Updating instance_info_cache with network_info: [{"id": "f30abb0e-6245-49cc-912a-4685dac5186b", "address": "fa:16:3e:bb:06:6b", "network": {"id": "fd8b220c-f20c-489e-9c20-28b886709536", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-452165452-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e1960779e94c4e119497a0c1117f54fc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7bcd9d2d-25c8-41ad-9a4a-93b9029ba993", "external-id": "nsx-vlan-transportzone-734", "segmentation_id": 734, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf30abb0e-62", "ovs_interfaceid": "f30abb0e-6245-49cc-912a-4685dac5186b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1010.223337] env[62070]: DEBUG oslo_concurrency.lockutils [None req-9026fc09-39c2-473e-a4e2-4fc03ac7d742 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Lock "5cccd79d-d243-49db-8581-718dd594f3b3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 21.843s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1010.330987] env[62070]: DEBUG oslo_vmware.api [None req-636c2657-6edc-4f64-8b0d-e6d14b26af48 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': task-1122270, 'name': PowerOnVM_Task, 'duration_secs': 0.796872} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1010.331291] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-636c2657-6edc-4f64-8b0d-e6d14b26af48 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 84c00e4a-20d3-4739-8535-e27076d85a89] Powered on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1010.331505] env[62070]: DEBUG nova.compute.manager [None req-636c2657-6edc-4f64-8b0d-e6d14b26af48 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 84c00e4a-20d3-4739-8535-e27076d85a89] Checking state {{(pid=62070) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1010.332312] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f114a961-1fcc-4607-a01b-f29c58932512 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.411130] env[62070]: DEBUG nova.network.neutron [-] [instance: 1d595bc8-ab51-4443-bf32-079078f3133b] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1010.416649] env[62070]: DEBUG nova.compute.utils [None req-4b17f16d-ee96-409a-b0d3-8d42aaac32b0 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Using /dev/sd instead of None {{(pid=62070) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1010.648954] env[62070]: DEBUG oslo_concurrency.lockutils [req-3859cd3a-73c6-4a0e-af95-efc46b0eefd7 req-8d18121b-7147-4fbe-ae4a-53c1eae4d86b service nova] Releasing lock "refresh_cache-b9e5a798-9ce9-4e36-8c32-bc32b0dc1eae" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1010.723259] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91b1e7ce-e443-4123-8d04-4b44e1a0166b {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.733483] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c05cc7d1-881b-4654-90f3-68e6daf3cf7d {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.770458] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03dae33f-2957-4842-bdad-b2261a0e2056 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.778444] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46fcb9ab-205f-4f4e-a395-eb2df3346e8d {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.793244] env[62070]: DEBUG nova.compute.provider_tree [None req-d3d12465-2651-49ee-be0e-d6920a7c6c57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1010.852888] env[62070]: DEBUG oslo_concurrency.lockutils [None req-636c2657-6edc-4f64-8b0d-e6d14b26af48 
tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1010.913726] env[62070]: INFO nova.compute.manager [-] [instance: 1d595bc8-ab51-4443-bf32-079078f3133b] Took 1.39 seconds to deallocate network for instance. [ 1010.919252] env[62070]: DEBUG oslo_concurrency.lockutils [None req-4b17f16d-ee96-409a-b0d3-8d42aaac32b0 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Lock "a5cba512-9b50-4ca3-93eb-345be12dc588" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.008s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1011.025400] env[62070]: DEBUG nova.network.neutron [None req-1551e19b-6f87-42d6-8898-94554d769890 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: b101c79a-abfd-4104-aaed-096995fb2337] Updating instance_info_cache with network_info: [{"id": "5b226cbf-df38-4b34-b591-7afc6de0a88c", "address": "fa:16:3e:a3:e3:84", "network": {"id": "4888f989-958d-49ff-bf5a-06873e4cc624", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-906255456-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d079c0ef3ed745fcaf69dc728dca4466", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7e0240aa-a694-48fc-a0f9-6f2d3e71aa12", "external-id": "nsx-vlan-transportzone-249", "segmentation_id": 249, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5b226cbf-df", "ovs_interfaceid": "5b226cbf-df38-4b34-b591-7afc6de0a88c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1011.297651] env[62070]: DEBUG nova.scheduler.client.report [None req-d3d12465-2651-49ee-be0e-d6920a7c6c57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1011.420508] env[62070]: DEBUG nova.compute.manager [req-4034816c-efda-4c7f-b531-34019016ca36 req-a04bd38e-19b2-426e-a37f-c2935c0fc79f service nova] [instance: 1d595bc8-ab51-4443-bf32-079078f3133b] Received event network-vif-deleted-20687f56-b863-4fa4-8f5d-df461da5054b {{(pid=62070) 
external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1011.421458] env[62070]: DEBUG oslo_concurrency.lockutils [None req-cbdb285e-928a-4802-98d0-fa21a116be33 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1011.528607] env[62070]: DEBUG oslo_concurrency.lockutils [None req-1551e19b-6f87-42d6-8898-94554d769890 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Releasing lock "refresh_cache-b101c79a-abfd-4104-aaed-096995fb2337" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1011.528949] env[62070]: DEBUG nova.objects.instance [None req-1551e19b-6f87-42d6-8898-94554d769890 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Lazy-loading 'migration_context' on Instance uuid b101c79a-abfd-4104-aaed-096995fb2337 {{(pid=62070) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1011.803104] env[62070]: DEBUG oslo_concurrency.lockutils [None req-d3d12465-2651-49ee-be0e-d6920a7c6c57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.348s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1011.803663] env[62070]: DEBUG nova.compute.manager [None req-d3d12465-2651-49ee-be0e-d6920a7c6c57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 33d04e59-da01-4ba3-ac42-ab93372a332d] Start building networks asynchronously for instance. 
{{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1011.806396] env[62070]: DEBUG oslo_concurrency.lockutils [None req-636c2657-6edc-4f64-8b0d-e6d14b26af48 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 0.954s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1011.806647] env[62070]: DEBUG nova.objects.instance [None req-636c2657-6edc-4f64-8b0d-e6d14b26af48 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 84c00e4a-20d3-4739-8535-e27076d85a89] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62070) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1011.916995] env[62070]: DEBUG oslo_concurrency.lockutils [None req-4c27c27b-0ed3-4e75-bd51-ec4e9e507275 tempest-AttachInterfacesUnderV243Test-1418654099 tempest-AttachInterfacesUnderV243Test-1418654099-project-member] Acquiring lock "7bfda953-ac95-4dce-b7a7-c570eae35582" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1011.916995] env[62070]: DEBUG oslo_concurrency.lockutils [None req-4c27c27b-0ed3-4e75-bd51-ec4e9e507275 tempest-AttachInterfacesUnderV243Test-1418654099 tempest-AttachInterfacesUnderV243Test-1418654099-project-member] Lock "7bfda953-ac95-4dce-b7a7-c570eae35582" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1011.980897] env[62070]: DEBUG oslo_concurrency.lockutils [None req-4b17f16d-ee96-409a-b0d3-8d42aaac32b0 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Acquiring lock "a5cba512-9b50-4ca3-93eb-345be12dc588" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1011.980897] env[62070]: DEBUG oslo_concurrency.lockutils [None req-4b17f16d-ee96-409a-b0d3-8d42aaac32b0 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Lock "a5cba512-9b50-4ca3-93eb-345be12dc588" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1011.980897] env[62070]: INFO nova.compute.manager [None req-4b17f16d-ee96-409a-b0d3-8d42aaac32b0 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] [instance: a5cba512-9b50-4ca3-93eb-345be12dc588] Attaching volume 0635ea59-c4ec-4e97-9bdd-1d58208eb929 to /dev/sdb [ 1012.019100] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2e4b2eb-c215-4389-9c5a-2856d0c1de89 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.028493] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-61d158d7-3c19-4931-8f20-6b1199d1c027 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.031900] env[62070]: DEBUG nova.objects.base [None req-1551e19b-6f87-42d6-8898-94554d769890 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Object Instance lazy-loaded attributes: info_cache,migration_context {{(pid=62070) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 1012.032617] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3489ad59-52f1-40fe-9f2a-00b19561d498 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.060076] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-97810729-ffcd-4723-8dc7-0912d9874772 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.063359] env[62070]: DEBUG nova.virt.block_device [None req-4b17f16d-ee96-409a-b0d3-8d42aaac32b0 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] [instance: a5cba512-9b50-4ca3-93eb-345be12dc588] Updating existing volume attachment record: 1d820f39-2e6d-4710-b849-6b03d57003b8 {{(pid=62070) _volume_attach /opt/stack/nova/nova/virt/block_device.py:679}} [ 1012.069281] env[62070]: DEBUG oslo_vmware.api [None req-1551e19b-6f87-42d6-8898-94554d769890 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Waiting for the task: (returnval){ [ 1012.069281] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]5289f1b5-cf47-75a0-b95c-ec261b7a2846" [ 1012.069281] env[62070]: _type = "Task" [ 1012.069281] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1012.078024] env[62070]: DEBUG oslo_vmware.api [None req-1551e19b-6f87-42d6-8898-94554d769890 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]5289f1b5-cf47-75a0-b95c-ec261b7a2846, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1012.217452] env[62070]: DEBUG nova.compute.manager [req-4f225cb7-2f43-469f-9e58-d0b37eb9a742 req-be07e15a-1b73-4f61-ad32-8de14c11052e service nova] [instance: 5cccd79d-d243-49db-8581-718dd594f3b3] Received event network-changed-a423c3f9-0b61-4375-9131-a98f082c1193 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1012.217751] env[62070]: DEBUG nova.compute.manager [req-4f225cb7-2f43-469f-9e58-d0b37eb9a742 req-be07e15a-1b73-4f61-ad32-8de14c11052e service nova] [instance: 5cccd79d-d243-49db-8581-718dd594f3b3] Refreshing instance network info cache due to event network-changed-a423c3f9-0b61-4375-9131-a98f082c1193. 
{{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1012.217997] env[62070]: DEBUG oslo_concurrency.lockutils [req-4f225cb7-2f43-469f-9e58-d0b37eb9a742 req-be07e15a-1b73-4f61-ad32-8de14c11052e service nova] Acquiring lock "refresh_cache-5cccd79d-d243-49db-8581-718dd594f3b3" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1012.218141] env[62070]: DEBUG oslo_concurrency.lockutils [req-4f225cb7-2f43-469f-9e58-d0b37eb9a742 req-be07e15a-1b73-4f61-ad32-8de14c11052e service nova] Acquired lock "refresh_cache-5cccd79d-d243-49db-8581-718dd594f3b3" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1012.218314] env[62070]: DEBUG nova.network.neutron [req-4f225cb7-2f43-469f-9e58-d0b37eb9a742 req-be07e15a-1b73-4f61-ad32-8de14c11052e service nova] [instance: 5cccd79d-d243-49db-8581-718dd594f3b3] Refreshing network info cache for port a423c3f9-0b61-4375-9131-a98f082c1193 {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1012.313282] env[62070]: DEBUG nova.compute.utils [None req-d3d12465-2651-49ee-be0e-d6920a7c6c57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Using /dev/sd instead of None {{(pid=62070) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1012.317031] env[62070]: DEBUG nova.compute.manager [None req-d3d12465-2651-49ee-be0e-d6920a7c6c57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 33d04e59-da01-4ba3-ac42-ab93372a332d] Allocating IP information in the background. {{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1012.317377] env[62070]: DEBUG nova.network.neutron [None req-d3d12465-2651-49ee-be0e-d6920a7c6c57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 33d04e59-da01-4ba3-ac42-ab93372a332d] allocate_for_instance() {{(pid=62070) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1012.335837] env[62070]: DEBUG oslo_concurrency.lockutils [None req-2a9c8032-9eaa-42ed-988b-ffc6e40f7aff tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Acquiring lock "71aead12-a182-40a7-b5a9-91c01271b800" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1012.336080] env[62070]: DEBUG oslo_concurrency.lockutils [None req-2a9c8032-9eaa-42ed-988b-ffc6e40f7aff tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Lock "71aead12-a182-40a7-b5a9-91c01271b800" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1012.337026] env[62070]: DEBUG nova.compute.manager [None req-2a9c8032-9eaa-42ed-988b-ffc6e40f7aff tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 71aead12-a182-40a7-b5a9-91c01271b800] Going to confirm migration 4 {{(pid=62070) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:4783}} [ 1012.394911] env[62070]: DEBUG nova.policy [None req-d3d12465-2651-49ee-be0e-d6920a7c6c57 tempest-DeleteServersTestJSON-2112665073 
tempest-DeleteServersTestJSON-2112665073-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f866f97eed1a41b39b4cd552102c6e21', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9191f0e6c2ee401abca64c0780e230bf', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62070) authorize /opt/stack/nova/nova/policy.py:201}} [ 1012.419411] env[62070]: DEBUG nova.compute.manager [None req-4c27c27b-0ed3-4e75-bd51-ec4e9e507275 tempest-AttachInterfacesUnderV243Test-1418654099 tempest-AttachInterfacesUnderV243Test-1418654099-project-member] [instance: 7bfda953-ac95-4dce-b7a7-c570eae35582] Starting instance... {{(pid=62070) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1012.580354] env[62070]: DEBUG oslo_vmware.api [None req-1551e19b-6f87-42d6-8898-94554d769890 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]5289f1b5-cf47-75a0-b95c-ec261b7a2846, 'name': SearchDatastore_Task, 'duration_secs': 0.009686} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1012.580901] env[62070]: DEBUG oslo_concurrency.lockutils [None req-1551e19b-6f87-42d6-8898-94554d769890 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1012.817515] env[62070]: DEBUG nova.compute.manager [None req-d3d12465-2651-49ee-be0e-d6920a7c6c57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 33d04e59-da01-4ba3-ac42-ab93372a332d] Start building block device mappings for instance. 
{{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1012.822931] env[62070]: DEBUG oslo_concurrency.lockutils [None req-636c2657-6edc-4f64-8b0d-e6d14b26af48 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.016s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1012.824176] env[62070]: DEBUG oslo_concurrency.lockutils [None req-cbdb285e-928a-4802-98d0-fa21a116be33 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.403s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1012.824666] env[62070]: DEBUG nova.objects.instance [None req-cbdb285e-928a-4802-98d0-fa21a116be33 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Lazy-loading 'resources' on Instance uuid 1d595bc8-ab51-4443-bf32-079078f3133b {{(pid=62070) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1012.921907] env[62070]: DEBUG oslo_concurrency.lockutils [None req-2a9c8032-9eaa-42ed-988b-ffc6e40f7aff tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Acquiring lock "refresh_cache-71aead12-a182-40a7-b5a9-91c01271b800" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1012.922144] env[62070]: DEBUG oslo_concurrency.lockutils [None req-2a9c8032-9eaa-42ed-988b-ffc6e40f7aff tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Acquired lock "refresh_cache-71aead12-a182-40a7-b5a9-91c01271b800" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1012.922307] env[62070]: DEBUG nova.network.neutron [None req-2a9c8032-9eaa-42ed-988b-ffc6e40f7aff tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 71aead12-a182-40a7-b5a9-91c01271b800] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1012.922941] env[62070]: DEBUG nova.objects.instance [None req-2a9c8032-9eaa-42ed-988b-ffc6e40f7aff tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Lazy-loading 'info_cache' on Instance uuid 71aead12-a182-40a7-b5a9-91c01271b800 {{(pid=62070) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1012.942238] env[62070]: DEBUG nova.network.neutron [None req-d3d12465-2651-49ee-be0e-d6920a7c6c57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 33d04e59-da01-4ba3-ac42-ab93372a332d] Successfully created port: 222fba36-759a-41f7-a82a-cb4047bd3725 {{(pid=62070) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1012.948519] env[62070]: DEBUG oslo_concurrency.lockutils [None req-4c27c27b-0ed3-4e75-bd51-ec4e9e507275 tempest-AttachInterfacesUnderV243Test-1418654099 tempest-AttachInterfacesUnderV243Test-1418654099-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62070) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1013.057176] env[62070]: DEBUG oslo_concurrency.lockutils [None req-bfe789c7-9756-4165-ae60-a38857f0dfb6 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Acquiring lock "84c00e4a-20d3-4739-8535-e27076d85a89" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1013.057176] env[62070]: DEBUG oslo_concurrency.lockutils [None req-bfe789c7-9756-4165-ae60-a38857f0dfb6 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Lock "84c00e4a-20d3-4739-8535-e27076d85a89" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.001s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1013.466397] env[62070]: DEBUG nova.network.neutron [req-4f225cb7-2f43-469f-9e58-d0b37eb9a742 req-be07e15a-1b73-4f61-ad32-8de14c11052e service nova] [instance: 5cccd79d-d243-49db-8581-718dd594f3b3] Updated VIF entry in instance network info cache for port a423c3f9-0b61-4375-9131-a98f082c1193. {{(pid=62070) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1013.466889] env[62070]: DEBUG nova.network.neutron [req-4f225cb7-2f43-469f-9e58-d0b37eb9a742 req-be07e15a-1b73-4f61-ad32-8de14c11052e service nova] [instance: 5cccd79d-d243-49db-8581-718dd594f3b3] Updating instance_info_cache with network_info: [{"id": "a423c3f9-0b61-4375-9131-a98f082c1193", "address": "fa:16:3e:15:82:ce", "network": {"id": "443d2d62-bcef-44b2-814a-3e5dc50abc04", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-772061432-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.136", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e85c0cc8e0f544bfbb76970d3123fbb7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5e1049e8-c06b-4c93-a9e1-2cbb530f3f95", "external-id": "nsx-vlan-transportzone-966", "segmentation_id": 966, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa423c3f9-0b", "ovs_interfaceid": "a423c3f9-0b61-4375-9131-a98f082c1193", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1013.559254] env[62070]: INFO nova.compute.manager [None req-bfe789c7-9756-4165-ae60-a38857f0dfb6 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 84c00e4a-20d3-4739-8535-e27076d85a89] Detaching volume 773d984d-4185-4716-a44d-6527016a9c86 [ 1013.589310] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53d6c35e-9f3f-415f-8c6a-d0f593f4ea1e {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
1013.595986] env[62070]: INFO nova.virt.block_device [None req-bfe789c7-9756-4165-ae60-a38857f0dfb6 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 84c00e4a-20d3-4739-8535-e27076d85a89] Attempting to driver detach volume 773d984d-4185-4716-a44d-6527016a9c86 from mountpoint /dev/sdb [ 1013.596427] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-bfe789c7-9756-4165-ae60-a38857f0dfb6 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 84c00e4a-20d3-4739-8535-e27076d85a89] Volume detach. Driver type: vmdk {{(pid=62070) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1013.596756] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-bfe789c7-9756-4165-ae60-a38857f0dfb6 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 84c00e4a-20d3-4739-8535-e27076d85a89] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-245480', 'volume_id': '773d984d-4185-4716-a44d-6527016a9c86', 'name': 'volume-773d984d-4185-4716-a44d-6527016a9c86', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '84c00e4a-20d3-4739-8535-e27076d85a89', 'attached_at': '', 'detached_at': '', 'volume_id': '773d984d-4185-4716-a44d-6527016a9c86', 'serial': '773d984d-4185-4716-a44d-6527016a9c86'} {{(pid=62070) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1013.598246] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1e2e014-bf4b-4565-8c46-191c7fa1ebbb {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.602537] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59300687-1ec3-4a95-a231-8aa201738f99 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.651595] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d5608a7-4230-4651-a086-f61a05f7db70 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.654961] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05f346a8-ad74-4f37-8c3e-bb0f950b10db {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.662982] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ab864f6-72a4-48ca-bbab-b8b13c649d32 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.666126] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82a70578-db26-4550-8369-e0a7977caf48 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.696307] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15317ad2-34b8-4408-bd48-9785d65e22b5 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.699309] env[62070]: DEBUG nova.compute.provider_tree 
[None req-cbdb285e-928a-4802-98d0-fa21a116be33 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1013.712648] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-bfe789c7-9756-4165-ae60-a38857f0dfb6 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] The volume has not been displaced from its original location: [datastore1] volume-773d984d-4185-4716-a44d-6527016a9c86/volume-773d984d-4185-4716-a44d-6527016a9c86.vmdk. No consolidation needed. {{(pid=62070) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1013.720017] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-bfe789c7-9756-4165-ae60-a38857f0dfb6 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 84c00e4a-20d3-4739-8535-e27076d85a89] Reconfiguring VM instance instance-00000053 to detach disk 2001 {{(pid=62070) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1013.720017] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-68a402e6-f69e-4cf7-8607-dd4c4c4556e3 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.737770] env[62070]: DEBUG oslo_vmware.api [None req-bfe789c7-9756-4165-ae60-a38857f0dfb6 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Waiting for the task: (returnval){ [ 1013.737770] env[62070]: value = "task-1122275" [ 1013.737770] env[62070]: _type = "Task" [ 1013.737770] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1013.745528] env[62070]: DEBUG oslo_vmware.api [None req-bfe789c7-9756-4165-ae60-a38857f0dfb6 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': task-1122275, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1013.833035] env[62070]: DEBUG nova.compute.manager [None req-d3d12465-2651-49ee-be0e-d6920a7c6c57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 33d04e59-da01-4ba3-ac42-ab93372a332d] Start spawning the instance on the hypervisor. 
{{(pid=62070) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1013.847744] env[62070]: DEBUG oslo_concurrency.lockutils [None req-1c62a5a1-6f00-412a-ada6-05392ff9718d tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] Acquiring lock "1ababba6-838c-4ba6-bd83-e2b15aaf4b97" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1013.848031] env[62070]: DEBUG oslo_concurrency.lockutils [None req-1c62a5a1-6f00-412a-ada6-05392ff9718d tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] Lock "1ababba6-838c-4ba6-bd83-e2b15aaf4b97" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1013.857333] env[62070]: DEBUG nova.virt.hardware [None req-d3d12465-2651-49ee-be0e-d6920a7c6c57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T09:21:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T09:21:17Z,direct_url=,disk_format='vmdk',id=43ea607c-7ece-4601-9b11-75c6a16aa7dd,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9d42cb2bbadf40d6b35f237f71234611',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T09:21:18Z,virtual_size=,visibility=), allow threads: False {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1013.857578] env[62070]: DEBUG nova.virt.hardware [None req-d3d12465-2651-49ee-be0e-d6920a7c6c57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Flavor limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1013.857746] env[62070]: DEBUG nova.virt.hardware [None req-d3d12465-2651-49ee-be0e-d6920a7c6c57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Image limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1013.857953] env[62070]: DEBUG nova.virt.hardware [None req-d3d12465-2651-49ee-be0e-d6920a7c6c57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Flavor pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1013.858147] env[62070]: DEBUG nova.virt.hardware [None req-d3d12465-2651-49ee-be0e-d6920a7c6c57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Image pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1013.858355] env[62070]: DEBUG nova.virt.hardware [None req-d3d12465-2651-49ee-be0e-d6920a7c6c57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Chose sockets=0, cores=0, 
threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1013.858600] env[62070]: DEBUG nova.virt.hardware [None req-d3d12465-2651-49ee-be0e-d6920a7c6c57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1013.858772] env[62070]: DEBUG nova.virt.hardware [None req-d3d12465-2651-49ee-be0e-d6920a7c6c57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1013.858947] env[62070]: DEBUG nova.virt.hardware [None req-d3d12465-2651-49ee-be0e-d6920a7c6c57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Got 1 possible topologies {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1013.859168] env[62070]: DEBUG nova.virt.hardware [None req-d3d12465-2651-49ee-be0e-d6920a7c6c57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1013.859460] env[62070]: DEBUG nova.virt.hardware [None req-d3d12465-2651-49ee-be0e-d6920a7c6c57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1013.860811] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38a56a61-0ae4-4ec8-b432-5757a39afb89 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.869505] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-092aa539-5d0e-45fd-8d9f-b74b574d6169 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.969797] env[62070]: DEBUG oslo_concurrency.lockutils [req-4f225cb7-2f43-469f-9e58-d0b37eb9a742 req-be07e15a-1b73-4f61-ad32-8de14c11052e service nova] Releasing lock "refresh_cache-5cccd79d-d243-49db-8581-718dd594f3b3" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1014.200666] env[62070]: DEBUG nova.network.neutron [None req-2a9c8032-9eaa-42ed-988b-ffc6e40f7aff tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 71aead12-a182-40a7-b5a9-91c01271b800] Updating instance_info_cache with network_info: [{"id": "a3ed0957-14c2-4144-8d45-f4a0e5cb45ab", "address": "fa:16:3e:3c:6a:3d", "network": {"id": "1ca75409-ae3f-4837-a2b6-ed2445253336", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1883034081-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": 
"10.180.180.208", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f29ac48ab6544ec0bd1d210aec05dbc5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1559ce49-7345-443f-bf02-4bfeb88356ef", "external-id": "nsx-vlan-transportzone-670", "segmentation_id": 670, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa3ed0957-14", "ovs_interfaceid": "a3ed0957-14c2-4144-8d45-f4a0e5cb45ab", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1014.202581] env[62070]: DEBUG nova.scheduler.client.report [None req-cbdb285e-928a-4802-98d0-fa21a116be33 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1014.248508] env[62070]: DEBUG oslo_vmware.api [None req-bfe789c7-9756-4165-ae60-a38857f0dfb6 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': task-1122275, 'name': ReconfigVM_Task, 'duration_secs': 0.377257} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1014.248803] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-bfe789c7-9756-4165-ae60-a38857f0dfb6 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 84c00e4a-20d3-4739-8535-e27076d85a89] Reconfigured VM instance instance-00000053 to detach disk 2001 {{(pid=62070) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1014.253817] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-065d6b41-2f15-497b-8f26-1a88fced904c {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.269644] env[62070]: DEBUG oslo_vmware.api [None req-bfe789c7-9756-4165-ae60-a38857f0dfb6 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Waiting for the task: (returnval){ [ 1014.269644] env[62070]: value = "task-1122276" [ 1014.269644] env[62070]: _type = "Task" [ 1014.269644] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1014.279994] env[62070]: DEBUG oslo_vmware.api [None req-bfe789c7-9756-4165-ae60-a38857f0dfb6 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': task-1122276, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1014.352689] env[62070]: DEBUG nova.compute.manager [None req-1c62a5a1-6f00-412a-ada6-05392ff9718d tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] [instance: 1ababba6-838c-4ba6-bd83-e2b15aaf4b97] Starting instance... {{(pid=62070) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1014.707182] env[62070]: DEBUG oslo_concurrency.lockutils [None req-2a9c8032-9eaa-42ed-988b-ffc6e40f7aff tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Releasing lock "refresh_cache-71aead12-a182-40a7-b5a9-91c01271b800" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1014.707621] env[62070]: DEBUG nova.objects.instance [None req-2a9c8032-9eaa-42ed-988b-ffc6e40f7aff tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Lazy-loading 'migration_context' on Instance uuid 71aead12-a182-40a7-b5a9-91c01271b800 {{(pid=62070) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1014.709263] env[62070]: DEBUG oslo_concurrency.lockutils [None req-cbdb285e-928a-4802-98d0-fa21a116be33 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.885s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1014.711819] env[62070]: DEBUG oslo_concurrency.lockutils [None req-1551e19b-6f87-42d6-8898-94554d769890 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 2.131s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1014.740655] env[62070]: INFO nova.scheduler.client.report [None req-cbdb285e-928a-4802-98d0-fa21a116be33 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Deleted allocations for instance 1d595bc8-ab51-4443-bf32-079078f3133b [ 1014.780114] env[62070]: DEBUG oslo_vmware.api [None req-bfe789c7-9756-4165-ae60-a38857f0dfb6 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': task-1122276, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1014.844972] env[62070]: DEBUG nova.compute.manager [req-bf4536e2-bf7d-4101-8d2a-0f1704d1a96a req-a6645f26-ab50-4038-baa9-93f04c02aee6 service nova] [instance: 33d04e59-da01-4ba3-ac42-ab93372a332d] Received event network-vif-plugged-222fba36-759a-41f7-a82a-cb4047bd3725 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1014.845223] env[62070]: DEBUG oslo_concurrency.lockutils [req-bf4536e2-bf7d-4101-8d2a-0f1704d1a96a req-a6645f26-ab50-4038-baa9-93f04c02aee6 service nova] Acquiring lock "33d04e59-da01-4ba3-ac42-ab93372a332d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1014.845672] env[62070]: DEBUG oslo_concurrency.lockutils [req-bf4536e2-bf7d-4101-8d2a-0f1704d1a96a req-a6645f26-ab50-4038-baa9-93f04c02aee6 service nova] Lock "33d04e59-da01-4ba3-ac42-ab93372a332d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1014.845672] env[62070]: DEBUG oslo_concurrency.lockutils [req-bf4536e2-bf7d-4101-8d2a-0f1704d1a96a req-a6645f26-ab50-4038-baa9-93f04c02aee6 service nova] Lock "33d04e59-da01-4ba3-ac42-ab93372a332d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1014.845770] env[62070]: DEBUG nova.compute.manager [req-bf4536e2-bf7d-4101-8d2a-0f1704d1a96a req-a6645f26-ab50-4038-baa9-93f04c02aee6 service nova] [instance: 33d04e59-da01-4ba3-ac42-ab93372a332d] No waiting events found dispatching network-vif-plugged-222fba36-759a-41f7-a82a-cb4047bd3725 {{(pid=62070) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1014.845974] env[62070]: WARNING nova.compute.manager [req-bf4536e2-bf7d-4101-8d2a-0f1704d1a96a req-a6645f26-ab50-4038-baa9-93f04c02aee6 service nova] [instance: 33d04e59-da01-4ba3-ac42-ab93372a332d] Received unexpected event network-vif-plugged-222fba36-759a-41f7-a82a-cb4047bd3725 for instance with vm_state building and task_state spawning. 
[ 1014.873619] env[62070]: DEBUG oslo_concurrency.lockutils [None req-1c62a5a1-6f00-412a-ada6-05392ff9718d tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1014.977688] env[62070]: DEBUG nova.network.neutron [None req-d3d12465-2651-49ee-be0e-d6920a7c6c57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 33d04e59-da01-4ba3-ac42-ab93372a332d] Successfully updated port: 222fba36-759a-41f7-a82a-cb4047bd3725 {{(pid=62070) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1015.213100] env[62070]: DEBUG nova.objects.base [None req-2a9c8032-9eaa-42ed-988b-ffc6e40f7aff tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Object Instance<71aead12-a182-40a7-b5a9-91c01271b800> lazy-loaded attributes: info_cache,migration_context {{(pid=62070) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 1015.218307] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3975926-c841-4798-aba4-ecd189fd5e07 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.239029] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0753964c-a514-4611-a429-0919d40014cd {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.244307] env[62070]: DEBUG oslo_vmware.api [None req-2a9c8032-9eaa-42ed-988b-ffc6e40f7aff tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Waiting for the task: (returnval){ [ 1015.244307] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]52787dc1-00f5-9cdc-426b-60c00d65efc3" [ 1015.244307] env[62070]: _type = "Task" [ 1015.244307] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1015.250348] env[62070]: DEBUG oslo_concurrency.lockutils [None req-cbdb285e-928a-4802-98d0-fa21a116be33 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Lock "1d595bc8-ab51-4443-bf32-079078f3133b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.911s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1015.254373] env[62070]: DEBUG oslo_vmware.api [None req-2a9c8032-9eaa-42ed-988b-ffc6e40f7aff tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52787dc1-00f5-9cdc-426b-60c00d65efc3, 'name': SearchDatastore_Task, 'duration_secs': 0.00784} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1015.257020] env[62070]: DEBUG oslo_concurrency.lockutils [None req-2a9c8032-9eaa-42ed-988b-ffc6e40f7aff tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1015.282108] env[62070]: DEBUG oslo_vmware.api [None req-bfe789c7-9756-4165-ae60-a38857f0dfb6 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': task-1122276, 'name': ReconfigVM_Task, 'duration_secs': 0.922061} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1015.282108] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-bfe789c7-9756-4165-ae60-a38857f0dfb6 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 84c00e4a-20d3-4739-8535-e27076d85a89] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-245480', 'volume_id': '773d984d-4185-4716-a44d-6527016a9c86', 'name': 'volume-773d984d-4185-4716-a44d-6527016a9c86', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '84c00e4a-20d3-4739-8535-e27076d85a89', 'attached_at': '', 'detached_at': '', 'volume_id': '773d984d-4185-4716-a44d-6527016a9c86', 'serial': '773d984d-4185-4716-a44d-6527016a9c86'} {{(pid=62070) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1015.471869] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b805511-377e-4a94-8eff-1cce9983dcbe {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.479721] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ce57b81-9424-4a04-95c7-36f189f9939b {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.483108] env[62070]: DEBUG oslo_concurrency.lockutils [None req-d3d12465-2651-49ee-be0e-d6920a7c6c57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Acquiring lock "refresh_cache-33d04e59-da01-4ba3-ac42-ab93372a332d" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1015.483253] env[62070]: DEBUG oslo_concurrency.lockutils [None req-d3d12465-2651-49ee-be0e-d6920a7c6c57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Acquired lock "refresh_cache-33d04e59-da01-4ba3-ac42-ab93372a332d" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1015.483420] env[62070]: DEBUG nova.network.neutron [None req-d3d12465-2651-49ee-be0e-d6920a7c6c57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 33d04e59-da01-4ba3-ac42-ab93372a332d] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1015.511930] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-7e05f27a-560b-407a-887f-000429550878 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.520844] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-413fe153-9103-462d-9a71-b31f594ec560 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.536880] env[62070]: DEBUG nova.compute.provider_tree [None req-1551e19b-6f87-42d6-8898-94554d769890 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1015.827167] env[62070]: DEBUG nova.objects.instance [None req-bfe789c7-9756-4165-ae60-a38857f0dfb6 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Lazy-loading 'flavor' on Instance uuid 84c00e4a-20d3-4739-8535-e27076d85a89 {{(pid=62070) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1015.988058] env[62070]: DEBUG oslo_service.periodic_task [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62070) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1015.988551] env[62070]: DEBUG oslo_service.periodic_task [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62070) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1016.025558] env[62070]: DEBUG nova.network.neutron [None req-d3d12465-2651-49ee-be0e-d6920a7c6c57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 33d04e59-da01-4ba3-ac42-ab93372a332d] Instance cache missing network info. 
{{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1016.041220] env[62070]: DEBUG nova.scheduler.client.report [None req-1551e19b-6f87-42d6-8898-94554d769890 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1016.230950] env[62070]: DEBUG nova.network.neutron [None req-d3d12465-2651-49ee-be0e-d6920a7c6c57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 33d04e59-da01-4ba3-ac42-ab93372a332d] Updating instance_info_cache with network_info: [{"id": "222fba36-759a-41f7-a82a-cb4047bd3725", "address": "fa:16:3e:7a:4c:a1", "network": {"id": "5ea0fffc-372c-450e-b27b-10959077d58f", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1853458988-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9191f0e6c2ee401abca64c0780e230bf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f3c995e9-7f2f-420c-880a-d60da6e708ad", "external-id": "nsx-vlan-transportzone-166", "segmentation_id": 166, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap222fba36-75", "ovs_interfaceid": "222fba36-759a-41f7-a82a-cb4047bd3725", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1016.391652] env[62070]: DEBUG oslo_concurrency.lockutils [None req-a65cb8ad-84af-4b6e-a9a1-bf2808585ba4 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Acquiring lock "6b6e22b9-71fb-4139-993a-7b9fcf89d8e0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1016.391963] env[62070]: DEBUG oslo_concurrency.lockutils [None req-a65cb8ad-84af-4b6e-a9a1-bf2808585ba4 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Lock "6b6e22b9-71fb-4139-993a-7b9fcf89d8e0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1016.455322] env[62070]: DEBUG oslo_concurrency.lockutils [None req-da75baf3-0e49-4102-b74a-ac4b6d4ddcb1 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] 
Acquiring lock "5ec9074b-1237-4404-b13c-a7ca0dbe1d43" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1016.455596] env[62070]: DEBUG oslo_concurrency.lockutils [None req-da75baf3-0e49-4102-b74a-ac4b6d4ddcb1 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Lock "5ec9074b-1237-4404-b13c-a7ca0dbe1d43" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1016.455833] env[62070]: DEBUG oslo_concurrency.lockutils [None req-da75baf3-0e49-4102-b74a-ac4b6d4ddcb1 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Acquiring lock "5ec9074b-1237-4404-b13c-a7ca0dbe1d43-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1016.456084] env[62070]: DEBUG oslo_concurrency.lockutils [None req-da75baf3-0e49-4102-b74a-ac4b6d4ddcb1 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Lock "5ec9074b-1237-4404-b13c-a7ca0dbe1d43-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1016.456281] env[62070]: DEBUG oslo_concurrency.lockutils [None req-da75baf3-0e49-4102-b74a-ac4b6d4ddcb1 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Lock "5ec9074b-1237-4404-b13c-a7ca0dbe1d43-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1016.458433] env[62070]: INFO nova.compute.manager [None req-da75baf3-0e49-4102-b74a-ac4b6d4ddcb1 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] [instance: 5ec9074b-1237-4404-b13c-a7ca0dbe1d43] Terminating instance [ 1016.460212] env[62070]: DEBUG nova.compute.manager [None req-da75baf3-0e49-4102-b74a-ac4b6d4ddcb1 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] [instance: 5ec9074b-1237-4404-b13c-a7ca0dbe1d43] Start destroying the instance on the hypervisor. 
{{(pid=62070) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1016.460432] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-da75baf3-0e49-4102-b74a-ac4b6d4ddcb1 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] [instance: 5ec9074b-1237-4404-b13c-a7ca0dbe1d43] Destroying instance {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1016.461314] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e38a671-e304-42ab-a50f-ef3bad0f1dc1 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.468846] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-da75baf3-0e49-4102-b74a-ac4b6d4ddcb1 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] [instance: 5ec9074b-1237-4404-b13c-a7ca0dbe1d43] Powering off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1016.469356] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ae07e550-2c54-4251-b498-f9a2c90c42dc {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.475580] env[62070]: DEBUG oslo_vmware.api [None req-da75baf3-0e49-4102-b74a-ac4b6d4ddcb1 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Waiting for the task: (returnval){ [ 1016.475580] env[62070]: value = "task-1122278" [ 1016.475580] env[62070]: _type = "Task" [ 1016.475580] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1016.482831] env[62070]: DEBUG oslo_vmware.api [None req-da75baf3-0e49-4102-b74a-ac4b6d4ddcb1 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Task: {'id': task-1122278, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1016.495466] env[62070]: DEBUG oslo_service.periodic_task [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62070) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1016.495674] env[62070]: DEBUG nova.compute.manager [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Starting heal instance info cache {{(pid=62070) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}} [ 1016.619671] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-4b17f16d-ee96-409a-b0d3-8d42aaac32b0 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] [instance: a5cba512-9b50-4ca3-93eb-345be12dc588] Volume attach. 
Driver type: vmdk {{(pid=62070) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1016.620185] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-4b17f16d-ee96-409a-b0d3-8d42aaac32b0 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] [instance: a5cba512-9b50-4ca3-93eb-345be12dc588] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-245489', 'volume_id': '0635ea59-c4ec-4e97-9bdd-1d58208eb929', 'name': 'volume-0635ea59-c4ec-4e97-9bdd-1d58208eb929', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'a5cba512-9b50-4ca3-93eb-345be12dc588', 'attached_at': '', 'detached_at': '', 'volume_id': '0635ea59-c4ec-4e97-9bdd-1d58208eb929', 'serial': '0635ea59-c4ec-4e97-9bdd-1d58208eb929'} {{(pid=62070) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1016.621313] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ceaac43-e541-4f6d-a1cb-70b12cb89872 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.639493] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e31ca0b-4c93-4e66-919e-96002c973957 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.665167] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-4b17f16d-ee96-409a-b0d3-8d42aaac32b0 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] [instance: a5cba512-9b50-4ca3-93eb-345be12dc588] Reconfiguring VM instance instance-00000058 to attach disk [datastore1] volume-0635ea59-c4ec-4e97-9bdd-1d58208eb929/volume-0635ea59-c4ec-4e97-9bdd-1d58208eb929.vmdk or device None with type thin {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1016.665534] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-31519a54-f1b1-475a-8fe7-39fe43d74110 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.685187] env[62070]: DEBUG oslo_vmware.api [None req-4b17f16d-ee96-409a-b0d3-8d42aaac32b0 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Waiting for the task: (returnval){ [ 1016.685187] env[62070]: value = "task-1122279" [ 1016.685187] env[62070]: _type = "Task" [ 1016.685187] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1016.696853] env[62070]: DEBUG oslo_vmware.api [None req-4b17f16d-ee96-409a-b0d3-8d42aaac32b0 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Task: {'id': task-1122279, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1016.734133] env[62070]: DEBUG oslo_concurrency.lockutils [None req-d3d12465-2651-49ee-be0e-d6920a7c6c57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Releasing lock "refresh_cache-33d04e59-da01-4ba3-ac42-ab93372a332d" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1016.734512] env[62070]: DEBUG nova.compute.manager [None req-d3d12465-2651-49ee-be0e-d6920a7c6c57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 33d04e59-da01-4ba3-ac42-ab93372a332d] Instance network_info: |[{"id": "222fba36-759a-41f7-a82a-cb4047bd3725", "address": "fa:16:3e:7a:4c:a1", "network": {"id": "5ea0fffc-372c-450e-b27b-10959077d58f", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1853458988-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9191f0e6c2ee401abca64c0780e230bf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f3c995e9-7f2f-420c-880a-d60da6e708ad", "external-id": "nsx-vlan-transportzone-166", "segmentation_id": 166, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap222fba36-75", "ovs_interfaceid": "222fba36-759a-41f7-a82a-cb4047bd3725", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1016.735033] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-d3d12465-2651-49ee-be0e-d6920a7c6c57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 33d04e59-da01-4ba3-ac42-ab93372a332d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:7a:4c:a1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f3c995e9-7f2f-420c-880a-d60da6e708ad', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '222fba36-759a-41f7-a82a-cb4047bd3725', 'vif_model': 'vmxnet3'}] {{(pid=62070) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1016.742685] env[62070]: DEBUG oslo.service.loopingcall [None req-d3d12465-2651-49ee-be0e-d6920a7c6c57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1016.742931] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 33d04e59-da01-4ba3-ac42-ab93372a332d] Creating VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1016.743181] env[62070]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-403cd25c-138b-41df-9046-b117a30ee162 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.763700] env[62070]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1016.763700] env[62070]: value = "task-1122280" [ 1016.763700] env[62070]: _type = "Task" [ 1016.763700] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1016.771369] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1122280, 'name': CreateVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1016.835611] env[62070]: DEBUG oslo_concurrency.lockutils [None req-bfe789c7-9756-4165-ae60-a38857f0dfb6 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Lock "84c00e4a-20d3-4739-8535-e27076d85a89" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.780s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1016.872832] env[62070]: DEBUG nova.compute.manager [req-2849e3eb-ff79-4e27-b9d0-3182be955df9 req-3919109e-6bab-4cb4-b633-ce6edc8c77be service nova] [instance: 33d04e59-da01-4ba3-ac42-ab93372a332d] Received event network-changed-222fba36-759a-41f7-a82a-cb4047bd3725 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1016.873045] env[62070]: DEBUG nova.compute.manager [req-2849e3eb-ff79-4e27-b9d0-3182be955df9 req-3919109e-6bab-4cb4-b633-ce6edc8c77be service nova] [instance: 33d04e59-da01-4ba3-ac42-ab93372a332d] Refreshing instance network info cache due to event network-changed-222fba36-759a-41f7-a82a-cb4047bd3725. 
{{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1016.873334] env[62070]: DEBUG oslo_concurrency.lockutils [req-2849e3eb-ff79-4e27-b9d0-3182be955df9 req-3919109e-6bab-4cb4-b633-ce6edc8c77be service nova] Acquiring lock "refresh_cache-33d04e59-da01-4ba3-ac42-ab93372a332d" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1016.873503] env[62070]: DEBUG oslo_concurrency.lockutils [req-2849e3eb-ff79-4e27-b9d0-3182be955df9 req-3919109e-6bab-4cb4-b633-ce6edc8c77be service nova] Acquired lock "refresh_cache-33d04e59-da01-4ba3-ac42-ab93372a332d" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1016.873718] env[62070]: DEBUG nova.network.neutron [req-2849e3eb-ff79-4e27-b9d0-3182be955df9 req-3919109e-6bab-4cb4-b633-ce6edc8c77be service nova] [instance: 33d04e59-da01-4ba3-ac42-ab93372a332d] Refreshing network info cache for port 222fba36-759a-41f7-a82a-cb4047bd3725 {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1016.894608] env[62070]: DEBUG nova.compute.manager [None req-a65cb8ad-84af-4b6e-a9a1-bf2808585ba4 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: 6b6e22b9-71fb-4139-993a-7b9fcf89d8e0] Starting instance... {{(pid=62070) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1016.985831] env[62070]: DEBUG oslo_vmware.api [None req-da75baf3-0e49-4102-b74a-ac4b6d4ddcb1 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Task: {'id': task-1122278, 'name': PowerOffVM_Task, 'duration_secs': 0.23771} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1016.986167] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-da75baf3-0e49-4102-b74a-ac4b6d4ddcb1 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] [instance: 5ec9074b-1237-4404-b13c-a7ca0dbe1d43] Powered off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1016.986350] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-da75baf3-0e49-4102-b74a-ac4b6d4ddcb1 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] [instance: 5ec9074b-1237-4404-b13c-a7ca0dbe1d43] Unregistering the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1016.986620] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-87dc22c8-8719-4712-84f1-ab7001915f5b {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.051539] env[62070]: DEBUG oslo_concurrency.lockutils [None req-1551e19b-6f87-42d6-8898-94554d769890 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.339s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1017.055306] env[62070]: DEBUG oslo_concurrency.lockutils [None req-4c27c27b-0ed3-4e75-bd51-ec4e9e507275 tempest-AttachInterfacesUnderV243Test-1418654099 tempest-AttachInterfacesUnderV243Test-1418654099-project-member] Lock "compute_resources" 
acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.107s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1017.056611] env[62070]: INFO nova.compute.claims [None req-4c27c27b-0ed3-4e75-bd51-ec4e9e507275 tempest-AttachInterfacesUnderV243Test-1418654099 tempest-AttachInterfacesUnderV243Test-1418654099-project-member] [instance: 7bfda953-ac95-4dce-b7a7-c570eae35582] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1017.071393] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-da75baf3-0e49-4102-b74a-ac4b6d4ddcb1 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] [instance: 5ec9074b-1237-4404-b13c-a7ca0dbe1d43] Unregistered the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1017.072097] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-da75baf3-0e49-4102-b74a-ac4b6d4ddcb1 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] [instance: 5ec9074b-1237-4404-b13c-a7ca0dbe1d43] Deleting contents of the VM from datastore datastore1 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1017.072097] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-da75baf3-0e49-4102-b74a-ac4b6d4ddcb1 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Deleting the datastore file [datastore1] 5ec9074b-1237-4404-b13c-a7ca0dbe1d43 {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1017.072097] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3a8f7030-cc28-45cb-b5b0-a3460565d5e6 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.080042] env[62070]: DEBUG oslo_vmware.api [None req-da75baf3-0e49-4102-b74a-ac4b6d4ddcb1 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Waiting for the task: (returnval){ [ 1017.080042] env[62070]: value = "task-1122282" [ 1017.080042] env[62070]: _type = "Task" [ 1017.080042] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1017.088073] env[62070]: DEBUG oslo_vmware.api [None req-da75baf3-0e49-4102-b74a-ac4b6d4ddcb1 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Task: {'id': task-1122282, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1017.198283] env[62070]: DEBUG oslo_vmware.api [None req-4b17f16d-ee96-409a-b0d3-8d42aaac32b0 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Task: {'id': task-1122279, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1017.274353] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1122280, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1017.417875] env[62070]: DEBUG oslo_concurrency.lockutils [None req-a65cb8ad-84af-4b6e-a9a1-bf2808585ba4 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1017.552296] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Acquiring lock "refresh_cache-67e99ada-a8e6-4034-b19b-5b2cb883b735" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1017.552456] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Acquired lock "refresh_cache-67e99ada-a8e6-4034-b19b-5b2cb883b735" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1017.552608] env[62070]: DEBUG nova.network.neutron [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] [instance: 67e99ada-a8e6-4034-b19b-5b2cb883b735] Forcefully refreshing network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2004}} [ 1017.589032] env[62070]: DEBUG oslo_vmware.api [None req-da75baf3-0e49-4102-b74a-ac4b6d4ddcb1 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Task: {'id': task-1122282, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1017.594658] env[62070]: DEBUG oslo_concurrency.lockutils [None req-c1634d99-99d1-47c7-a295-f741e79f98f2 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Acquiring lock "84c00e4a-20d3-4739-8535-e27076d85a89" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1017.594859] env[62070]: DEBUG oslo_concurrency.lockutils [None req-c1634d99-99d1-47c7-a295-f741e79f98f2 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Lock "84c00e4a-20d3-4739-8535-e27076d85a89" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1017.595082] env[62070]: DEBUG oslo_concurrency.lockutils [None req-c1634d99-99d1-47c7-a295-f741e79f98f2 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Acquiring lock "84c00e4a-20d3-4739-8535-e27076d85a89-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1017.595283] env[62070]: DEBUG oslo_concurrency.lockutils [None req-c1634d99-99d1-47c7-a295-f741e79f98f2 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Lock "84c00e4a-20d3-4739-8535-e27076d85a89-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62070) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1017.595461] env[62070]: DEBUG oslo_concurrency.lockutils [None req-c1634d99-99d1-47c7-a295-f741e79f98f2 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Lock "84c00e4a-20d3-4739-8535-e27076d85a89-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1017.597565] env[62070]: INFO nova.compute.manager [None req-c1634d99-99d1-47c7-a295-f741e79f98f2 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 84c00e4a-20d3-4739-8535-e27076d85a89] Terminating instance [ 1017.599496] env[62070]: DEBUG nova.compute.manager [None req-c1634d99-99d1-47c7-a295-f741e79f98f2 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 84c00e4a-20d3-4739-8535-e27076d85a89] Start destroying the instance on the hypervisor. {{(pid=62070) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1017.599699] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-c1634d99-99d1-47c7-a295-f741e79f98f2 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 84c00e4a-20d3-4739-8535-e27076d85a89] Destroying instance {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1017.600527] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4cba23a-9605-4e4e-a810-ade20017c8ea {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.607845] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-c1634d99-99d1-47c7-a295-f741e79f98f2 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 84c00e4a-20d3-4739-8535-e27076d85a89] Powering off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1017.608069] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-011ad5fc-660a-4992-9026-fbfee71c3cd9 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.614445] env[62070]: INFO nova.scheduler.client.report [None req-1551e19b-6f87-42d6-8898-94554d769890 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Deleted allocation for migration bc687c47-1f98-4813-8b35-ebea64e5ada5 [ 1017.618268] env[62070]: DEBUG oslo_vmware.api [None req-c1634d99-99d1-47c7-a295-f741e79f98f2 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Waiting for the task: (returnval){ [ 1017.618268] env[62070]: value = "task-1122283" [ 1017.618268] env[62070]: _type = "Task" [ 1017.618268] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1017.622376] env[62070]: DEBUG nova.network.neutron [req-2849e3eb-ff79-4e27-b9d0-3182be955df9 req-3919109e-6bab-4cb4-b633-ce6edc8c77be service nova] [instance: 33d04e59-da01-4ba3-ac42-ab93372a332d] Updated VIF entry in instance network info cache for port 222fba36-759a-41f7-a82a-cb4047bd3725. 
{{(pid=62070) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1017.622376] env[62070]: DEBUG nova.network.neutron [req-2849e3eb-ff79-4e27-b9d0-3182be955df9 req-3919109e-6bab-4cb4-b633-ce6edc8c77be service nova] [instance: 33d04e59-da01-4ba3-ac42-ab93372a332d] Updating instance_info_cache with network_info: [{"id": "222fba36-759a-41f7-a82a-cb4047bd3725", "address": "fa:16:3e:7a:4c:a1", "network": {"id": "5ea0fffc-372c-450e-b27b-10959077d58f", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1853458988-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9191f0e6c2ee401abca64c0780e230bf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f3c995e9-7f2f-420c-880a-d60da6e708ad", "external-id": "nsx-vlan-transportzone-166", "segmentation_id": 166, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap222fba36-75", "ovs_interfaceid": "222fba36-759a-41f7-a82a-cb4047bd3725", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1017.628531] env[62070]: DEBUG oslo_vmware.api [None req-c1634d99-99d1-47c7-a295-f741e79f98f2 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': task-1122283, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1017.696129] env[62070]: DEBUG oslo_vmware.api [None req-4b17f16d-ee96-409a-b0d3-8d42aaac32b0 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Task: {'id': task-1122279, 'name': ReconfigVM_Task, 'duration_secs': 0.528548} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1017.696449] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-4b17f16d-ee96-409a-b0d3-8d42aaac32b0 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] [instance: a5cba512-9b50-4ca3-93eb-345be12dc588] Reconfigured VM instance instance-00000058 to attach disk [datastore1] volume-0635ea59-c4ec-4e97-9bdd-1d58208eb929/volume-0635ea59-c4ec-4e97-9bdd-1d58208eb929.vmdk or device None with type thin {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1017.702020] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fe3d7f63-3a08-47f5-bd10-91070ce53537 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.715946] env[62070]: DEBUG oslo_vmware.api [None req-4b17f16d-ee96-409a-b0d3-8d42aaac32b0 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Waiting for the task: (returnval){ [ 1017.715946] env[62070]: value = "task-1122284" [ 1017.715946] env[62070]: _type = "Task" [ 1017.715946] env[62070]: } to complete. 
{{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1017.724569] env[62070]: DEBUG oslo_vmware.api [None req-4b17f16d-ee96-409a-b0d3-8d42aaac32b0 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Task: {'id': task-1122284, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1017.775927] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1122280, 'name': CreateVM_Task} progress is 99%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1018.091693] env[62070]: DEBUG oslo_vmware.api [None req-da75baf3-0e49-4102-b74a-ac4b6d4ddcb1 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Task: {'id': task-1122282, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.57493} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1018.092085] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-da75baf3-0e49-4102-b74a-ac4b6d4ddcb1 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Deleted the datastore file {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1018.092142] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-da75baf3-0e49-4102-b74a-ac4b6d4ddcb1 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] [instance: 5ec9074b-1237-4404-b13c-a7ca0dbe1d43] Deleted contents of the VM from datastore datastore1 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1018.092308] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-da75baf3-0e49-4102-b74a-ac4b6d4ddcb1 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] [instance: 5ec9074b-1237-4404-b13c-a7ca0dbe1d43] Instance destroyed {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1018.092481] env[62070]: INFO nova.compute.manager [None req-da75baf3-0e49-4102-b74a-ac4b6d4ddcb1 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] [instance: 5ec9074b-1237-4404-b13c-a7ca0dbe1d43] Took 1.63 seconds to destroy the instance on the hypervisor. [ 1018.092741] env[62070]: DEBUG oslo.service.loopingcall [None req-da75baf3-0e49-4102-b74a-ac4b6d4ddcb1 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1018.093092] env[62070]: DEBUG nova.compute.manager [-] [instance: 5ec9074b-1237-4404-b13c-a7ca0dbe1d43] Deallocating network for instance {{(pid=62070) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1018.093092] env[62070]: DEBUG nova.network.neutron [-] [instance: 5ec9074b-1237-4404-b13c-a7ca0dbe1d43] deallocate_for_instance() {{(pid=62070) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1018.128457] env[62070]: DEBUG oslo_concurrency.lockutils [req-2849e3eb-ff79-4e27-b9d0-3182be955df9 req-3919109e-6bab-4cb4-b633-ce6edc8c77be service nova] Releasing lock "refresh_cache-33d04e59-da01-4ba3-ac42-ab93372a332d" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1018.129440] env[62070]: DEBUG oslo_concurrency.lockutils [None req-1551e19b-6f87-42d6-8898-94554d769890 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Lock "b101c79a-abfd-4104-aaed-096995fb2337" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 9.078s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1018.135394] env[62070]: DEBUG oslo_vmware.api [None req-c1634d99-99d1-47c7-a295-f741e79f98f2 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': task-1122283, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1018.227960] env[62070]: DEBUG oslo_vmware.api [None req-4b17f16d-ee96-409a-b0d3-8d42aaac32b0 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Task: {'id': task-1122284, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1018.280180] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1122280, 'name': CreateVM_Task, 'duration_secs': 1.354763} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1018.280347] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 33d04e59-da01-4ba3-ac42-ab93372a332d] Created VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1018.281056] env[62070]: DEBUG oslo_concurrency.lockutils [None req-d3d12465-2651-49ee-be0e-d6920a7c6c57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1018.281243] env[62070]: DEBUG oslo_concurrency.lockutils [None req-d3d12465-2651-49ee-be0e-d6920a7c6c57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Acquired lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1018.281570] env[62070]: DEBUG oslo_concurrency.lockutils [None req-d3d12465-2651-49ee-be0e-d6920a7c6c57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1018.281840] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cef7feb0-0e41-4020-b582-477cb746432f {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.286525] env[62070]: DEBUG oslo_vmware.api [None req-d3d12465-2651-49ee-be0e-d6920a7c6c57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Waiting for the task: (returnval){ [ 1018.286525] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]52c2d832-acdb-f1f6-2298-a65ba16a2b2b" [ 1018.286525] env[62070]: _type = "Task" [ 1018.286525] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1018.298385] env[62070]: DEBUG oslo_vmware.api [None req-d3d12465-2651-49ee-be0e-d6920a7c6c57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52c2d832-acdb-f1f6-2298-a65ba16a2b2b, 'name': SearchDatastore_Task, 'duration_secs': 0.009343} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1018.298385] env[62070]: DEBUG oslo_concurrency.lockutils [None req-d3d12465-2651-49ee-be0e-d6920a7c6c57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Releasing lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1018.298915] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-d3d12465-2651-49ee-be0e-d6920a7c6c57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 33d04e59-da01-4ba3-ac42-ab93372a332d] Processing image 43ea607c-7ece-4601-9b11-75c6a16aa7dd {{(pid=62070) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1018.299178] env[62070]: DEBUG oslo_concurrency.lockutils [None req-d3d12465-2651-49ee-be0e-d6920a7c6c57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1018.299334] env[62070]: DEBUG oslo_concurrency.lockutils [None req-d3d12465-2651-49ee-be0e-d6920a7c6c57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Acquired lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1018.299516] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-d3d12465-2651-49ee-be0e-d6920a7c6c57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1018.299787] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-58563df1-26f2-48cd-8ad6-5a6c45b92385 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.308105] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-d3d12465-2651-49ee-be0e-d6920a7c6c57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1018.308284] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-d3d12465-2651-49ee-be0e-d6920a7c6c57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62070) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1018.309032] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-716abd4f-40c6-4161-811b-86c063ae8f41 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.319512] env[62070]: DEBUG oslo_vmware.api [None req-d3d12465-2651-49ee-be0e-d6920a7c6c57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Waiting for the task: (returnval){ [ 1018.319512] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]526f9a30-986e-be33-6953-fe72bf399ce1" [ 1018.319512] env[62070]: _type = "Task" [ 1018.319512] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1018.327470] env[62070]: DEBUG oslo_vmware.api [None req-d3d12465-2651-49ee-be0e-d6920a7c6c57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]526f9a30-986e-be33-6953-fe72bf399ce1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1018.329751] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9a856ea-45e5-400f-9b9d-db69dbbfe957 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.335545] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b206f301-e061-4d15-8663-b2aeff388044 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.364403] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f7c980b-34d3-4f77-b386-91092baf0827 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.372271] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3dd72229-6896-4449-ab7c-a9648b5ffba7 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.385532] env[62070]: DEBUG nova.compute.provider_tree [None req-4c27c27b-0ed3-4e75-bd51-ec4e9e507275 tempest-AttachInterfacesUnderV243Test-1418654099 tempest-AttachInterfacesUnderV243Test-1418654099-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1018.632092] env[62070]: DEBUG oslo_vmware.api [None req-c1634d99-99d1-47c7-a295-f741e79f98f2 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': task-1122283, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1018.728212] env[62070]: DEBUG oslo_vmware.api [None req-4b17f16d-ee96-409a-b0d3-8d42aaac32b0 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Task: {'id': task-1122284, 'name': ReconfigVM_Task, 'duration_secs': 0.595176} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1018.728465] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-4b17f16d-ee96-409a-b0d3-8d42aaac32b0 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] [instance: a5cba512-9b50-4ca3-93eb-345be12dc588] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-245489', 'volume_id': '0635ea59-c4ec-4e97-9bdd-1d58208eb929', 'name': 'volume-0635ea59-c4ec-4e97-9bdd-1d58208eb929', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'a5cba512-9b50-4ca3-93eb-345be12dc588', 'attached_at': '', 'detached_at': '', 'volume_id': '0635ea59-c4ec-4e97-9bdd-1d58208eb929', 'serial': '0635ea59-c4ec-4e97-9bdd-1d58208eb929'} {{(pid=62070) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1018.831351] env[62070]: DEBUG oslo_vmware.api [None req-d3d12465-2651-49ee-be0e-d6920a7c6c57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]526f9a30-986e-be33-6953-fe72bf399ce1, 'name': SearchDatastore_Task, 'duration_secs': 0.014884} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1018.832219] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-274387cb-0f87-4c5c-b7f7-b4773e203cba {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.837652] env[62070]: DEBUG oslo_vmware.api [None req-d3d12465-2651-49ee-be0e-d6920a7c6c57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Waiting for the task: (returnval){ [ 1018.837652] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]52640d9c-1c38-97c5-c34f-d3f3c55f19ef" [ 1018.837652] env[62070]: _type = "Task" [ 1018.837652] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1018.845893] env[62070]: DEBUG oslo_vmware.api [None req-d3d12465-2651-49ee-be0e-d6920a7c6c57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52640d9c-1c38-97c5-c34f-d3f3c55f19ef, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1018.890708] env[62070]: DEBUG nova.scheduler.client.report [None req-4c27c27b-0ed3-4e75-bd51-ec4e9e507275 tempest-AttachInterfacesUnderV243Test-1418654099 tempest-AttachInterfacesUnderV243Test-1418654099-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1018.911020] env[62070]: DEBUG nova.compute.manager [req-f6819c0e-f31e-462d-b6e3-ccd2d5aa6792 req-a6a7a3a8-0f2e-4d62-aeb5-b4a2cf114cfb service nova] [instance: 5ec9074b-1237-4404-b13c-a7ca0dbe1d43] Received event network-vif-deleted-0b2740c1-7e91-45f2-b9e3-95b268c21eff {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1018.911279] env[62070]: INFO nova.compute.manager [req-f6819c0e-f31e-462d-b6e3-ccd2d5aa6792 req-a6a7a3a8-0f2e-4d62-aeb5-b4a2cf114cfb service nova] [instance: 5ec9074b-1237-4404-b13c-a7ca0dbe1d43] Neutron deleted interface 0b2740c1-7e91-45f2-b9e3-95b268c21eff; detaching it from the instance and deleting it from the info cache [ 1018.911467] env[62070]: DEBUG nova.network.neutron [req-f6819c0e-f31e-462d-b6e3-ccd2d5aa6792 req-a6a7a3a8-0f2e-4d62-aeb5-b4a2cf114cfb service nova] [instance: 5ec9074b-1237-4404-b13c-a7ca0dbe1d43] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1018.939722] env[62070]: DEBUG nova.network.neutron [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] [instance: 67e99ada-a8e6-4034-b19b-5b2cb883b735] Updating instance_info_cache with network_info: [{"id": "e01eb485-1347-4afb-b881-62797a5b84af", "address": "fa:16:3e:d1:62:ff", "network": {"id": "0d81bd04-b549-4e1f-97a2-0a0b9391dd3f", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-108214409-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.248", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c91e5eeeeb1742f499b2edaf76a93a3b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cf5bfbae-a882-4d34-be33-b31e274b3077", "external-id": "nsx-vlan-transportzone-556", "segmentation_id": 556, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape01eb485-13", "ovs_interfaceid": "e01eb485-1347-4afb-b881-62797a5b84af", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1018.966477] env[62070]: DEBUG nova.network.neutron [-] [instance: 
5ec9074b-1237-4404-b13c-a7ca0dbe1d43] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1018.976769] env[62070]: DEBUG oslo_concurrency.lockutils [None req-18f82941-8172-482c-8da4-c91042c7d115 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Acquiring lock "b101c79a-abfd-4104-aaed-096995fb2337" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1018.977038] env[62070]: DEBUG oslo_concurrency.lockutils [None req-18f82941-8172-482c-8da4-c91042c7d115 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Lock "b101c79a-abfd-4104-aaed-096995fb2337" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1018.977262] env[62070]: DEBUG oslo_concurrency.lockutils [None req-18f82941-8172-482c-8da4-c91042c7d115 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Acquiring lock "b101c79a-abfd-4104-aaed-096995fb2337-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1018.977449] env[62070]: DEBUG oslo_concurrency.lockutils [None req-18f82941-8172-482c-8da4-c91042c7d115 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Lock "b101c79a-abfd-4104-aaed-096995fb2337-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1018.981017] env[62070]: DEBUG oslo_concurrency.lockutils [None req-18f82941-8172-482c-8da4-c91042c7d115 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Lock "b101c79a-abfd-4104-aaed-096995fb2337-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1018.981017] env[62070]: INFO nova.compute.manager [None req-18f82941-8172-482c-8da4-c91042c7d115 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: b101c79a-abfd-4104-aaed-096995fb2337] Terminating instance [ 1018.985555] env[62070]: DEBUG nova.compute.manager [None req-18f82941-8172-482c-8da4-c91042c7d115 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: b101c79a-abfd-4104-aaed-096995fb2337] Start destroying the instance on the hypervisor. 
{{(pid=62070) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1018.986198] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-18f82941-8172-482c-8da4-c91042c7d115 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: b101c79a-abfd-4104-aaed-096995fb2337] Destroying instance {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1018.987624] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29d009d9-cbe5-4e61-8e67-ab7ce362808f {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.998008] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-18f82941-8172-482c-8da4-c91042c7d115 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: b101c79a-abfd-4104-aaed-096995fb2337] Powering off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1018.998300] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-70288556-bf44-4fcb-84b6-e2ed4d421dab {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.004910] env[62070]: DEBUG oslo_vmware.api [None req-18f82941-8172-482c-8da4-c91042c7d115 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Waiting for the task: (returnval){ [ 1019.004910] env[62070]: value = "task-1122285" [ 1019.004910] env[62070]: _type = "Task" [ 1019.004910] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1019.014475] env[62070]: DEBUG oslo_vmware.api [None req-18f82941-8172-482c-8da4-c91042c7d115 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Task: {'id': task-1122285, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1019.131312] env[62070]: DEBUG oslo_vmware.api [None req-c1634d99-99d1-47c7-a295-f741e79f98f2 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': task-1122283, 'name': PowerOffVM_Task, 'duration_secs': 1.265064} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1019.131864] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-c1634d99-99d1-47c7-a295-f741e79f98f2 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 84c00e4a-20d3-4739-8535-e27076d85a89] Powered off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1019.131864] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-c1634d99-99d1-47c7-a295-f741e79f98f2 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 84c00e4a-20d3-4739-8535-e27076d85a89] Unregistering the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1019.132098] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c0ce76b9-56f1-4e2b-882d-40987ba39a98 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.202613] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-c1634d99-99d1-47c7-a295-f741e79f98f2 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 84c00e4a-20d3-4739-8535-e27076d85a89] Unregistered the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1019.202891] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-c1634d99-99d1-47c7-a295-f741e79f98f2 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 84c00e4a-20d3-4739-8535-e27076d85a89] Deleting contents of the VM from datastore datastore2 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1019.203110] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-c1634d99-99d1-47c7-a295-f741e79f98f2 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Deleting the datastore file [datastore2] 84c00e4a-20d3-4739-8535-e27076d85a89 {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1019.203465] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5305bea6-2e32-4fa9-bd22-fc0bc426d8ce {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.209415] env[62070]: DEBUG oslo_vmware.api [None req-c1634d99-99d1-47c7-a295-f741e79f98f2 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Waiting for the task: (returnval){ [ 1019.209415] env[62070]: value = "task-1122287" [ 1019.209415] env[62070]: _type = "Task" [ 1019.209415] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1019.218275] env[62070]: DEBUG oslo_vmware.api [None req-c1634d99-99d1-47c7-a295-f741e79f98f2 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': task-1122287, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1019.349322] env[62070]: DEBUG oslo_vmware.api [None req-d3d12465-2651-49ee-be0e-d6920a7c6c57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52640d9c-1c38-97c5-c34f-d3f3c55f19ef, 'name': SearchDatastore_Task, 'duration_secs': 0.011117} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1019.349602] env[62070]: DEBUG oslo_concurrency.lockutils [None req-d3d12465-2651-49ee-be0e-d6920a7c6c57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Releasing lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1019.349866] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-d3d12465-2651-49ee-be0e-d6920a7c6c57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore1] 33d04e59-da01-4ba3-ac42-ab93372a332d/33d04e59-da01-4ba3-ac42-ab93372a332d.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1019.350140] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e8e72960-6440-42c1-8313-aff253063da6 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.359468] env[62070]: DEBUG oslo_vmware.api [None req-d3d12465-2651-49ee-be0e-d6920a7c6c57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Waiting for the task: (returnval){ [ 1019.359468] env[62070]: value = "task-1122288" [ 1019.359468] env[62070]: _type = "Task" [ 1019.359468] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1019.365921] env[62070]: DEBUG oslo_vmware.api [None req-d3d12465-2651-49ee-be0e-d6920a7c6c57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Task: {'id': task-1122288, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1019.396203] env[62070]: DEBUG oslo_concurrency.lockutils [None req-4c27c27b-0ed3-4e75-bd51-ec4e9e507275 tempest-AttachInterfacesUnderV243Test-1418654099 tempest-AttachInterfacesUnderV243Test-1418654099-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.341s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1019.398123] env[62070]: DEBUG nova.compute.manager [None req-4c27c27b-0ed3-4e75-bd51-ec4e9e507275 tempest-AttachInterfacesUnderV243Test-1418654099 tempest-AttachInterfacesUnderV243Test-1418654099-project-member] [instance: 7bfda953-ac95-4dce-b7a7-c570eae35582] Start building networks asynchronously for instance. 
{{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1019.400640] env[62070]: DEBUG oslo_concurrency.lockutils [None req-1c62a5a1-6f00-412a-ada6-05392ff9718d tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.527s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1019.402165] env[62070]: INFO nova.compute.claims [None req-1c62a5a1-6f00-412a-ada6-05392ff9718d tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] [instance: 1ababba6-838c-4ba6-bd83-e2b15aaf4b97] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1019.414662] env[62070]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-27ca8c0f-62e8-4a54-b55b-72156b5fe39b {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.426033] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a48bb819-c940-4e73-804d-29aeab542c75 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.445625] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Releasing lock "refresh_cache-67e99ada-a8e6-4034-b19b-5b2cb883b735" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1019.445832] env[62070]: DEBUG nova.compute.manager [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] [instance: 67e99ada-a8e6-4034-b19b-5b2cb883b735] Updated the network info_cache for instance {{(pid=62070) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9999}} [ 1019.446035] env[62070]: DEBUG oslo_service.periodic_task [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62070) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1019.446204] env[62070]: DEBUG oslo_service.periodic_task [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62070) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1019.446390] env[62070]: DEBUG oslo_service.periodic_task [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62070) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1019.446556] env[62070]: DEBUG oslo_service.periodic_task [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62070) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1019.446701] env[62070]: DEBUG oslo_service.periodic_task [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62070) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1019.446845] env[62070]: DEBUG oslo_service.periodic_task [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] 
Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62070) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1019.446974] env[62070]: DEBUG nova.compute.manager [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62070) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}} [ 1019.447153] env[62070]: DEBUG oslo_service.periodic_task [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62070) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1019.457886] env[62070]: DEBUG nova.compute.manager [req-f6819c0e-f31e-462d-b6e3-ccd2d5aa6792 req-a6a7a3a8-0f2e-4d62-aeb5-b4a2cf114cfb service nova] [instance: 5ec9074b-1237-4404-b13c-a7ca0dbe1d43] Detach interface failed, port_id=0b2740c1-7e91-45f2-b9e3-95b268c21eff, reason: Instance 5ec9074b-1237-4404-b13c-a7ca0dbe1d43 could not be found. {{(pid=62070) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1019.459282] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1019.470398] env[62070]: INFO nova.compute.manager [-] [instance: 5ec9074b-1237-4404-b13c-a7ca0dbe1d43] Took 1.38 seconds to deallocate network for instance. [ 1019.514764] env[62070]: DEBUG oslo_vmware.api [None req-18f82941-8172-482c-8da4-c91042c7d115 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Task: {'id': task-1122285, 'name': PowerOffVM_Task, 'duration_secs': 0.296958} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1019.515062] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-18f82941-8172-482c-8da4-c91042c7d115 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: b101c79a-abfd-4104-aaed-096995fb2337] Powered off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1019.515241] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-18f82941-8172-482c-8da4-c91042c7d115 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: b101c79a-abfd-4104-aaed-096995fb2337] Unregistering the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1019.515505] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-020fa495-8079-4aef-8ad3-0bc831de78d8 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.580925] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-18f82941-8172-482c-8da4-c91042c7d115 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: b101c79a-abfd-4104-aaed-096995fb2337] Unregistered the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1019.582034] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-18f82941-8172-482c-8da4-c91042c7d115 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: b101c79a-abfd-4104-aaed-096995fb2337] Deleting contents of the VM from datastore datastore2 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1019.582034] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-18f82941-8172-482c-8da4-c91042c7d115 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Deleting the datastore file [datastore2] b101c79a-abfd-4104-aaed-096995fb2337 {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1019.582034] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ff87bbb9-fec5-435b-b1dd-5adea54964a0 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.590678] env[62070]: DEBUG oslo_vmware.api [None req-18f82941-8172-482c-8da4-c91042c7d115 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Waiting for the task: (returnval){ [ 1019.590678] env[62070]: value = "task-1122290" [ 1019.590678] env[62070]: _type = "Task" [ 1019.590678] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1019.599731] env[62070]: DEBUG oslo_vmware.api [None req-18f82941-8172-482c-8da4-c91042c7d115 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Task: {'id': task-1122290, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1019.640777] env[62070]: DEBUG oslo_concurrency.lockutils [None req-2dd24fa0-f731-48ce-8a8d-1fa6fec29bbf tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Acquiring lock "d65b4f74-7df5-4fb8-baa3-4f5b9b480cb2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1019.640965] env[62070]: DEBUG oslo_concurrency.lockutils [None req-2dd24fa0-f731-48ce-8a8d-1fa6fec29bbf tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Lock "d65b4f74-7df5-4fb8-baa3-4f5b9b480cb2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1019.720292] env[62070]: DEBUG oslo_vmware.api [None req-c1634d99-99d1-47c7-a295-f741e79f98f2 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': task-1122287, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.149049} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1019.720582] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-c1634d99-99d1-47c7-a295-f741e79f98f2 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Deleted the datastore file {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1019.720778] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-c1634d99-99d1-47c7-a295-f741e79f98f2 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 84c00e4a-20d3-4739-8535-e27076d85a89] Deleted contents of the VM from datastore datastore2 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1019.720962] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-c1634d99-99d1-47c7-a295-f741e79f98f2 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 84c00e4a-20d3-4739-8535-e27076d85a89] Instance destroyed {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1019.721213] env[62070]: INFO nova.compute.manager [None req-c1634d99-99d1-47c7-a295-f741e79f98f2 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 84c00e4a-20d3-4739-8535-e27076d85a89] Took 2.12 seconds to destroy the instance on the hypervisor. [ 1019.721486] env[62070]: DEBUG oslo.service.loopingcall [None req-c1634d99-99d1-47c7-a295-f741e79f98f2 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1019.722163] env[62070]: DEBUG nova.compute.manager [-] [instance: 84c00e4a-20d3-4739-8535-e27076d85a89] Deallocating network for instance {{(pid=62070) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1019.722163] env[62070]: DEBUG nova.network.neutron [-] [instance: 84c00e4a-20d3-4739-8535-e27076d85a89] deallocate_for_instance() {{(pid=62070) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1019.770502] env[62070]: DEBUG nova.objects.instance [None req-4b17f16d-ee96-409a-b0d3-8d42aaac32b0 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Lazy-loading 'flavor' on Instance uuid a5cba512-9b50-4ca3-93eb-345be12dc588 {{(pid=62070) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1019.868801] env[62070]: DEBUG oslo_vmware.api [None req-d3d12465-2651-49ee-be0e-d6920a7c6c57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Task: {'id': task-1122288, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.476561} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1019.869035] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-d3d12465-2651-49ee-be0e-d6920a7c6c57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore1] 33d04e59-da01-4ba3-ac42-ab93372a332d/33d04e59-da01-4ba3-ac42-ab93372a332d.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 1019.869270] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-d3d12465-2651-49ee-be0e-d6920a7c6c57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 33d04e59-da01-4ba3-ac42-ab93372a332d] Extending root virtual disk to 1048576 {{(pid=62070) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1019.869533] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-fc09c8ea-2311-42ec-8d49-3dc28b815d7f {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.875957] env[62070]: DEBUG oslo_vmware.api [None req-d3d12465-2651-49ee-be0e-d6920a7c6c57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Waiting for the task: (returnval){ [ 1019.875957] env[62070]: value = "task-1122291" [ 1019.875957] env[62070]: _type = "Task" [ 1019.875957] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1019.883260] env[62070]: DEBUG oslo_vmware.api [None req-d3d12465-2651-49ee-be0e-d6920a7c6c57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Task: {'id': task-1122291, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1019.906298] env[62070]: DEBUG nova.compute.utils [None req-4c27c27b-0ed3-4e75-bd51-ec4e9e507275 tempest-AttachInterfacesUnderV243Test-1418654099 tempest-AttachInterfacesUnderV243Test-1418654099-project-member] Using /dev/sd instead of None {{(pid=62070) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1019.909788] env[62070]: DEBUG nova.compute.manager [None req-4c27c27b-0ed3-4e75-bd51-ec4e9e507275 tempest-AttachInterfacesUnderV243Test-1418654099 tempest-AttachInterfacesUnderV243Test-1418654099-project-member] [instance: 7bfda953-ac95-4dce-b7a7-c570eae35582] Allocating IP information in the background. {{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1019.909788] env[62070]: DEBUG nova.network.neutron [None req-4c27c27b-0ed3-4e75-bd51-ec4e9e507275 tempest-AttachInterfacesUnderV243Test-1418654099 tempest-AttachInterfacesUnderV243Test-1418654099-project-member] [instance: 7bfda953-ac95-4dce-b7a7-c570eae35582] allocate_for_instance() {{(pid=62070) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1019.976253] env[62070]: DEBUG oslo_concurrency.lockutils [None req-da75baf3-0e49-4102-b74a-ac4b6d4ddcb1 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1019.980391] env[62070]: DEBUG nova.policy [None req-4c27c27b-0ed3-4e75-bd51-ec4e9e507275 tempest-AttachInterfacesUnderV243Test-1418654099 tempest-AttachInterfacesUnderV243Test-1418654099-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '13735e4c994b4695939f72e13709832d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3587df64b22e4cfc8220532cdda18c28', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62070) authorize /opt/stack/nova/nova/policy.py:201}} [ 1020.102881] env[62070]: DEBUG oslo_vmware.api [None req-18f82941-8172-482c-8da4-c91042c7d115 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Task: {'id': task-1122290, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.279089} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1020.103830] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-18f82941-8172-482c-8da4-c91042c7d115 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Deleted the datastore file {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1020.104182] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-18f82941-8172-482c-8da4-c91042c7d115 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: b101c79a-abfd-4104-aaed-096995fb2337] Deleted contents of the VM from datastore datastore2 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1020.104676] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-18f82941-8172-482c-8da4-c91042c7d115 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: b101c79a-abfd-4104-aaed-096995fb2337] Instance destroyed {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1020.104995] env[62070]: INFO nova.compute.manager [None req-18f82941-8172-482c-8da4-c91042c7d115 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: b101c79a-abfd-4104-aaed-096995fb2337] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1020.105557] env[62070]: DEBUG oslo.service.loopingcall [None req-18f82941-8172-482c-8da4-c91042c7d115 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1020.107024] env[62070]: DEBUG nova.compute.manager [-] [instance: b101c79a-abfd-4104-aaed-096995fb2337] Deallocating network for instance {{(pid=62070) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1020.107024] env[62070]: DEBUG nova.network.neutron [-] [instance: b101c79a-abfd-4104-aaed-096995fb2337] deallocate_for_instance() {{(pid=62070) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1020.145523] env[62070]: DEBUG nova.compute.manager [None req-2dd24fa0-f731-48ce-8a8d-1fa6fec29bbf tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: d65b4f74-7df5-4fb8-baa3-4f5b9b480cb2] Starting instance... {{(pid=62070) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1020.276543] env[62070]: DEBUG oslo_concurrency.lockutils [None req-4b17f16d-ee96-409a-b0d3-8d42aaac32b0 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Lock "a5cba512-9b50-4ca3-93eb-345be12dc588" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 8.297s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1020.393796] env[62070]: DEBUG oslo_vmware.api [None req-d3d12465-2651-49ee-be0e-d6920a7c6c57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Task: {'id': task-1122291, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066811} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1020.393796] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-d3d12465-2651-49ee-be0e-d6920a7c6c57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 33d04e59-da01-4ba3-ac42-ab93372a332d] Extended root virtual disk {{(pid=62070) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1020.395073] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65579760-e395-4062-a064-9a0aa792680e {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.412383] env[62070]: DEBUG nova.compute.manager [None req-4c27c27b-0ed3-4e75-bd51-ec4e9e507275 tempest-AttachInterfacesUnderV243Test-1418654099 tempest-AttachInterfacesUnderV243Test-1418654099-project-member] [instance: 7bfda953-ac95-4dce-b7a7-c570eae35582] Start building block device mappings for instance. {{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1020.426900] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-d3d12465-2651-49ee-be0e-d6920a7c6c57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 33d04e59-da01-4ba3-ac42-ab93372a332d] Reconfiguring VM instance instance-00000060 to attach disk [datastore1] 33d04e59-da01-4ba3-ac42-ab93372a332d/33d04e59-da01-4ba3-ac42-ab93372a332d.vmdk or device None with type sparse {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1020.431810] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-69b47821-71b4-4c3b-ad47-0de97868e255 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.449196] env[62070]: DEBUG nova.compute.manager [req-5f30032b-beae-47bd-8ce2-021f9d6fb4e8 req-f4324d70-b415-432b-b7f7-d57d3c457334 service nova] [instance: b101c79a-abfd-4104-aaed-096995fb2337] Received event network-vif-deleted-5b226cbf-df38-4b34-b591-7afc6de0a88c {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1020.449747] env[62070]: INFO nova.compute.manager [req-5f30032b-beae-47bd-8ce2-021f9d6fb4e8 req-f4324d70-b415-432b-b7f7-d57d3c457334 service nova] [instance: b101c79a-abfd-4104-aaed-096995fb2337] Neutron deleted interface 5b226cbf-df38-4b34-b591-7afc6de0a88c; detaching it from the instance and deleting it from the info cache [ 1020.450493] env[62070]: DEBUG nova.network.neutron [req-5f30032b-beae-47bd-8ce2-021f9d6fb4e8 req-f4324d70-b415-432b-b7f7-d57d3c457334 service nova] [instance: b101c79a-abfd-4104-aaed-096995fb2337] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1020.459717] env[62070]: DEBUG oslo_vmware.api [None req-d3d12465-2651-49ee-be0e-d6920a7c6c57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Waiting for the task: (returnval){ [ 1020.459717] env[62070]: value = "task-1122292" [ 1020.459717] env[62070]: _type = "Task" [ 1020.459717] env[62070]: } to complete. 
{{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1020.468689] env[62070]: DEBUG oslo_vmware.api [None req-d3d12465-2651-49ee-be0e-d6920a7c6c57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Task: {'id': task-1122292, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1020.617928] env[62070]: DEBUG nova.network.neutron [None req-4c27c27b-0ed3-4e75-bd51-ec4e9e507275 tempest-AttachInterfacesUnderV243Test-1418654099 tempest-AttachInterfacesUnderV243Test-1418654099-project-member] [instance: 7bfda953-ac95-4dce-b7a7-c570eae35582] Successfully created port: 8372f59a-de9e-4062-be0b-39996b8f6c9e {{(pid=62070) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1020.672348] env[62070]: DEBUG oslo_concurrency.lockutils [None req-2dd24fa0-f731-48ce-8a8d-1fa6fec29bbf tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1020.717871] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84034798-1029-4bf4-839f-a3f438d0ad61 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.726111] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96bcd4d5-4f71-4d43-9880-332db110fb74 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.760314] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5a7d898-478f-4382-a22a-4e8225d92727 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.769433] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fcc5c989-4f51-4b6a-8bdb-829e30437f1e {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.782572] env[62070]: DEBUG nova.compute.provider_tree [None req-1c62a5a1-6f00-412a-ada6-05392ff9718d tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1020.888189] env[62070]: DEBUG nova.network.neutron [-] [instance: b101c79a-abfd-4104-aaed-096995fb2337] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1020.937433] env[62070]: DEBUG nova.compute.manager [req-259591da-a1ea-48be-a6a5-24a3ec4e8237 req-7111101a-686c-4bea-b06d-4e26093a4514 service nova] [instance: 84c00e4a-20d3-4739-8535-e27076d85a89] Received event network-vif-deleted-0e90b544-5a90-4009-8f52-635e393cf106 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1020.937638] env[62070]: INFO nova.compute.manager [req-259591da-a1ea-48be-a6a5-24a3ec4e8237 req-7111101a-686c-4bea-b06d-4e26093a4514 service nova] [instance: 
84c00e4a-20d3-4739-8535-e27076d85a89] Neutron deleted interface 0e90b544-5a90-4009-8f52-635e393cf106; detaching it from the instance and deleting it from the info cache [ 1020.937829] env[62070]: DEBUG nova.network.neutron [req-259591da-a1ea-48be-a6a5-24a3ec4e8237 req-7111101a-686c-4bea-b06d-4e26093a4514 service nova] [instance: 84c00e4a-20d3-4739-8535-e27076d85a89] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1020.953523] env[62070]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5630d10f-27e1-4c08-9a8b-5c37c3777a0d {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.969294] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-acdc7370-6f0e-4d5b-a3ee-5c4e2885d1e6 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.984200] env[62070]: DEBUG oslo_vmware.api [None req-d3d12465-2651-49ee-be0e-d6920a7c6c57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Task: {'id': task-1122292, 'name': ReconfigVM_Task, 'duration_secs': 0.28747} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1020.984562] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-d3d12465-2651-49ee-be0e-d6920a7c6c57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 33d04e59-da01-4ba3-ac42-ab93372a332d] Reconfigured VM instance instance-00000060 to attach disk [datastore1] 33d04e59-da01-4ba3-ac42-ab93372a332d/33d04e59-da01-4ba3-ac42-ab93372a332d.vmdk or device None with type sparse {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1020.985424] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2d083d0f-44d7-414d-a810-f9c8d97f1bbd {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.992564] env[62070]: DEBUG oslo_vmware.api [None req-d3d12465-2651-49ee-be0e-d6920a7c6c57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Waiting for the task: (returnval){ [ 1020.992564] env[62070]: value = "task-1122293" [ 1020.992564] env[62070]: _type = "Task" [ 1020.992564] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1021.006577] env[62070]: DEBUG nova.compute.manager [req-5f30032b-beae-47bd-8ce2-021f9d6fb4e8 req-f4324d70-b415-432b-b7f7-d57d3c457334 service nova] [instance: b101c79a-abfd-4104-aaed-096995fb2337] Detach interface failed, port_id=5b226cbf-df38-4b34-b591-7afc6de0a88c, reason: Instance b101c79a-abfd-4104-aaed-096995fb2337 could not be found. {{(pid=62070) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1021.011956] env[62070]: DEBUG oslo_vmware.api [None req-d3d12465-2651-49ee-be0e-d6920a7c6c57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Task: {'id': task-1122293, 'name': Rename_Task} progress is 14%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1021.076966] env[62070]: DEBUG nova.network.neutron [-] [instance: 84c00e4a-20d3-4739-8535-e27076d85a89] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1021.285826] env[62070]: DEBUG nova.scheduler.client.report [None req-1c62a5a1-6f00-412a-ada6-05392ff9718d tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1021.390661] env[62070]: INFO nova.compute.manager [-] [instance: b101c79a-abfd-4104-aaed-096995fb2337] Took 1.28 seconds to deallocate network for instance. [ 1021.440184] env[62070]: DEBUG nova.compute.manager [None req-4c27c27b-0ed3-4e75-bd51-ec4e9e507275 tempest-AttachInterfacesUnderV243Test-1418654099 tempest-AttachInterfacesUnderV243Test-1418654099-project-member] [instance: 7bfda953-ac95-4dce-b7a7-c570eae35582] Start spawning the instance on the hypervisor. {{(pid=62070) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1021.442828] env[62070]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-864789c0-9ac2-4aad-a8b1-7316479d6276 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.451780] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45a348de-30b6-4764-8d29-df472829f972 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.471218] env[62070]: DEBUG nova.virt.hardware [None req-4c27c27b-0ed3-4e75-bd51-ec4e9e507275 tempest-AttachInterfacesUnderV243Test-1418654099 tempest-AttachInterfacesUnderV243Test-1418654099-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T09:21:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T09:21:17Z,direct_url=,disk_format='vmdk',id=43ea607c-7ece-4601-9b11-75c6a16aa7dd,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9d42cb2bbadf40d6b35f237f71234611',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T09:21:18Z,virtual_size=,visibility=), allow threads: False {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1021.471480] env[62070]: DEBUG nova.virt.hardware [None req-4c27c27b-0ed3-4e75-bd51-ec4e9e507275 tempest-AttachInterfacesUnderV243Test-1418654099 tempest-AttachInterfacesUnderV243Test-1418654099-project-member] Flavor limits 0:0:0 {{(pid=62070) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1021.471709] env[62070]: DEBUG nova.virt.hardware [None req-4c27c27b-0ed3-4e75-bd51-ec4e9e507275 tempest-AttachInterfacesUnderV243Test-1418654099 tempest-AttachInterfacesUnderV243Test-1418654099-project-member] Image limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1021.471959] env[62070]: DEBUG nova.virt.hardware [None req-4c27c27b-0ed3-4e75-bd51-ec4e9e507275 tempest-AttachInterfacesUnderV243Test-1418654099 tempest-AttachInterfacesUnderV243Test-1418654099-project-member] Flavor pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1021.472176] env[62070]: DEBUG nova.virt.hardware [None req-4c27c27b-0ed3-4e75-bd51-ec4e9e507275 tempest-AttachInterfacesUnderV243Test-1418654099 tempest-AttachInterfacesUnderV243Test-1418654099-project-member] Image pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1021.472371] env[62070]: DEBUG nova.virt.hardware [None req-4c27c27b-0ed3-4e75-bd51-ec4e9e507275 tempest-AttachInterfacesUnderV243Test-1418654099 tempest-AttachInterfacesUnderV243Test-1418654099-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1021.472596] env[62070]: DEBUG nova.virt.hardware [None req-4c27c27b-0ed3-4e75-bd51-ec4e9e507275 tempest-AttachInterfacesUnderV243Test-1418654099 tempest-AttachInterfacesUnderV243Test-1418654099-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1021.472763] env[62070]: DEBUG nova.virt.hardware [None req-4c27c27b-0ed3-4e75-bd51-ec4e9e507275 tempest-AttachInterfacesUnderV243Test-1418654099 tempest-AttachInterfacesUnderV243Test-1418654099-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1021.472936] env[62070]: DEBUG nova.virt.hardware [None req-4c27c27b-0ed3-4e75-bd51-ec4e9e507275 tempest-AttachInterfacesUnderV243Test-1418654099 tempest-AttachInterfacesUnderV243Test-1418654099-project-member] Got 1 possible topologies {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1021.473119] env[62070]: DEBUG nova.virt.hardware [None req-4c27c27b-0ed3-4e75-bd51-ec4e9e507275 tempest-AttachInterfacesUnderV243Test-1418654099 tempest-AttachInterfacesUnderV243Test-1418654099-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1021.473306] env[62070]: DEBUG nova.virt.hardware [None req-4c27c27b-0ed3-4e75-bd51-ec4e9e507275 tempest-AttachInterfacesUnderV243Test-1418654099 tempest-AttachInterfacesUnderV243Test-1418654099-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1021.474247] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98880e60-ee60-43d6-9fa1-00df52825c5a {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.485201] env[62070]: 
DEBUG nova.compute.manager [req-259591da-a1ea-48be-a6a5-24a3ec4e8237 req-7111101a-686c-4bea-b06d-4e26093a4514 service nova] [instance: 84c00e4a-20d3-4739-8535-e27076d85a89] Detach interface failed, port_id=0e90b544-5a90-4009-8f52-635e393cf106, reason: Instance 84c00e4a-20d3-4739-8535-e27076d85a89 could not be found. {{(pid=62070) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1021.490520] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abb915d5-08ad-45ad-92b6-69508610c01c {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.510671] env[62070]: DEBUG oslo_vmware.api [None req-d3d12465-2651-49ee-be0e-d6920a7c6c57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Task: {'id': task-1122293, 'name': Rename_Task, 'duration_secs': 0.145879} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1021.510934] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-d3d12465-2651-49ee-be0e-d6920a7c6c57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 33d04e59-da01-4ba3-ac42-ab93372a332d] Powering on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1021.511195] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-25a1f0f6-b873-4ae8-a43e-59d75a3c25d8 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.516685] env[62070]: DEBUG oslo_vmware.api [None req-d3d12465-2651-49ee-be0e-d6920a7c6c57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Waiting for the task: (returnval){ [ 1021.516685] env[62070]: value = "task-1122294" [ 1021.516685] env[62070]: _type = "Task" [ 1021.516685] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1021.524641] env[62070]: DEBUG oslo_vmware.api [None req-d3d12465-2651-49ee-be0e-d6920a7c6c57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Task: {'id': task-1122294, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1021.582943] env[62070]: INFO nova.compute.manager [-] [instance: 84c00e4a-20d3-4739-8535-e27076d85a89] Took 1.86 seconds to deallocate network for instance. [ 1021.794889] env[62070]: DEBUG oslo_concurrency.lockutils [None req-1c62a5a1-6f00-412a-ada6-05392ff9718d tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.394s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1021.795407] env[62070]: DEBUG nova.compute.manager [None req-1c62a5a1-6f00-412a-ada6-05392ff9718d tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] [instance: 1ababba6-838c-4ba6-bd83-e2b15aaf4b97] Start building networks asynchronously for instance. 
{{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1021.799151] env[62070]: DEBUG oslo_concurrency.lockutils [None req-2a9c8032-9eaa-42ed-988b-ffc6e40f7aff tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 6.542s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1021.897724] env[62070]: DEBUG oslo_concurrency.lockutils [None req-18f82941-8172-482c-8da4-c91042c7d115 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1022.029896] env[62070]: DEBUG oslo_vmware.api [None req-d3d12465-2651-49ee-be0e-d6920a7c6c57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Task: {'id': task-1122294, 'name': PowerOnVM_Task, 'duration_secs': 0.469377} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1022.030680] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-d3d12465-2651-49ee-be0e-d6920a7c6c57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 33d04e59-da01-4ba3-ac42-ab93372a332d] Powered on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1022.030897] env[62070]: INFO nova.compute.manager [None req-d3d12465-2651-49ee-be0e-d6920a7c6c57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 33d04e59-da01-4ba3-ac42-ab93372a332d] Took 8.20 seconds to spawn the instance on the hypervisor. 
[ 1022.031161] env[62070]: DEBUG nova.compute.manager [None req-d3d12465-2651-49ee-be0e-d6920a7c6c57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 33d04e59-da01-4ba3-ac42-ab93372a332d] Checking state {{(pid=62070) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1022.031922] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-343bb191-e3c0-4891-a2e1-a0505552a0ed {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.089861] env[62070]: DEBUG oslo_concurrency.lockutils [None req-c1634d99-99d1-47c7-a295-f741e79f98f2 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1022.234260] env[62070]: DEBUG nova.network.neutron [None req-4c27c27b-0ed3-4e75-bd51-ec4e9e507275 tempest-AttachInterfacesUnderV243Test-1418654099 tempest-AttachInterfacesUnderV243Test-1418654099-project-member] [instance: 7bfda953-ac95-4dce-b7a7-c570eae35582] Successfully updated port: 8372f59a-de9e-4062-be0b-39996b8f6c9e {{(pid=62070) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1022.242911] env[62070]: DEBUG oslo_concurrency.lockutils [None req-e326279a-fafc-42e7-9e62-9e1b8572336d tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Acquiring lock "53a1791d-38fd-4721-b82c-2f0922348300" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1022.243251] env[62070]: DEBUG oslo_concurrency.lockutils [None req-e326279a-fafc-42e7-9e62-9e1b8572336d tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Lock "53a1791d-38fd-4721-b82c-2f0922348300" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1022.244038] env[62070]: INFO nova.compute.manager [None req-e326279a-fafc-42e7-9e62-9e1b8572336d tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] [instance: 53a1791d-38fd-4721-b82c-2f0922348300] Shelving [ 1022.277627] env[62070]: DEBUG oslo_concurrency.lockutils [None req-d0ef37a9-6db8-476c-932a-937fc6ece311 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Acquiring lock "000a67eb-9535-4da6-816a-b61126f11509" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1022.277980] env[62070]: DEBUG oslo_concurrency.lockutils [None req-d0ef37a9-6db8-476c-932a-937fc6ece311 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Lock "000a67eb-9535-4da6-816a-b61126f11509" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 
1022.304156] env[62070]: DEBUG nova.compute.utils [None req-1c62a5a1-6f00-412a-ada6-05392ff9718d tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] Using /dev/sd instead of None {{(pid=62070) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1022.308052] env[62070]: DEBUG nova.compute.manager [None req-1c62a5a1-6f00-412a-ada6-05392ff9718d tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] [instance: 1ababba6-838c-4ba6-bd83-e2b15aaf4b97] Allocating IP information in the background. {{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1022.308052] env[62070]: DEBUG nova.network.neutron [None req-1c62a5a1-6f00-412a-ada6-05392ff9718d tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] [instance: 1ababba6-838c-4ba6-bd83-e2b15aaf4b97] allocate_for_instance() {{(pid=62070) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1022.380425] env[62070]: DEBUG nova.policy [None req-1c62a5a1-6f00-412a-ada6-05392ff9718d tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '20349b8cb1f24d5588d6109b09a335de', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e1960779e94c4e119497a0c1117f54fc', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62070) authorize /opt/stack/nova/nova/policy.py:201}} [ 1022.550862] env[62070]: INFO nova.compute.manager [None req-d3d12465-2651-49ee-be0e-d6920a7c6c57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 33d04e59-da01-4ba3-ac42-ab93372a332d] Took 14.36 seconds to build instance. 
[ 1022.582356] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21d9fb2a-ddb4-4da3-b9d1-e964b02a7dfa {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.590746] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5067ac52-7455-4b09-900f-9faad582b94d {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.622047] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c54db278-7e91-4a93-9d74-63b741e0d721 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.629568] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75b91a05-7d54-4c34-a8d9-1df6680cada8 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.644428] env[62070]: DEBUG nova.compute.provider_tree [None req-2a9c8032-9eaa-42ed-988b-ffc6e40f7aff tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1022.700329] env[62070]: DEBUG nova.network.neutron [None req-1c62a5a1-6f00-412a-ada6-05392ff9718d tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] [instance: 1ababba6-838c-4ba6-bd83-e2b15aaf4b97] Successfully created port: df67c123-2618-45ce-8175-66a34206293a {{(pid=62070) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1022.737418] env[62070]: DEBUG oslo_concurrency.lockutils [None req-4c27c27b-0ed3-4e75-bd51-ec4e9e507275 tempest-AttachInterfacesUnderV243Test-1418654099 tempest-AttachInterfacesUnderV243Test-1418654099-project-member] Acquiring lock "refresh_cache-7bfda953-ac95-4dce-b7a7-c570eae35582" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1022.738035] env[62070]: DEBUG oslo_concurrency.lockutils [None req-4c27c27b-0ed3-4e75-bd51-ec4e9e507275 tempest-AttachInterfacesUnderV243Test-1418654099 tempest-AttachInterfacesUnderV243Test-1418654099-project-member] Acquired lock "refresh_cache-7bfda953-ac95-4dce-b7a7-c570eae35582" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1022.738035] env[62070]: DEBUG nova.network.neutron [None req-4c27c27b-0ed3-4e75-bd51-ec4e9e507275 tempest-AttachInterfacesUnderV243Test-1418654099 tempest-AttachInterfacesUnderV243Test-1418654099-project-member] [instance: 7bfda953-ac95-4dce-b7a7-c570eae35582] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1022.753227] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-e326279a-fafc-42e7-9e62-9e1b8572336d tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] [instance: 53a1791d-38fd-4721-b82c-2f0922348300] Powering off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1022.754119] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with 
opID=oslo.vmware-a4760d14-4d05-40c4-9a9d-78181ef035fe {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.761355] env[62070]: DEBUG oslo_vmware.api [None req-e326279a-fafc-42e7-9e62-9e1b8572336d tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Waiting for the task: (returnval){ [ 1022.761355] env[62070]: value = "task-1122295" [ 1022.761355] env[62070]: _type = "Task" [ 1022.761355] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1022.770435] env[62070]: DEBUG oslo_vmware.api [None req-e326279a-fafc-42e7-9e62-9e1b8572336d tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Task: {'id': task-1122295, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1022.780471] env[62070]: DEBUG nova.compute.manager [None req-d0ef37a9-6db8-476c-932a-937fc6ece311 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] [instance: 000a67eb-9535-4da6-816a-b61126f11509] Starting instance... {{(pid=62070) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1022.809228] env[62070]: DEBUG nova.compute.manager [None req-1c62a5a1-6f00-412a-ada6-05392ff9718d tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] [instance: 1ababba6-838c-4ba6-bd83-e2b15aaf4b97] Start building block device mappings for instance. {{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1023.054377] env[62070]: DEBUG oslo_concurrency.lockutils [None req-d3d12465-2651-49ee-be0e-d6920a7c6c57 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Lock "33d04e59-da01-4ba3-ac42-ab93372a332d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.879s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1023.077170] env[62070]: DEBUG nova.compute.manager [req-52f68f23-57b8-4c87-bb70-1b8cb64ed13c req-a1b4158c-dc07-4aed-83b4-a63c13969e79 service nova] [instance: 7bfda953-ac95-4dce-b7a7-c570eae35582] Received event network-vif-plugged-8372f59a-de9e-4062-be0b-39996b8f6c9e {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1023.077170] env[62070]: DEBUG oslo_concurrency.lockutils [req-52f68f23-57b8-4c87-bb70-1b8cb64ed13c req-a1b4158c-dc07-4aed-83b4-a63c13969e79 service nova] Acquiring lock "7bfda953-ac95-4dce-b7a7-c570eae35582-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1023.077170] env[62070]: DEBUG oslo_concurrency.lockutils [req-52f68f23-57b8-4c87-bb70-1b8cb64ed13c req-a1b4158c-dc07-4aed-83b4-a63c13969e79 service nova] Lock "7bfda953-ac95-4dce-b7a7-c570eae35582-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1023.077282] env[62070]: DEBUG oslo_concurrency.lockutils [req-52f68f23-57b8-4c87-bb70-1b8cb64ed13c req-a1b4158c-dc07-4aed-83b4-a63c13969e79 service nova] Lock 
"7bfda953-ac95-4dce-b7a7-c570eae35582-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1023.077452] env[62070]: DEBUG nova.compute.manager [req-52f68f23-57b8-4c87-bb70-1b8cb64ed13c req-a1b4158c-dc07-4aed-83b4-a63c13969e79 service nova] [instance: 7bfda953-ac95-4dce-b7a7-c570eae35582] No waiting events found dispatching network-vif-plugged-8372f59a-de9e-4062-be0b-39996b8f6c9e {{(pid=62070) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1023.077622] env[62070]: WARNING nova.compute.manager [req-52f68f23-57b8-4c87-bb70-1b8cb64ed13c req-a1b4158c-dc07-4aed-83b4-a63c13969e79 service nova] [instance: 7bfda953-ac95-4dce-b7a7-c570eae35582] Received unexpected event network-vif-plugged-8372f59a-de9e-4062-be0b-39996b8f6c9e for instance with vm_state building and task_state spawning. [ 1023.077785] env[62070]: DEBUG nova.compute.manager [req-52f68f23-57b8-4c87-bb70-1b8cb64ed13c req-a1b4158c-dc07-4aed-83b4-a63c13969e79 service nova] [instance: 7bfda953-ac95-4dce-b7a7-c570eae35582] Received event network-changed-8372f59a-de9e-4062-be0b-39996b8f6c9e {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1023.077964] env[62070]: DEBUG nova.compute.manager [req-52f68f23-57b8-4c87-bb70-1b8cb64ed13c req-a1b4158c-dc07-4aed-83b4-a63c13969e79 service nova] [instance: 7bfda953-ac95-4dce-b7a7-c570eae35582] Refreshing instance network info cache due to event network-changed-8372f59a-de9e-4062-be0b-39996b8f6c9e. {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1023.078222] env[62070]: DEBUG oslo_concurrency.lockutils [req-52f68f23-57b8-4c87-bb70-1b8cb64ed13c req-a1b4158c-dc07-4aed-83b4-a63c13969e79 service nova] Acquiring lock "refresh_cache-7bfda953-ac95-4dce-b7a7-c570eae35582" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1023.148054] env[62070]: DEBUG nova.scheduler.client.report [None req-2a9c8032-9eaa-42ed-988b-ffc6e40f7aff tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1023.270856] env[62070]: DEBUG oslo_vmware.api [None req-e326279a-fafc-42e7-9e62-9e1b8572336d tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Task: {'id': task-1122295, 'name': PowerOffVM_Task, 'duration_secs': 0.244278} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1023.271170] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-e326279a-fafc-42e7-9e62-9e1b8572336d tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] [instance: 53a1791d-38fd-4721-b82c-2f0922348300] Powered off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1023.271955] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-425f72b9-13d2-41cd-bb31-5b0040a9476d {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.294166] env[62070]: DEBUG nova.network.neutron [None req-4c27c27b-0ed3-4e75-bd51-ec4e9e507275 tempest-AttachInterfacesUnderV243Test-1418654099 tempest-AttachInterfacesUnderV243Test-1418654099-project-member] [instance: 7bfda953-ac95-4dce-b7a7-c570eae35582] Instance cache missing network info. {{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1023.296959] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3667dfd-d2f7-485b-ae59-081900cef330 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.319873] env[62070]: DEBUG oslo_concurrency.lockutils [None req-d0ef37a9-6db8-476c-932a-937fc6ece311 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1023.453451] env[62070]: DEBUG nova.network.neutron [None req-4c27c27b-0ed3-4e75-bd51-ec4e9e507275 tempest-AttachInterfacesUnderV243Test-1418654099 tempest-AttachInterfacesUnderV243Test-1418654099-project-member] [instance: 7bfda953-ac95-4dce-b7a7-c570eae35582] Updating instance_info_cache with network_info: [{"id": "8372f59a-de9e-4062-be0b-39996b8f6c9e", "address": "fa:16:3e:5b:b2:cf", "network": {"id": "b8a611af-59c0-4ce4-9889-55902a16c816", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-841643028-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3587df64b22e4cfc8220532cdda18c28", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f4a795c-8718-4a7c-aafe-9da231df10f8", "external-id": "nsx-vlan-transportzone-162", "segmentation_id": 162, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8372f59a-de", "ovs_interfaceid": "8372f59a-de9e-4062-be0b-39996b8f6c9e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1023.716279] env[62070]: DEBUG nova.compute.manager [None req-bf2c8c8b-db77-4ce2-a07d-2a2532257024 tempest-DeleteServersTestJSON-2112665073 
tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 33d04e59-da01-4ba3-ac42-ab93372a332d] Stashing vm_state: active {{(pid=62070) _prep_resize /opt/stack/nova/nova/compute/manager.py:5624}} [ 1023.809744] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-e326279a-fafc-42e7-9e62-9e1b8572336d tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] [instance: 53a1791d-38fd-4721-b82c-2f0922348300] Creating Snapshot of the VM instance {{(pid=62070) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1023.810126] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-b6becc3b-6ba9-4ef1-ab17-76734340da34 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.818446] env[62070]: DEBUG oslo_vmware.api [None req-e326279a-fafc-42e7-9e62-9e1b8572336d tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Waiting for the task: (returnval){ [ 1023.818446] env[62070]: value = "task-1122296" [ 1023.818446] env[62070]: _type = "Task" [ 1023.818446] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1023.822554] env[62070]: DEBUG nova.compute.manager [None req-1c62a5a1-6f00-412a-ada6-05392ff9718d tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] [instance: 1ababba6-838c-4ba6-bd83-e2b15aaf4b97] Start spawning the instance on the hypervisor. {{(pid=62070) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1023.834278] env[62070]: DEBUG oslo_vmware.api [None req-e326279a-fafc-42e7-9e62-9e1b8572336d tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Task: {'id': task-1122296, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1023.850100] env[62070]: DEBUG nova.virt.hardware [None req-1c62a5a1-6f00-412a-ada6-05392ff9718d tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T09:21:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T09:21:17Z,direct_url=,disk_format='vmdk',id=43ea607c-7ece-4601-9b11-75c6a16aa7dd,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9d42cb2bbadf40d6b35f237f71234611',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T09:21:18Z,virtual_size=,visibility=), allow threads: False {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1023.850405] env[62070]: DEBUG nova.virt.hardware [None req-1c62a5a1-6f00-412a-ada6-05392ff9718d tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] Flavor limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1023.850590] env[62070]: DEBUG nova.virt.hardware [None req-1c62a5a1-6f00-412a-ada6-05392ff9718d tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] Image limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1023.850799] env[62070]: DEBUG nova.virt.hardware [None req-1c62a5a1-6f00-412a-ada6-05392ff9718d tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] Flavor pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1023.850955] env[62070]: DEBUG nova.virt.hardware [None req-1c62a5a1-6f00-412a-ada6-05392ff9718d tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] Image pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1023.851143] env[62070]: DEBUG nova.virt.hardware [None req-1c62a5a1-6f00-412a-ada6-05392ff9718d tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1023.851574] env[62070]: DEBUG nova.virt.hardware [None req-1c62a5a1-6f00-412a-ada6-05392ff9718d tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1023.851574] env[62070]: DEBUG nova.virt.hardware [None req-1c62a5a1-6f00-412a-ada6-05392ff9718d tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1023.851706] env[62070]: DEBUG 
nova.virt.hardware [None req-1c62a5a1-6f00-412a-ada6-05392ff9718d tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] Got 1 possible topologies {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1023.851876] env[62070]: DEBUG nova.virt.hardware [None req-1c62a5a1-6f00-412a-ada6-05392ff9718d tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1023.852065] env[62070]: DEBUG nova.virt.hardware [None req-1c62a5a1-6f00-412a-ada6-05392ff9718d tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1023.853056] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6459ba05-fc8b-486a-9335-f8faab849ba3 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.861324] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e2f7ba0-bc18-4234-98d6-9d847f588835 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.956098] env[62070]: DEBUG oslo_concurrency.lockutils [None req-4c27c27b-0ed3-4e75-bd51-ec4e9e507275 tempest-AttachInterfacesUnderV243Test-1418654099 tempest-AttachInterfacesUnderV243Test-1418654099-project-member] Releasing lock "refresh_cache-7bfda953-ac95-4dce-b7a7-c570eae35582" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1023.956420] env[62070]: DEBUG nova.compute.manager [None req-4c27c27b-0ed3-4e75-bd51-ec4e9e507275 tempest-AttachInterfacesUnderV243Test-1418654099 tempest-AttachInterfacesUnderV243Test-1418654099-project-member] [instance: 7bfda953-ac95-4dce-b7a7-c570eae35582] Instance network_info: |[{"id": "8372f59a-de9e-4062-be0b-39996b8f6c9e", "address": "fa:16:3e:5b:b2:cf", "network": {"id": "b8a611af-59c0-4ce4-9889-55902a16c816", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-841643028-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3587df64b22e4cfc8220532cdda18c28", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f4a795c-8718-4a7c-aafe-9da231df10f8", "external-id": "nsx-vlan-transportzone-162", "segmentation_id": 162, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8372f59a-de", "ovs_interfaceid": "8372f59a-de9e-4062-be0b-39996b8f6c9e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1023.956795] env[62070]: DEBUG oslo_concurrency.lockutils 
[req-52f68f23-57b8-4c87-bb70-1b8cb64ed13c req-a1b4158c-dc07-4aed-83b4-a63c13969e79 service nova] Acquired lock "refresh_cache-7bfda953-ac95-4dce-b7a7-c570eae35582" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1023.957059] env[62070]: DEBUG nova.network.neutron [req-52f68f23-57b8-4c87-bb70-1b8cb64ed13c req-a1b4158c-dc07-4aed-83b4-a63c13969e79 service nova] [instance: 7bfda953-ac95-4dce-b7a7-c570eae35582] Refreshing network info cache for port 8372f59a-de9e-4062-be0b-39996b8f6c9e {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1023.958387] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-4c27c27b-0ed3-4e75-bd51-ec4e9e507275 tempest-AttachInterfacesUnderV243Test-1418654099 tempest-AttachInterfacesUnderV243Test-1418654099-project-member] [instance: 7bfda953-ac95-4dce-b7a7-c570eae35582] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:5b:b2:cf', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3f4a795c-8718-4a7c-aafe-9da231df10f8', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8372f59a-de9e-4062-be0b-39996b8f6c9e', 'vif_model': 'vmxnet3'}] {{(pid=62070) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1023.965922] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-4c27c27b-0ed3-4e75-bd51-ec4e9e507275 tempest-AttachInterfacesUnderV243Test-1418654099 tempest-AttachInterfacesUnderV243Test-1418654099-project-member] Creating folder: Project (3587df64b22e4cfc8220532cdda18c28). Parent ref: group-v245319. {{(pid=62070) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1023.969021] env[62070]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-333f278b-6575-4062-b339-6c9b8a1a6ffb {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.980228] env[62070]: INFO nova.virt.vmwareapi.vm_util [None req-4c27c27b-0ed3-4e75-bd51-ec4e9e507275 tempest-AttachInterfacesUnderV243Test-1418654099 tempest-AttachInterfacesUnderV243Test-1418654099-project-member] Created folder: Project (3587df64b22e4cfc8220532cdda18c28) in parent group-v245319. [ 1023.980441] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-4c27c27b-0ed3-4e75-bd51-ec4e9e507275 tempest-AttachInterfacesUnderV243Test-1418654099 tempest-AttachInterfacesUnderV243Test-1418654099-project-member] Creating folder: Instances. Parent ref: group-v245491. {{(pid=62070) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1023.980691] env[62070]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2504e9fd-57f5-4a65-b1d6-21099dbe6ebb {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.989676] env[62070]: INFO nova.virt.vmwareapi.vm_util [None req-4c27c27b-0ed3-4e75-bd51-ec4e9e507275 tempest-AttachInterfacesUnderV243Test-1418654099 tempest-AttachInterfacesUnderV243Test-1418654099-project-member] Created folder: Instances in parent group-v245491. [ 1023.989914] env[62070]: DEBUG oslo.service.loopingcall [None req-4c27c27b-0ed3-4e75-bd51-ec4e9e507275 tempest-AttachInterfacesUnderV243Test-1418654099 tempest-AttachInterfacesUnderV243Test-1418654099-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1023.990125] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7bfda953-ac95-4dce-b7a7-c570eae35582] Creating VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1023.990339] env[62070]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a88b9275-7598-4326-9327-190719e86554 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.012126] env[62070]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1024.012126] env[62070]: value = "task-1122299" [ 1024.012126] env[62070]: _type = "Task" [ 1024.012126] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1024.019877] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1122299, 'name': CreateVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1024.166118] env[62070]: DEBUG oslo_concurrency.lockutils [None req-2a9c8032-9eaa-42ed-988b-ffc6e40f7aff tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.364s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1024.167454] env[62070]: DEBUG oslo_concurrency.lockutils [None req-a65cb8ad-84af-4b6e-a9a1-bf2808585ba4 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.750s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1024.169942] env[62070]: INFO nova.compute.claims [None req-a65cb8ad-84af-4b6e-a9a1-bf2808585ba4 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: 6b6e22b9-71fb-4139-993a-7b9fcf89d8e0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1024.241443] env[62070]: DEBUG oslo_concurrency.lockutils [None req-bf2c8c8b-db77-4ce2-a07d-2a2532257024 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1024.329185] env[62070]: DEBUG oslo_vmware.api [None req-e326279a-fafc-42e7-9e62-9e1b8572336d tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Task: {'id': task-1122296, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1024.526956] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1122299, 'name': CreateVM_Task, 'duration_secs': 0.321505} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1024.528351] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7bfda953-ac95-4dce-b7a7-c570eae35582] Created VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1024.529233] env[62070]: DEBUG oslo_concurrency.lockutils [None req-4c27c27b-0ed3-4e75-bd51-ec4e9e507275 tempest-AttachInterfacesUnderV243Test-1418654099 tempest-AttachInterfacesUnderV243Test-1418654099-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1024.530031] env[62070]: DEBUG oslo_concurrency.lockutils [None req-4c27c27b-0ed3-4e75-bd51-ec4e9e507275 tempest-AttachInterfacesUnderV243Test-1418654099 tempest-AttachInterfacesUnderV243Test-1418654099-project-member] Acquired lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1024.530409] env[62070]: DEBUG oslo_concurrency.lockutils [None req-4c27c27b-0ed3-4e75-bd51-ec4e9e507275 tempest-AttachInterfacesUnderV243Test-1418654099 tempest-AttachInterfacesUnderV243Test-1418654099-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1024.532287] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-340aa7c3-2dca-4df6-98c2-2b7951ea85bf {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.538428] env[62070]: DEBUG oslo_vmware.api [None req-4c27c27b-0ed3-4e75-bd51-ec4e9e507275 tempest-AttachInterfacesUnderV243Test-1418654099 tempest-AttachInterfacesUnderV243Test-1418654099-project-member] Waiting for the task: (returnval){ [ 1024.538428] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]529156c5-3d6b-a946-63a9-56cce1c1e371" [ 1024.538428] env[62070]: _type = "Task" [ 1024.538428] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1024.549055] env[62070]: DEBUG oslo_vmware.api [None req-4c27c27b-0ed3-4e75-bd51-ec4e9e507275 tempest-AttachInterfacesUnderV243Test-1418654099 tempest-AttachInterfacesUnderV243Test-1418654099-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]529156c5-3d6b-a946-63a9-56cce1c1e371, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1024.563423] env[62070]: DEBUG nova.compute.manager [req-cc5f8089-2373-4cc1-ada2-ca5261fe1fa6 req-29a428b3-1a8f-44cc-95f9-631bcf8fc250 service nova] [instance: 1ababba6-838c-4ba6-bd83-e2b15aaf4b97] Received event network-vif-plugged-df67c123-2618-45ce-8175-66a34206293a {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1024.563679] env[62070]: DEBUG oslo_concurrency.lockutils [req-cc5f8089-2373-4cc1-ada2-ca5261fe1fa6 req-29a428b3-1a8f-44cc-95f9-631bcf8fc250 service nova] Acquiring lock "1ababba6-838c-4ba6-bd83-e2b15aaf4b97-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1024.563899] env[62070]: DEBUG oslo_concurrency.lockutils [req-cc5f8089-2373-4cc1-ada2-ca5261fe1fa6 req-29a428b3-1a8f-44cc-95f9-631bcf8fc250 service nova] Lock "1ababba6-838c-4ba6-bd83-e2b15aaf4b97-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1024.564243] env[62070]: DEBUG oslo_concurrency.lockutils [req-cc5f8089-2373-4cc1-ada2-ca5261fe1fa6 req-29a428b3-1a8f-44cc-95f9-631bcf8fc250 service nova] Lock "1ababba6-838c-4ba6-bd83-e2b15aaf4b97-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1024.564493] env[62070]: DEBUG nova.compute.manager [req-cc5f8089-2373-4cc1-ada2-ca5261fe1fa6 req-29a428b3-1a8f-44cc-95f9-631bcf8fc250 service nova] [instance: 1ababba6-838c-4ba6-bd83-e2b15aaf4b97] No waiting events found dispatching network-vif-plugged-df67c123-2618-45ce-8175-66a34206293a {{(pid=62070) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1024.564720] env[62070]: WARNING nova.compute.manager [req-cc5f8089-2373-4cc1-ada2-ca5261fe1fa6 req-29a428b3-1a8f-44cc-95f9-631bcf8fc250 service nova] [instance: 1ababba6-838c-4ba6-bd83-e2b15aaf4b97] Received unexpected event network-vif-plugged-df67c123-2618-45ce-8175-66a34206293a for instance with vm_state building and task_state spawning. [ 1024.583296] env[62070]: DEBUG nova.network.neutron [req-52f68f23-57b8-4c87-bb70-1b8cb64ed13c req-a1b4158c-dc07-4aed-83b4-a63c13969e79 service nova] [instance: 7bfda953-ac95-4dce-b7a7-c570eae35582] Updated VIF entry in instance network info cache for port 8372f59a-de9e-4062-be0b-39996b8f6c9e. 
{{(pid=62070) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1024.583703] env[62070]: DEBUG nova.network.neutron [req-52f68f23-57b8-4c87-bb70-1b8cb64ed13c req-a1b4158c-dc07-4aed-83b4-a63c13969e79 service nova] [instance: 7bfda953-ac95-4dce-b7a7-c570eae35582] Updating instance_info_cache with network_info: [{"id": "8372f59a-de9e-4062-be0b-39996b8f6c9e", "address": "fa:16:3e:5b:b2:cf", "network": {"id": "b8a611af-59c0-4ce4-9889-55902a16c816", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-841643028-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3587df64b22e4cfc8220532cdda18c28", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f4a795c-8718-4a7c-aafe-9da231df10f8", "external-id": "nsx-vlan-transportzone-162", "segmentation_id": 162, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8372f59a-de", "ovs_interfaceid": "8372f59a-de9e-4062-be0b-39996b8f6c9e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1024.727815] env[62070]: INFO nova.scheduler.client.report [None req-2a9c8032-9eaa-42ed-988b-ffc6e40f7aff tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Deleted allocation for migration e930f8db-6e00-40a3-baf3-db252b53e586 [ 1024.830887] env[62070]: DEBUG oslo_vmware.api [None req-e326279a-fafc-42e7-9e62-9e1b8572336d tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Task: {'id': task-1122296, 'name': CreateSnapshot_Task, 'duration_secs': 0.619441} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1024.831179] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-e326279a-fafc-42e7-9e62-9e1b8572336d tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] [instance: 53a1791d-38fd-4721-b82c-2f0922348300] Created Snapshot of the VM instance {{(pid=62070) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1024.831929] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48370e5f-1b1d-4ea4-b278-36dad530aa16 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.049031] env[62070]: DEBUG oslo_vmware.api [None req-4c27c27b-0ed3-4e75-bd51-ec4e9e507275 tempest-AttachInterfacesUnderV243Test-1418654099 tempest-AttachInterfacesUnderV243Test-1418654099-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]529156c5-3d6b-a946-63a9-56cce1c1e371, 'name': SearchDatastore_Task, 'duration_secs': 0.009091} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1025.049371] env[62070]: DEBUG oslo_concurrency.lockutils [None req-4c27c27b-0ed3-4e75-bd51-ec4e9e507275 tempest-AttachInterfacesUnderV243Test-1418654099 tempest-AttachInterfacesUnderV243Test-1418654099-project-member] Releasing lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1025.049627] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-4c27c27b-0ed3-4e75-bd51-ec4e9e507275 tempest-AttachInterfacesUnderV243Test-1418654099 tempest-AttachInterfacesUnderV243Test-1418654099-project-member] [instance: 7bfda953-ac95-4dce-b7a7-c570eae35582] Processing image 43ea607c-7ece-4601-9b11-75c6a16aa7dd {{(pid=62070) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1025.049871] env[62070]: DEBUG oslo_concurrency.lockutils [None req-4c27c27b-0ed3-4e75-bd51-ec4e9e507275 tempest-AttachInterfacesUnderV243Test-1418654099 tempest-AttachInterfacesUnderV243Test-1418654099-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1025.050038] env[62070]: DEBUG oslo_concurrency.lockutils [None req-4c27c27b-0ed3-4e75-bd51-ec4e9e507275 tempest-AttachInterfacesUnderV243Test-1418654099 tempest-AttachInterfacesUnderV243Test-1418654099-project-member] Acquired lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1025.050230] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-4c27c27b-0ed3-4e75-bd51-ec4e9e507275 tempest-AttachInterfacesUnderV243Test-1418654099 tempest-AttachInterfacesUnderV243Test-1418654099-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1025.050515] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b4ef384c-82a6-4d5d-bf42-df98be17a6d1 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.058416] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-4c27c27b-0ed3-4e75-bd51-ec4e9e507275 tempest-AttachInterfacesUnderV243Test-1418654099 tempest-AttachInterfacesUnderV243Test-1418654099-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1025.058594] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-4c27c27b-0ed3-4e75-bd51-ec4e9e507275 tempest-AttachInterfacesUnderV243Test-1418654099 tempest-AttachInterfacesUnderV243Test-1418654099-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62070) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1025.059305] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9906ec35-15e8-4b58-a9a0-a6b2e3307e18 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.064277] env[62070]: DEBUG oslo_vmware.api [None req-4c27c27b-0ed3-4e75-bd51-ec4e9e507275 tempest-AttachInterfacesUnderV243Test-1418654099 tempest-AttachInterfacesUnderV243Test-1418654099-project-member] Waiting for the task: (returnval){ [ 1025.064277] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]527b2667-34f8-4744-318c-70957464d81b" [ 1025.064277] env[62070]: _type = "Task" [ 1025.064277] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1025.071219] env[62070]: DEBUG oslo_vmware.api [None req-4c27c27b-0ed3-4e75-bd51-ec4e9e507275 tempest-AttachInterfacesUnderV243Test-1418654099 tempest-AttachInterfacesUnderV243Test-1418654099-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]527b2667-34f8-4744-318c-70957464d81b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1025.086229] env[62070]: DEBUG oslo_concurrency.lockutils [req-52f68f23-57b8-4c87-bb70-1b8cb64ed13c req-a1b4158c-dc07-4aed-83b4-a63c13969e79 service nova] Releasing lock "refresh_cache-7bfda953-ac95-4dce-b7a7-c570eae35582" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1025.120261] env[62070]: DEBUG nova.network.neutron [None req-1c62a5a1-6f00-412a-ada6-05392ff9718d tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] [instance: 1ababba6-838c-4ba6-bd83-e2b15aaf4b97] Successfully updated port: df67c123-2618-45ce-8175-66a34206293a {{(pid=62070) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1025.143358] env[62070]: DEBUG nova.compute.manager [req-a083e381-3e5e-464e-b10d-8fe9036200a5 req-6bc41795-b039-4bc4-9761-010e739b945f service nova] [instance: 1ababba6-838c-4ba6-bd83-e2b15aaf4b97] Received event network-changed-df67c123-2618-45ce-8175-66a34206293a {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1025.143596] env[62070]: DEBUG nova.compute.manager [req-a083e381-3e5e-464e-b10d-8fe9036200a5 req-6bc41795-b039-4bc4-9761-010e739b945f service nova] [instance: 1ababba6-838c-4ba6-bd83-e2b15aaf4b97] Refreshing instance network info cache due to event network-changed-df67c123-2618-45ce-8175-66a34206293a. 
{{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1025.143766] env[62070]: DEBUG oslo_concurrency.lockutils [req-a083e381-3e5e-464e-b10d-8fe9036200a5 req-6bc41795-b039-4bc4-9761-010e739b945f service nova] Acquiring lock "refresh_cache-1ababba6-838c-4ba6-bd83-e2b15aaf4b97" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1025.143915] env[62070]: DEBUG oslo_concurrency.lockutils [req-a083e381-3e5e-464e-b10d-8fe9036200a5 req-6bc41795-b039-4bc4-9761-010e739b945f service nova] Acquired lock "refresh_cache-1ababba6-838c-4ba6-bd83-e2b15aaf4b97" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1025.144102] env[62070]: DEBUG nova.network.neutron [req-a083e381-3e5e-464e-b10d-8fe9036200a5 req-6bc41795-b039-4bc4-9761-010e739b945f service nova] [instance: 1ababba6-838c-4ba6-bd83-e2b15aaf4b97] Refreshing network info cache for port df67c123-2618-45ce-8175-66a34206293a {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1025.233215] env[62070]: DEBUG oslo_concurrency.lockutils [None req-2a9c8032-9eaa-42ed-988b-ffc6e40f7aff tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Lock "71aead12-a182-40a7-b5a9-91c01271b800" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 12.897s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1025.349953] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-e326279a-fafc-42e7-9e62-9e1b8572336d tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] [instance: 53a1791d-38fd-4721-b82c-2f0922348300] Creating linked-clone VM from snapshot {{(pid=62070) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1025.352614] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-88b984ba-ed55-44dd-915a-a39eb7c70599 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.361764] env[62070]: DEBUG oslo_vmware.api [None req-e326279a-fafc-42e7-9e62-9e1b8572336d tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Waiting for the task: (returnval){ [ 1025.361764] env[62070]: value = "task-1122300" [ 1025.361764] env[62070]: _type = "Task" [ 1025.361764] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1025.371737] env[62070]: DEBUG oslo_vmware.api [None req-e326279a-fafc-42e7-9e62-9e1b8572336d tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Task: {'id': task-1122300, 'name': CloneVM_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1025.420480] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d77766f4-3eaa-4ea4-a1de-899db0018b8b {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.427982] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0cf6c1c9-51f4-4276-922c-4748882d1591 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.459069] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94618515-c23b-48bf-8bfb-03a88543368c {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.466944] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8fa037f-328a-4f71-81fb-a053e42cc5fa {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.481744] env[62070]: DEBUG nova.compute.provider_tree [None req-a65cb8ad-84af-4b6e-a9a1-bf2808585ba4 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1025.574435] env[62070]: DEBUG oslo_vmware.api [None req-4c27c27b-0ed3-4e75-bd51-ec4e9e507275 tempest-AttachInterfacesUnderV243Test-1418654099 tempest-AttachInterfacesUnderV243Test-1418654099-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]527b2667-34f8-4744-318c-70957464d81b, 'name': SearchDatastore_Task, 'duration_secs': 0.007696} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1025.575512] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-65fa23de-8337-4106-a54d-79763cdadbd9 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.580760] env[62070]: DEBUG oslo_vmware.api [None req-4c27c27b-0ed3-4e75-bd51-ec4e9e507275 tempest-AttachInterfacesUnderV243Test-1418654099 tempest-AttachInterfacesUnderV243Test-1418654099-project-member] Waiting for the task: (returnval){ [ 1025.580760] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]525d67db-b358-454c-e353-e96e9f77309f" [ 1025.580760] env[62070]: _type = "Task" [ 1025.580760] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1025.589043] env[62070]: DEBUG oslo_vmware.api [None req-4c27c27b-0ed3-4e75-bd51-ec4e9e507275 tempest-AttachInterfacesUnderV243Test-1418654099 tempest-AttachInterfacesUnderV243Test-1418654099-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]525d67db-b358-454c-e353-e96e9f77309f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1025.623104] env[62070]: DEBUG oslo_concurrency.lockutils [None req-1c62a5a1-6f00-412a-ada6-05392ff9718d tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] Acquiring lock "refresh_cache-1ababba6-838c-4ba6-bd83-e2b15aaf4b97" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1025.692639] env[62070]: DEBUG nova.network.neutron [req-a083e381-3e5e-464e-b10d-8fe9036200a5 req-6bc41795-b039-4bc4-9761-010e739b945f service nova] [instance: 1ababba6-838c-4ba6-bd83-e2b15aaf4b97] Instance cache missing network info. {{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1025.800057] env[62070]: DEBUG nova.network.neutron [req-a083e381-3e5e-464e-b10d-8fe9036200a5 req-6bc41795-b039-4bc4-9761-010e739b945f service nova] [instance: 1ababba6-838c-4ba6-bd83-e2b15aaf4b97] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1025.873251] env[62070]: DEBUG oslo_vmware.api [None req-e326279a-fafc-42e7-9e62-9e1b8572336d tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Task: {'id': task-1122300, 'name': CloneVM_Task} progress is 94%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1025.984644] env[62070]: DEBUG nova.scheduler.client.report [None req-a65cb8ad-84af-4b6e-a9a1-bf2808585ba4 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1026.043115] env[62070]: DEBUG oslo_concurrency.lockutils [None req-eebc804e-1b9f-461a-b6cc-fd58e5463e88 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Acquiring lock "71aead12-a182-40a7-b5a9-91c01271b800" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1026.043440] env[62070]: DEBUG oslo_concurrency.lockutils [None req-eebc804e-1b9f-461a-b6cc-fd58e5463e88 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Lock "71aead12-a182-40a7-b5a9-91c01271b800" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1026.043701] env[62070]: DEBUG oslo_concurrency.lockutils [None req-eebc804e-1b9f-461a-b6cc-fd58e5463e88 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Acquiring lock "71aead12-a182-40a7-b5a9-91c01271b800-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62070) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1026.043918] env[62070]: DEBUG oslo_concurrency.lockutils [None req-eebc804e-1b9f-461a-b6cc-fd58e5463e88 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Lock "71aead12-a182-40a7-b5a9-91c01271b800-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1026.044130] env[62070]: DEBUG oslo_concurrency.lockutils [None req-eebc804e-1b9f-461a-b6cc-fd58e5463e88 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Lock "71aead12-a182-40a7-b5a9-91c01271b800-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1026.046442] env[62070]: INFO nova.compute.manager [None req-eebc804e-1b9f-461a-b6cc-fd58e5463e88 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 71aead12-a182-40a7-b5a9-91c01271b800] Terminating instance [ 1026.048481] env[62070]: DEBUG nova.compute.manager [None req-eebc804e-1b9f-461a-b6cc-fd58e5463e88 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 71aead12-a182-40a7-b5a9-91c01271b800] Start destroying the instance on the hypervisor. {{(pid=62070) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1026.048696] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-eebc804e-1b9f-461a-b6cc-fd58e5463e88 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 71aead12-a182-40a7-b5a9-91c01271b800] Destroying instance {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1026.049686] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0454fa02-1f32-4b80-a157-918b4348b2db {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.057887] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-eebc804e-1b9f-461a-b6cc-fd58e5463e88 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 71aead12-a182-40a7-b5a9-91c01271b800] Powering off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1026.058503] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a4d3252e-f988-4557-8b75-20a06c325415 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.065464] env[62070]: DEBUG oslo_vmware.api [None req-eebc804e-1b9f-461a-b6cc-fd58e5463e88 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Waiting for the task: (returnval){ [ 1026.065464] env[62070]: value = "task-1122301" [ 1026.065464] env[62070]: _type = "Task" [ 1026.065464] env[62070]: } to complete. 
{{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1026.073871] env[62070]: DEBUG oslo_vmware.api [None req-eebc804e-1b9f-461a-b6cc-fd58e5463e88 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Task: {'id': task-1122301, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1026.089687] env[62070]: DEBUG oslo_vmware.api [None req-4c27c27b-0ed3-4e75-bd51-ec4e9e507275 tempest-AttachInterfacesUnderV243Test-1418654099 tempest-AttachInterfacesUnderV243Test-1418654099-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]525d67db-b358-454c-e353-e96e9f77309f, 'name': SearchDatastore_Task, 'duration_secs': 0.025532} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1026.089957] env[62070]: DEBUG oslo_concurrency.lockutils [None req-4c27c27b-0ed3-4e75-bd51-ec4e9e507275 tempest-AttachInterfacesUnderV243Test-1418654099 tempest-AttachInterfacesUnderV243Test-1418654099-project-member] Releasing lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1026.090281] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-4c27c27b-0ed3-4e75-bd51-ec4e9e507275 tempest-AttachInterfacesUnderV243Test-1418654099 tempest-AttachInterfacesUnderV243Test-1418654099-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore1] 7bfda953-ac95-4dce-b7a7-c570eae35582/7bfda953-ac95-4dce-b7a7-c570eae35582.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1026.090580] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a82a461a-3a3a-44cc-acde-aca000bf7cb2 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.098868] env[62070]: DEBUG oslo_vmware.api [None req-4c27c27b-0ed3-4e75-bd51-ec4e9e507275 tempest-AttachInterfacesUnderV243Test-1418654099 tempest-AttachInterfacesUnderV243Test-1418654099-project-member] Waiting for the task: (returnval){ [ 1026.098868] env[62070]: value = "task-1122302" [ 1026.098868] env[62070]: _type = "Task" [ 1026.098868] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1026.106847] env[62070]: DEBUG oslo_vmware.api [None req-4c27c27b-0ed3-4e75-bd51-ec4e9e507275 tempest-AttachInterfacesUnderV243Test-1418654099 tempest-AttachInterfacesUnderV243Test-1418654099-project-member] Task: {'id': task-1122302, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1026.304925] env[62070]: DEBUG oslo_concurrency.lockutils [req-a083e381-3e5e-464e-b10d-8fe9036200a5 req-6bc41795-b039-4bc4-9761-010e739b945f service nova] Releasing lock "refresh_cache-1ababba6-838c-4ba6-bd83-e2b15aaf4b97" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1026.304925] env[62070]: DEBUG oslo_concurrency.lockutils [None req-1c62a5a1-6f00-412a-ada6-05392ff9718d tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] Acquired lock "refresh_cache-1ababba6-838c-4ba6-bd83-e2b15aaf4b97" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1026.304925] env[62070]: DEBUG nova.network.neutron [None req-1c62a5a1-6f00-412a-ada6-05392ff9718d tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] [instance: 1ababba6-838c-4ba6-bd83-e2b15aaf4b97] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1026.374728] env[62070]: DEBUG oslo_vmware.api [None req-e326279a-fafc-42e7-9e62-9e1b8572336d tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Task: {'id': task-1122300, 'name': CloneVM_Task} progress is 94%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1026.490877] env[62070]: DEBUG oslo_concurrency.lockutils [None req-a65cb8ad-84af-4b6e-a9a1-bf2808585ba4 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.323s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1026.491445] env[62070]: DEBUG nova.compute.manager [None req-a65cb8ad-84af-4b6e-a9a1-bf2808585ba4 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: 6b6e22b9-71fb-4139-993a-7b9fcf89d8e0] Start building networks asynchronously for instance. 
{{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1026.494231] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 7.035s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1026.494411] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1026.494640] env[62070]: DEBUG nova.compute.resource_tracker [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62070) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1026.494986] env[62070]: DEBUG oslo_concurrency.lockutils [None req-da75baf3-0e49-4102-b74a-ac4b6d4ddcb1 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 6.519s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1026.495230] env[62070]: DEBUG nova.objects.instance [None req-da75baf3-0e49-4102-b74a-ac4b6d4ddcb1 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Lazy-loading 'resources' on Instance uuid 5ec9074b-1237-4404-b13c-a7ca0dbe1d43 {{(pid=62070) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1026.497728] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3474beff-1ce9-458d-ac6f-7d43e92243ae {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.507687] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76e0f40d-b958-4536-95cf-159d7eeb0e6b {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.532783] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92b43289-3176-43f7-bda4-34f2d70ab276 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.544780] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef0a51dc-ac3f-444f-822b-3c6038998262 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.575881] env[62070]: DEBUG nova.compute.resource_tracker [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179518MB free_disk=169GB free_vcpus=48 pci_devices=None {{(pid=62070) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1026.576080] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Acquiring lock "compute_resources" by 
"nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1026.583822] env[62070]: DEBUG oslo_vmware.api [None req-eebc804e-1b9f-461a-b6cc-fd58e5463e88 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Task: {'id': task-1122301, 'name': PowerOffVM_Task, 'duration_secs': 0.215366} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1026.584055] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-eebc804e-1b9f-461a-b6cc-fd58e5463e88 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 71aead12-a182-40a7-b5a9-91c01271b800] Powered off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1026.584244] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-eebc804e-1b9f-461a-b6cc-fd58e5463e88 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 71aead12-a182-40a7-b5a9-91c01271b800] Unregistering the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1026.584492] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d7318562-6142-4592-a7dc-148f858c9198 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.608557] env[62070]: DEBUG oslo_vmware.api [None req-4c27c27b-0ed3-4e75-bd51-ec4e9e507275 tempest-AttachInterfacesUnderV243Test-1418654099 tempest-AttachInterfacesUnderV243Test-1418654099-project-member] Task: {'id': task-1122302, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.4645} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1026.608822] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-4c27c27b-0ed3-4e75-bd51-ec4e9e507275 tempest-AttachInterfacesUnderV243Test-1418654099 tempest-AttachInterfacesUnderV243Test-1418654099-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore1] 7bfda953-ac95-4dce-b7a7-c570eae35582/7bfda953-ac95-4dce-b7a7-c570eae35582.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 1026.609054] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-4c27c27b-0ed3-4e75-bd51-ec4e9e507275 tempest-AttachInterfacesUnderV243Test-1418654099 tempest-AttachInterfacesUnderV243Test-1418654099-project-member] [instance: 7bfda953-ac95-4dce-b7a7-c570eae35582] Extending root virtual disk to 1048576 {{(pid=62070) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1026.609306] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-caa0bf03-01a0-49c1-b35b-126e626b0e7d {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.615769] env[62070]: DEBUG oslo_vmware.api [None req-4c27c27b-0ed3-4e75-bd51-ec4e9e507275 tempest-AttachInterfacesUnderV243Test-1418654099 tempest-AttachInterfacesUnderV243Test-1418654099-project-member] Waiting for the task: (returnval){ [ 1026.615769] env[62070]: value = "task-1122304" [ 1026.615769] env[62070]: _type = "Task" [ 1026.615769] env[62070]: } to complete. 
{{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1026.622932] env[62070]: DEBUG oslo_vmware.api [None req-4c27c27b-0ed3-4e75-bd51-ec4e9e507275 tempest-AttachInterfacesUnderV243Test-1418654099 tempest-AttachInterfacesUnderV243Test-1418654099-project-member] Task: {'id': task-1122304, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1026.660985] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-eebc804e-1b9f-461a-b6cc-fd58e5463e88 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 71aead12-a182-40a7-b5a9-91c01271b800] Unregistered the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1026.661299] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-eebc804e-1b9f-461a-b6cc-fd58e5463e88 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 71aead12-a182-40a7-b5a9-91c01271b800] Deleting contents of the VM from datastore datastore2 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1026.661515] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-eebc804e-1b9f-461a-b6cc-fd58e5463e88 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Deleting the datastore file [datastore2] 71aead12-a182-40a7-b5a9-91c01271b800 {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1026.661794] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-73118c84-754b-4688-9caa-ae49891f3909 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.668605] env[62070]: DEBUG oslo_vmware.api [None req-eebc804e-1b9f-461a-b6cc-fd58e5463e88 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Waiting for the task: (returnval){ [ 1026.668605] env[62070]: value = "task-1122305" [ 1026.668605] env[62070]: _type = "Task" [ 1026.668605] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1026.678905] env[62070]: DEBUG oslo_vmware.api [None req-eebc804e-1b9f-461a-b6cc-fd58e5463e88 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Task: {'id': task-1122305, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1026.846267] env[62070]: DEBUG nova.network.neutron [None req-1c62a5a1-6f00-412a-ada6-05392ff9718d tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] [instance: 1ababba6-838c-4ba6-bd83-e2b15aaf4b97] Instance cache missing network info. {{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1026.872685] env[62070]: DEBUG oslo_vmware.api [None req-e326279a-fafc-42e7-9e62-9e1b8572336d tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Task: {'id': task-1122300, 'name': CloneVM_Task} progress is 95%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1026.998351] env[62070]: DEBUG nova.compute.utils [None req-a65cb8ad-84af-4b6e-a9a1-bf2808585ba4 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Using /dev/sd instead of None {{(pid=62070) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1027.008817] env[62070]: DEBUG nova.compute.manager [None req-a65cb8ad-84af-4b6e-a9a1-bf2808585ba4 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: 6b6e22b9-71fb-4139-993a-7b9fcf89d8e0] Allocating IP information in the background. {{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1027.008817] env[62070]: DEBUG nova.network.neutron [None req-a65cb8ad-84af-4b6e-a9a1-bf2808585ba4 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: 6b6e22b9-71fb-4139-993a-7b9fcf89d8e0] allocate_for_instance() {{(pid=62070) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1027.058363] env[62070]: DEBUG nova.network.neutron [None req-1c62a5a1-6f00-412a-ada6-05392ff9718d tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] [instance: 1ababba6-838c-4ba6-bd83-e2b15aaf4b97] Updating instance_info_cache with network_info: [{"id": "df67c123-2618-45ce-8175-66a34206293a", "address": "fa:16:3e:38:62:7a", "network": {"id": "fd8b220c-f20c-489e-9c20-28b886709536", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-452165452-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e1960779e94c4e119497a0c1117f54fc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7bcd9d2d-25c8-41ad-9a4a-93b9029ba993", "external-id": "nsx-vlan-transportzone-734", "segmentation_id": 734, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdf67c123-26", "ovs_interfaceid": "df67c123-2618-45ce-8175-66a34206293a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1027.061976] env[62070]: DEBUG nova.policy [None req-a65cb8ad-84af-4b6e-a9a1-bf2808585ba4 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'db9baf29d0b5489da2657286bfd695c0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '91e246e32f29422e90fae974cfee9d8f', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62070) authorize /opt/stack/nova/nova/policy.py:201}} [ 1027.126652] env[62070]: DEBUG oslo_vmware.api [None req-4c27c27b-0ed3-4e75-bd51-ec4e9e507275 tempest-AttachInterfacesUnderV243Test-1418654099 
tempest-AttachInterfacesUnderV243Test-1418654099-project-member] Task: {'id': task-1122304, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.096991} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1027.126930] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-4c27c27b-0ed3-4e75-bd51-ec4e9e507275 tempest-AttachInterfacesUnderV243Test-1418654099 tempest-AttachInterfacesUnderV243Test-1418654099-project-member] [instance: 7bfda953-ac95-4dce-b7a7-c570eae35582] Extended root virtual disk {{(pid=62070) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1027.127898] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d52bb77c-473b-41dd-8fec-63e6f6118233 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.150341] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-4c27c27b-0ed3-4e75-bd51-ec4e9e507275 tempest-AttachInterfacesUnderV243Test-1418654099 tempest-AttachInterfacesUnderV243Test-1418654099-project-member] [instance: 7bfda953-ac95-4dce-b7a7-c570eae35582] Reconfiguring VM instance instance-00000061 to attach disk [datastore1] 7bfda953-ac95-4dce-b7a7-c570eae35582/7bfda953-ac95-4dce-b7a7-c570eae35582.vmdk or device None with type sparse {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1027.153431] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4659b589-98b6-4382-bfe4-11fc54c424ff {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.174356] env[62070]: DEBUG oslo_vmware.api [None req-4c27c27b-0ed3-4e75-bd51-ec4e9e507275 tempest-AttachInterfacesUnderV243Test-1418654099 tempest-AttachInterfacesUnderV243Test-1418654099-project-member] Waiting for the task: (returnval){ [ 1027.174356] env[62070]: value = "task-1122306" [ 1027.174356] env[62070]: _type = "Task" [ 1027.174356] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1027.180508] env[62070]: DEBUG oslo_vmware.api [None req-eebc804e-1b9f-461a-b6cc-fd58e5463e88 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Task: {'id': task-1122305, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.141358} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1027.183280] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-eebc804e-1b9f-461a-b6cc-fd58e5463e88 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Deleted the datastore file {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1027.183481] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-eebc804e-1b9f-461a-b6cc-fd58e5463e88 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 71aead12-a182-40a7-b5a9-91c01271b800] Deleted contents of the VM from datastore datastore2 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1027.183668] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-eebc804e-1b9f-461a-b6cc-fd58e5463e88 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 71aead12-a182-40a7-b5a9-91c01271b800] Instance destroyed {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1027.183849] env[62070]: INFO nova.compute.manager [None req-eebc804e-1b9f-461a-b6cc-fd58e5463e88 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 71aead12-a182-40a7-b5a9-91c01271b800] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1027.184115] env[62070]: DEBUG oslo.service.loopingcall [None req-eebc804e-1b9f-461a-b6cc-fd58e5463e88 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1027.184497] env[62070]: DEBUG nova.compute.manager [-] [instance: 71aead12-a182-40a7-b5a9-91c01271b800] Deallocating network for instance {{(pid=62070) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1027.184594] env[62070]: DEBUG nova.network.neutron [-] [instance: 71aead12-a182-40a7-b5a9-91c01271b800] deallocate_for_instance() {{(pid=62070) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1027.189204] env[62070]: DEBUG oslo_vmware.api [None req-4c27c27b-0ed3-4e75-bd51-ec4e9e507275 tempest-AttachInterfacesUnderV243Test-1418654099 tempest-AttachInterfacesUnderV243Test-1418654099-project-member] Task: {'id': task-1122306, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1027.301578] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d03720b-2d44-46a1-9b83-873c7fa0ca9b {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.308961] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-834a23c4-df5c-4c8e-84a9-795cf6f45e83 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.345074] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0c5dae5-a88c-4377-a170-24920a5f0e78 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.354099] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8a0a4b4-1a76-4571-8ad4-0f8516dfb63c {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.367450] env[62070]: DEBUG nova.compute.provider_tree [None req-da75baf3-0e49-4102-b74a-ac4b6d4ddcb1 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1027.377278] env[62070]: DEBUG oslo_vmware.api [None req-e326279a-fafc-42e7-9e62-9e1b8572336d tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Task: {'id': task-1122300, 'name': CloneVM_Task} progress is 95%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1027.462136] env[62070]: DEBUG nova.network.neutron [None req-a65cb8ad-84af-4b6e-a9a1-bf2808585ba4 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: 6b6e22b9-71fb-4139-993a-7b9fcf89d8e0] Successfully created port: eca3debc-4e8e-4e1b-af00-7eca703ad396 {{(pid=62070) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1027.507554] env[62070]: DEBUG nova.compute.manager [None req-a65cb8ad-84af-4b6e-a9a1-bf2808585ba4 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: 6b6e22b9-71fb-4139-993a-7b9fcf89d8e0] Start building block device mappings for instance. 
{{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1027.560748] env[62070]: DEBUG oslo_concurrency.lockutils [None req-1c62a5a1-6f00-412a-ada6-05392ff9718d tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] Releasing lock "refresh_cache-1ababba6-838c-4ba6-bd83-e2b15aaf4b97" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1027.561094] env[62070]: DEBUG nova.compute.manager [None req-1c62a5a1-6f00-412a-ada6-05392ff9718d tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] [instance: 1ababba6-838c-4ba6-bd83-e2b15aaf4b97] Instance network_info: |[{"id": "df67c123-2618-45ce-8175-66a34206293a", "address": "fa:16:3e:38:62:7a", "network": {"id": "fd8b220c-f20c-489e-9c20-28b886709536", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-452165452-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e1960779e94c4e119497a0c1117f54fc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7bcd9d2d-25c8-41ad-9a4a-93b9029ba993", "external-id": "nsx-vlan-transportzone-734", "segmentation_id": 734, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdf67c123-26", "ovs_interfaceid": "df67c123-2618-45ce-8175-66a34206293a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1027.561603] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-1c62a5a1-6f00-412a-ada6-05392ff9718d tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] [instance: 1ababba6-838c-4ba6-bd83-e2b15aaf4b97] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:38:62:7a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7bcd9d2d-25c8-41ad-9a4a-93b9029ba993', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'df67c123-2618-45ce-8175-66a34206293a', 'vif_model': 'vmxnet3'}] {{(pid=62070) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1027.570515] env[62070]: DEBUG oslo.service.loopingcall [None req-1c62a5a1-6f00-412a-ada6-05392ff9718d tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1027.571266] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1ababba6-838c-4ba6-bd83-e2b15aaf4b97] Creating VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1027.572168] env[62070]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0b386eb3-12c9-4cb4-a2d5-6461f4a019e8 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.593952] env[62070]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1027.593952] env[62070]: value = "task-1122307" [ 1027.593952] env[62070]: _type = "Task" [ 1027.593952] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1027.602790] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1122307, 'name': CreateVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1027.687207] env[62070]: DEBUG oslo_vmware.api [None req-4c27c27b-0ed3-4e75-bd51-ec4e9e507275 tempest-AttachInterfacesUnderV243Test-1418654099 tempest-AttachInterfacesUnderV243Test-1418654099-project-member] Task: {'id': task-1122306, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1027.872520] env[62070]: DEBUG nova.scheduler.client.report [None req-da75baf3-0e49-4102-b74a-ac4b6d4ddcb1 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1027.884956] env[62070]: DEBUG oslo_vmware.api [None req-e326279a-fafc-42e7-9e62-9e1b8572336d tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Task: {'id': task-1122300, 'name': CloneVM_Task} progress is 100%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1028.021943] env[62070]: DEBUG nova.compute.manager [req-d52801f7-83ee-4a6e-93cb-7e46c2ecd4cf req-f7b01be2-00e2-41e3-b7fb-bf4ca2571fb7 service nova] [instance: 71aead12-a182-40a7-b5a9-91c01271b800] Received event network-vif-deleted-a3ed0957-14c2-4144-8d45-f4a0e5cb45ab {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1028.022176] env[62070]: INFO nova.compute.manager [req-d52801f7-83ee-4a6e-93cb-7e46c2ecd4cf req-f7b01be2-00e2-41e3-b7fb-bf4ca2571fb7 service nova] [instance: 71aead12-a182-40a7-b5a9-91c01271b800] Neutron deleted interface a3ed0957-14c2-4144-8d45-f4a0e5cb45ab; detaching it from the instance and deleting it from the info cache [ 1028.022360] env[62070]: DEBUG nova.network.neutron [req-d52801f7-83ee-4a6e-93cb-7e46c2ecd4cf req-f7b01be2-00e2-41e3-b7fb-bf4ca2571fb7 service nova] [instance: 71aead12-a182-40a7-b5a9-91c01271b800] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1028.105084] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1122307, 'name': CreateVM_Task, 'duration_secs': 0.425727} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1028.105270] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1ababba6-838c-4ba6-bd83-e2b15aaf4b97] Created VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1028.105967] env[62070]: DEBUG oslo_concurrency.lockutils [None req-1c62a5a1-6f00-412a-ada6-05392ff9718d tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1028.106164] env[62070]: DEBUG oslo_concurrency.lockutils [None req-1c62a5a1-6f00-412a-ada6-05392ff9718d tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] Acquired lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1028.106478] env[62070]: DEBUG oslo_concurrency.lockutils [None req-1c62a5a1-6f00-412a-ada6-05392ff9718d tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1028.106731] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-25f97359-af25-46d1-9502-3ea860b88efe {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.111748] env[62070]: DEBUG oslo_vmware.api [None req-1c62a5a1-6f00-412a-ada6-05392ff9718d tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] Waiting for the task: (returnval){ [ 1028.111748] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]527068f7-d222-91a7-ec59-50db3136b2d9" [ 1028.111748] env[62070]: _type = "Task" [ 1028.111748] env[62070]: } to complete. 
{{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1028.119525] env[62070]: DEBUG oslo_vmware.api [None req-1c62a5a1-6f00-412a-ada6-05392ff9718d tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]527068f7-d222-91a7-ec59-50db3136b2d9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1028.187860] env[62070]: DEBUG oslo_vmware.api [None req-4c27c27b-0ed3-4e75-bd51-ec4e9e507275 tempest-AttachInterfacesUnderV243Test-1418654099 tempest-AttachInterfacesUnderV243Test-1418654099-project-member] Task: {'id': task-1122306, 'name': ReconfigVM_Task, 'duration_secs': 0.819786} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1028.188218] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-4c27c27b-0ed3-4e75-bd51-ec4e9e507275 tempest-AttachInterfacesUnderV243Test-1418654099 tempest-AttachInterfacesUnderV243Test-1418654099-project-member] [instance: 7bfda953-ac95-4dce-b7a7-c570eae35582] Reconfigured VM instance instance-00000061 to attach disk [datastore1] 7bfda953-ac95-4dce-b7a7-c570eae35582/7bfda953-ac95-4dce-b7a7-c570eae35582.vmdk or device None with type sparse {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1028.188900] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7ed29a49-35c7-441d-b3d4-31c6445595a8 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.195148] env[62070]: DEBUG oslo_vmware.api [None req-4c27c27b-0ed3-4e75-bd51-ec4e9e507275 tempest-AttachInterfacesUnderV243Test-1418654099 tempest-AttachInterfacesUnderV243Test-1418654099-project-member] Waiting for the task: (returnval){ [ 1028.195148] env[62070]: value = "task-1122308" [ 1028.195148] env[62070]: _type = "Task" [ 1028.195148] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1028.205629] env[62070]: DEBUG oslo_vmware.api [None req-4c27c27b-0ed3-4e75-bd51-ec4e9e507275 tempest-AttachInterfacesUnderV243Test-1418654099 tempest-AttachInterfacesUnderV243Test-1418654099-project-member] Task: {'id': task-1122308, 'name': Rename_Task} progress is 5%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1028.377408] env[62070]: DEBUG oslo_vmware.api [None req-e326279a-fafc-42e7-9e62-9e1b8572336d tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Task: {'id': task-1122300, 'name': CloneVM_Task, 'duration_secs': 2.576124} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1028.377699] env[62070]: INFO nova.virt.vmwareapi.vmops [None req-e326279a-fafc-42e7-9e62-9e1b8572336d tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] [instance: 53a1791d-38fd-4721-b82c-2f0922348300] Created linked-clone VM from snapshot [ 1028.378450] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7cc35331-35fc-411b-a865-22c1e3d07113 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.381646] env[62070]: DEBUG oslo_concurrency.lockutils [None req-da75baf3-0e49-4102-b74a-ac4b6d4ddcb1 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.887s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1028.386448] env[62070]: DEBUG oslo_concurrency.lockutils [None req-2dd24fa0-f731-48ce-8a8d-1fa6fec29bbf tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.714s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1028.387944] env[62070]: INFO nova.compute.claims [None req-2dd24fa0-f731-48ce-8a8d-1fa6fec29bbf tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: d65b4f74-7df5-4fb8-baa3-4f5b9b480cb2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1028.392558] env[62070]: DEBUG nova.virt.vmwareapi.images [None req-e326279a-fafc-42e7-9e62-9e1b8572336d tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] [instance: 53a1791d-38fd-4721-b82c-2f0922348300] Uploading image 24ed4c28-b352-4867-857b-17f9624cc455 {{(pid=62070) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:442}} [ 1028.403332] env[62070]: INFO nova.scheduler.client.report [None req-da75baf3-0e49-4102-b74a-ac4b6d4ddcb1 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Deleted allocations for instance 5ec9074b-1237-4404-b13c-a7ca0dbe1d43 [ 1028.418352] env[62070]: DEBUG oslo_vmware.rw_handles [None req-e326279a-fafc-42e7-9e62-9e1b8572336d tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1028.418352] env[62070]: value = "vm-245495" [ 1028.418352] env[62070]: _type = "VirtualMachine" [ 1028.418352] env[62070]: }. 
{{(pid=62070) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1028.418659] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-834d0040-4489-4c47-ab73-7335d79e294c {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.426512] env[62070]: DEBUG oslo_vmware.rw_handles [None req-e326279a-fafc-42e7-9e62-9e1b8572336d tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Lease: (returnval){ [ 1028.426512] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]52225a74-f01d-6ebf-4dff-1c5bba9143ec" [ 1028.426512] env[62070]: _type = "HttpNfcLease" [ 1028.426512] env[62070]: } obtained for exporting VM: (result){ [ 1028.426512] env[62070]: value = "vm-245495" [ 1028.426512] env[62070]: _type = "VirtualMachine" [ 1028.426512] env[62070]: }. {{(pid=62070) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1028.426743] env[62070]: DEBUG oslo_vmware.api [None req-e326279a-fafc-42e7-9e62-9e1b8572336d tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Waiting for the lease: (returnval){ [ 1028.426743] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]52225a74-f01d-6ebf-4dff-1c5bba9143ec" [ 1028.426743] env[62070]: _type = "HttpNfcLease" [ 1028.426743] env[62070]: } to be ready. {{(pid=62070) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1028.433010] env[62070]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1028.433010] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]52225a74-f01d-6ebf-4dff-1c5bba9143ec" [ 1028.433010] env[62070]: _type = "HttpNfcLease" [ 1028.433010] env[62070]: } is initializing. {{(pid=62070) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1028.499544] env[62070]: DEBUG nova.network.neutron [-] [instance: 71aead12-a182-40a7-b5a9-91c01271b800] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1028.522292] env[62070]: DEBUG nova.compute.manager [None req-a65cb8ad-84af-4b6e-a9a1-bf2808585ba4 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: 6b6e22b9-71fb-4139-993a-7b9fcf89d8e0] Start spawning the instance on the hypervisor. 
{{(pid=62070) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1028.528835] env[62070]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c1598ab9-31cf-41a0-9122-3e59b59dca49 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.538078] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a33fa1fc-59b1-49ec-ada4-7e30b1f1a144 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.556552] env[62070]: DEBUG nova.virt.hardware [None req-a65cb8ad-84af-4b6e-a9a1-bf2808585ba4 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T09:21:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T09:21:17Z,direct_url=,disk_format='vmdk',id=43ea607c-7ece-4601-9b11-75c6a16aa7dd,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9d42cb2bbadf40d6b35f237f71234611',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T09:21:18Z,virtual_size=,visibility=), allow threads: False {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1028.556859] env[62070]: DEBUG nova.virt.hardware [None req-a65cb8ad-84af-4b6e-a9a1-bf2808585ba4 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Flavor limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1028.556979] env[62070]: DEBUG nova.virt.hardware [None req-a65cb8ad-84af-4b6e-a9a1-bf2808585ba4 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Image limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1028.557178] env[62070]: DEBUG nova.virt.hardware [None req-a65cb8ad-84af-4b6e-a9a1-bf2808585ba4 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Flavor pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1028.557337] env[62070]: DEBUG nova.virt.hardware [None req-a65cb8ad-84af-4b6e-a9a1-bf2808585ba4 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Image pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1028.557483] env[62070]: DEBUG nova.virt.hardware [None req-a65cb8ad-84af-4b6e-a9a1-bf2808585ba4 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1028.557694] env[62070]: DEBUG nova.virt.hardware [None req-a65cb8ad-84af-4b6e-a9a1-bf2808585ba4 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62070) 
_get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1028.557857] env[62070]: DEBUG nova.virt.hardware [None req-a65cb8ad-84af-4b6e-a9a1-bf2808585ba4 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1028.558079] env[62070]: DEBUG nova.virt.hardware [None req-a65cb8ad-84af-4b6e-a9a1-bf2808585ba4 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Got 1 possible topologies {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1028.558285] env[62070]: DEBUG nova.virt.hardware [None req-a65cb8ad-84af-4b6e-a9a1-bf2808585ba4 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1028.558470] env[62070]: DEBUG nova.virt.hardware [None req-a65cb8ad-84af-4b6e-a9a1-bf2808585ba4 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1028.559258] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45d5ce6d-0241-464e-9649-1d48052e6ca7 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.570638] env[62070]: DEBUG nova.compute.manager [req-d52801f7-83ee-4a6e-93cb-7e46c2ecd4cf req-f7b01be2-00e2-41e3-b7fb-bf4ca2571fb7 service nova] [instance: 71aead12-a182-40a7-b5a9-91c01271b800] Detach interface failed, port_id=a3ed0957-14c2-4144-8d45-f4a0e5cb45ab, reason: Instance 71aead12-a182-40a7-b5a9-91c01271b800 could not be found. {{(pid=62070) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1028.575663] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ae5bb83-85de-4f88-8b8f-9b77264f5de0 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.621402] env[62070]: DEBUG oslo_vmware.api [None req-1c62a5a1-6f00-412a-ada6-05392ff9718d tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]527068f7-d222-91a7-ec59-50db3136b2d9, 'name': SearchDatastore_Task, 'duration_secs': 0.009512} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1028.621690] env[62070]: DEBUG oslo_concurrency.lockutils [None req-1c62a5a1-6f00-412a-ada6-05392ff9718d tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] Releasing lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1028.621938] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-1c62a5a1-6f00-412a-ada6-05392ff9718d tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] [instance: 1ababba6-838c-4ba6-bd83-e2b15aaf4b97] Processing image 43ea607c-7ece-4601-9b11-75c6a16aa7dd {{(pid=62070) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1028.622195] env[62070]: DEBUG oslo_concurrency.lockutils [None req-1c62a5a1-6f00-412a-ada6-05392ff9718d tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1028.622347] env[62070]: DEBUG oslo_concurrency.lockutils [None req-1c62a5a1-6f00-412a-ada6-05392ff9718d tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] Acquired lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1028.622528] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-1c62a5a1-6f00-412a-ada6-05392ff9718d tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1028.622782] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-56ca297f-74ee-4144-9c5e-7117f3f67761 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.630835] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-1c62a5a1-6f00-412a-ada6-05392ff9718d tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1028.631014] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-1c62a5a1-6f00-412a-ada6-05392ff9718d tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62070) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1028.631686] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ee52d18c-2f83-49ce-beaa-7ab8aab360e3 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.638114] env[62070]: DEBUG oslo_vmware.api [None req-1c62a5a1-6f00-412a-ada6-05392ff9718d tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] Waiting for the task: (returnval){ [ 1028.638114] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]5235d481-f223-9d2d-825a-b0b81ea38752" [ 1028.638114] env[62070]: _type = "Task" [ 1028.638114] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1028.645205] env[62070]: DEBUG oslo_vmware.api [None req-1c62a5a1-6f00-412a-ada6-05392ff9718d tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]5235d481-f223-9d2d-825a-b0b81ea38752, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1028.703920] env[62070]: DEBUG oslo_vmware.api [None req-4c27c27b-0ed3-4e75-bd51-ec4e9e507275 tempest-AttachInterfacesUnderV243Test-1418654099 tempest-AttachInterfacesUnderV243Test-1418654099-project-member] Task: {'id': task-1122308, 'name': Rename_Task, 'duration_secs': 0.13783} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1028.704200] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-4c27c27b-0ed3-4e75-bd51-ec4e9e507275 tempest-AttachInterfacesUnderV243Test-1418654099 tempest-AttachInterfacesUnderV243Test-1418654099-project-member] [instance: 7bfda953-ac95-4dce-b7a7-c570eae35582] Powering on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1028.704437] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-85b619af-39d9-4ee1-a21f-464bf6cda065 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.710182] env[62070]: DEBUG oslo_vmware.api [None req-4c27c27b-0ed3-4e75-bd51-ec4e9e507275 tempest-AttachInterfacesUnderV243Test-1418654099 tempest-AttachInterfacesUnderV243Test-1418654099-project-member] Waiting for the task: (returnval){ [ 1028.710182] env[62070]: value = "task-1122310" [ 1028.710182] env[62070]: _type = "Task" [ 1028.710182] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1028.717086] env[62070]: DEBUG oslo_vmware.api [None req-4c27c27b-0ed3-4e75-bd51-ec4e9e507275 tempest-AttachInterfacesUnderV243Test-1418654099 tempest-AttachInterfacesUnderV243Test-1418654099-project-member] Task: {'id': task-1122310, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1028.910912] env[62070]: DEBUG oslo_concurrency.lockutils [None req-da75baf3-0e49-4102-b74a-ac4b6d4ddcb1 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Lock "5ec9074b-1237-4404-b13c-a7ca0dbe1d43" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 12.455s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1028.935298] env[62070]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1028.935298] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]52225a74-f01d-6ebf-4dff-1c5bba9143ec" [ 1028.935298] env[62070]: _type = "HttpNfcLease" [ 1028.935298] env[62070]: } is ready. {{(pid=62070) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1028.935573] env[62070]: DEBUG oslo_vmware.rw_handles [None req-e326279a-fafc-42e7-9e62-9e1b8572336d tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1028.935573] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]52225a74-f01d-6ebf-4dff-1c5bba9143ec" [ 1028.935573] env[62070]: _type = "HttpNfcLease" [ 1028.935573] env[62070]: }. {{(pid=62070) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1028.936443] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bd54257-6b05-4f20-8a5e-6ae882161e70 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.943515] env[62070]: DEBUG oslo_vmware.rw_handles [None req-e326279a-fafc-42e7-9e62-9e1b8572336d tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/527d396b-e006-c5ea-747b-2ea21c31af58/disk-0.vmdk from lease info. {{(pid=62070) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1028.943689] env[62070]: DEBUG oslo_vmware.rw_handles [None req-e326279a-fafc-42e7-9e62-9e1b8572336d tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/527d396b-e006-c5ea-747b-2ea21c31af58/disk-0.vmdk for reading. {{(pid=62070) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1029.001883] env[62070]: INFO nova.compute.manager [-] [instance: 71aead12-a182-40a7-b5a9-91c01271b800] Took 1.82 seconds to deallocate network for instance. [ 1029.039229] env[62070]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-f037aab9-481a-4713-a6ca-360c29862efd {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.151822] env[62070]: DEBUG oslo_vmware.api [None req-1c62a5a1-6f00-412a-ada6-05392ff9718d tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]5235d481-f223-9d2d-825a-b0b81ea38752, 'name': SearchDatastore_Task, 'duration_secs': 0.009166} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1029.153425] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-50ecc9bb-2c1b-4510-bc85-03a1dd858db0 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.159142] env[62070]: DEBUG oslo_vmware.api [None req-1c62a5a1-6f00-412a-ada6-05392ff9718d tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] Waiting for the task: (returnval){ [ 1029.159142] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]52ea6ad3-dca1-4994-0e8c-4b496da99028" [ 1029.159142] env[62070]: _type = "Task" [ 1029.159142] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1029.170426] env[62070]: DEBUG oslo_vmware.api [None req-1c62a5a1-6f00-412a-ada6-05392ff9718d tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52ea6ad3-dca1-4994-0e8c-4b496da99028, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1029.221585] env[62070]: DEBUG oslo_vmware.api [None req-4c27c27b-0ed3-4e75-bd51-ec4e9e507275 tempest-AttachInterfacesUnderV243Test-1418654099 tempest-AttachInterfacesUnderV243Test-1418654099-project-member] Task: {'id': task-1122310, 'name': PowerOnVM_Task} progress is 93%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1029.390525] env[62070]: DEBUG nova.network.neutron [None req-a65cb8ad-84af-4b6e-a9a1-bf2808585ba4 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: 6b6e22b9-71fb-4139-993a-7b9fcf89d8e0] Successfully updated port: eca3debc-4e8e-4e1b-af00-7eca703ad396 {{(pid=62070) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1029.509872] env[62070]: DEBUG oslo_concurrency.lockutils [None req-eebc804e-1b9f-461a-b6cc-fd58e5463e88 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1029.635983] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88883ee7-88f0-4df2-afc9-922ec36515ff {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.644448] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3efcd83-c62f-4432-a2bd-f60877a53196 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.678752] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5eee44e-dc59-4b34-a2d1-78d03ece5dd9 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.687164] env[62070]: DEBUG oslo_vmware.api [None req-1c62a5a1-6f00-412a-ada6-05392ff9718d tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] Task: {'id': 
session[52f37560-87ef-95a6-a0aa-393127576bf7]52ea6ad3-dca1-4994-0e8c-4b496da99028, 'name': SearchDatastore_Task, 'duration_secs': 0.010755} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1029.689311] env[62070]: DEBUG oslo_concurrency.lockutils [None req-1c62a5a1-6f00-412a-ada6-05392ff9718d tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] Releasing lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1029.689610] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-1c62a5a1-6f00-412a-ada6-05392ff9718d tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore1] 1ababba6-838c-4ba6-bd83-e2b15aaf4b97/1ababba6-838c-4ba6-bd83-e2b15aaf4b97.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1029.690085] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-671765b0-8c4b-4fa3-aafd-653eb4617959 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.692933] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9f75a96-e3e9-4e5d-bbdc-c7b4c88fe9fd {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.707213] env[62070]: DEBUG nova.compute.provider_tree [None req-2dd24fa0-f731-48ce-8a8d-1fa6fec29bbf tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1029.709931] env[62070]: DEBUG oslo_vmware.api [None req-1c62a5a1-6f00-412a-ada6-05392ff9718d tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] Waiting for the task: (returnval){ [ 1029.709931] env[62070]: value = "task-1122311" [ 1029.709931] env[62070]: _type = "Task" [ 1029.709931] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1029.721620] env[62070]: DEBUG oslo_vmware.api [None req-1c62a5a1-6f00-412a-ada6-05392ff9718d tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] Task: {'id': task-1122311, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1029.725036] env[62070]: DEBUG oslo_vmware.api [None req-4c27c27b-0ed3-4e75-bd51-ec4e9e507275 tempest-AttachInterfacesUnderV243Test-1418654099 tempest-AttachInterfacesUnderV243Test-1418654099-project-member] Task: {'id': task-1122310, 'name': PowerOnVM_Task, 'duration_secs': 0.588264} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1029.725393] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-4c27c27b-0ed3-4e75-bd51-ec4e9e507275 tempest-AttachInterfacesUnderV243Test-1418654099 tempest-AttachInterfacesUnderV243Test-1418654099-project-member] [instance: 7bfda953-ac95-4dce-b7a7-c570eae35582] Powered on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1029.725637] env[62070]: INFO nova.compute.manager [None req-4c27c27b-0ed3-4e75-bd51-ec4e9e507275 tempest-AttachInterfacesUnderV243Test-1418654099 tempest-AttachInterfacesUnderV243Test-1418654099-project-member] [instance: 7bfda953-ac95-4dce-b7a7-c570eae35582] Took 8.29 seconds to spawn the instance on the hypervisor. [ 1029.725849] env[62070]: DEBUG nova.compute.manager [None req-4c27c27b-0ed3-4e75-bd51-ec4e9e507275 tempest-AttachInterfacesUnderV243Test-1418654099 tempest-AttachInterfacesUnderV243Test-1418654099-project-member] [instance: 7bfda953-ac95-4dce-b7a7-c570eae35582] Checking state {{(pid=62070) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1029.726719] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7bfd890-8714-4bad-b78d-5e2efc0f431e {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.894560] env[62070]: DEBUG oslo_concurrency.lockutils [None req-a65cb8ad-84af-4b6e-a9a1-bf2808585ba4 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Acquiring lock "refresh_cache-6b6e22b9-71fb-4139-993a-7b9fcf89d8e0" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1029.894743] env[62070]: DEBUG oslo_concurrency.lockutils [None req-a65cb8ad-84af-4b6e-a9a1-bf2808585ba4 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Acquired lock "refresh_cache-6b6e22b9-71fb-4139-993a-7b9fcf89d8e0" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1029.894827] env[62070]: DEBUG nova.network.neutron [None req-a65cb8ad-84af-4b6e-a9a1-bf2808585ba4 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: 6b6e22b9-71fb-4139-993a-7b9fcf89d8e0] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1029.918545] env[62070]: DEBUG oslo_concurrency.lockutils [None req-516d037f-acfd-4f04-ae39-a1a5f0a01610 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Acquiring lock "3d22f50a-e1b7-48f9-a044-df64d01dfeb4" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1029.918980] env[62070]: DEBUG oslo_concurrency.lockutils [None req-516d037f-acfd-4f04-ae39-a1a5f0a01610 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Lock "3d22f50a-e1b7-48f9-a044-df64d01dfeb4" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1029.919397] env[62070]: DEBUG oslo_concurrency.lockutils [None 
req-516d037f-acfd-4f04-ae39-a1a5f0a01610 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Acquiring lock "3d22f50a-e1b7-48f9-a044-df64d01dfeb4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1029.919659] env[62070]: DEBUG oslo_concurrency.lockutils [None req-516d037f-acfd-4f04-ae39-a1a5f0a01610 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Lock "3d22f50a-e1b7-48f9-a044-df64d01dfeb4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1029.920079] env[62070]: DEBUG oslo_concurrency.lockutils [None req-516d037f-acfd-4f04-ae39-a1a5f0a01610 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Lock "3d22f50a-e1b7-48f9-a044-df64d01dfeb4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1029.923496] env[62070]: INFO nova.compute.manager [None req-516d037f-acfd-4f04-ae39-a1a5f0a01610 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] [instance: 3d22f50a-e1b7-48f9-a044-df64d01dfeb4] Terminating instance [ 1029.925846] env[62070]: DEBUG nova.compute.manager [None req-516d037f-acfd-4f04-ae39-a1a5f0a01610 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] [instance: 3d22f50a-e1b7-48f9-a044-df64d01dfeb4] Start destroying the instance on the hypervisor. 
{{(pid=62070) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1029.925937] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-516d037f-acfd-4f04-ae39-a1a5f0a01610 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] [instance: 3d22f50a-e1b7-48f9-a044-df64d01dfeb4] Destroying instance {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1029.926902] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b83bcc76-1996-4d3a-8fd2-f6c9252bb041 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.935592] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-516d037f-acfd-4f04-ae39-a1a5f0a01610 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] [instance: 3d22f50a-e1b7-48f9-a044-df64d01dfeb4] Powering off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1029.935976] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b1f63a41-cb02-4fbc-bc5f-8093e1e05811 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.944022] env[62070]: DEBUG oslo_vmware.api [None req-516d037f-acfd-4f04-ae39-a1a5f0a01610 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Waiting for the task: (returnval){ [ 1029.944022] env[62070]: value = "task-1122312" [ 1029.944022] env[62070]: _type = "Task" [ 1029.944022] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1029.953204] env[62070]: DEBUG oslo_vmware.api [None req-516d037f-acfd-4f04-ae39-a1a5f0a01610 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Task: {'id': task-1122312, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1030.054590] env[62070]: DEBUG nova.compute.manager [req-6ed8943d-419d-423a-8b53-472c892cc56c req-d750ea88-7028-4010-bbc8-779213ec01b9 service nova] [instance: 6b6e22b9-71fb-4139-993a-7b9fcf89d8e0] Received event network-vif-plugged-eca3debc-4e8e-4e1b-af00-7eca703ad396 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1030.054931] env[62070]: DEBUG oslo_concurrency.lockutils [req-6ed8943d-419d-423a-8b53-472c892cc56c req-d750ea88-7028-4010-bbc8-779213ec01b9 service nova] Acquiring lock "6b6e22b9-71fb-4139-993a-7b9fcf89d8e0-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1030.055329] env[62070]: DEBUG oslo_concurrency.lockutils [req-6ed8943d-419d-423a-8b53-472c892cc56c req-d750ea88-7028-4010-bbc8-779213ec01b9 service nova] Lock "6b6e22b9-71fb-4139-993a-7b9fcf89d8e0-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1030.055607] env[62070]: DEBUG oslo_concurrency.lockutils [req-6ed8943d-419d-423a-8b53-472c892cc56c req-d750ea88-7028-4010-bbc8-779213ec01b9 service nova] Lock "6b6e22b9-71fb-4139-993a-7b9fcf89d8e0-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1030.055975] env[62070]: DEBUG nova.compute.manager [req-6ed8943d-419d-423a-8b53-472c892cc56c req-d750ea88-7028-4010-bbc8-779213ec01b9 service nova] [instance: 6b6e22b9-71fb-4139-993a-7b9fcf89d8e0] No waiting events found dispatching network-vif-plugged-eca3debc-4e8e-4e1b-af00-7eca703ad396 {{(pid=62070) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1030.056645] env[62070]: WARNING nova.compute.manager [req-6ed8943d-419d-423a-8b53-472c892cc56c req-d750ea88-7028-4010-bbc8-779213ec01b9 service nova] [instance: 6b6e22b9-71fb-4139-993a-7b9fcf89d8e0] Received unexpected event network-vif-plugged-eca3debc-4e8e-4e1b-af00-7eca703ad396 for instance with vm_state building and task_state spawning. [ 1030.056645] env[62070]: DEBUG nova.compute.manager [req-6ed8943d-419d-423a-8b53-472c892cc56c req-d750ea88-7028-4010-bbc8-779213ec01b9 service nova] [instance: 6b6e22b9-71fb-4139-993a-7b9fcf89d8e0] Received event network-changed-eca3debc-4e8e-4e1b-af00-7eca703ad396 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1030.056851] env[62070]: DEBUG nova.compute.manager [req-6ed8943d-419d-423a-8b53-472c892cc56c req-d750ea88-7028-4010-bbc8-779213ec01b9 service nova] [instance: 6b6e22b9-71fb-4139-993a-7b9fcf89d8e0] Refreshing instance network info cache due to event network-changed-eca3debc-4e8e-4e1b-af00-7eca703ad396. 
{{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1030.057069] env[62070]: DEBUG oslo_concurrency.lockutils [req-6ed8943d-419d-423a-8b53-472c892cc56c req-d750ea88-7028-4010-bbc8-779213ec01b9 service nova] Acquiring lock "refresh_cache-6b6e22b9-71fb-4139-993a-7b9fcf89d8e0" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1030.212332] env[62070]: DEBUG nova.scheduler.client.report [None req-2dd24fa0-f731-48ce-8a8d-1fa6fec29bbf tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1030.226396] env[62070]: DEBUG oslo_vmware.api [None req-1c62a5a1-6f00-412a-ada6-05392ff9718d tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] Task: {'id': task-1122311, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.483755} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1030.226467] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-1c62a5a1-6f00-412a-ada6-05392ff9718d tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore1] 1ababba6-838c-4ba6-bd83-e2b15aaf4b97/1ababba6-838c-4ba6-bd83-e2b15aaf4b97.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 1030.226810] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-1c62a5a1-6f00-412a-ada6-05392ff9718d tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] [instance: 1ababba6-838c-4ba6-bd83-e2b15aaf4b97] Extending root virtual disk to 1048576 {{(pid=62070) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1030.227027] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-aa75c552-e4e5-4ff2-bc0b-9ef247a4bae4 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.235337] env[62070]: DEBUG oslo_vmware.api [None req-1c62a5a1-6f00-412a-ada6-05392ff9718d tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] Waiting for the task: (returnval){ [ 1030.235337] env[62070]: value = "task-1122313" [ 1030.235337] env[62070]: _type = "Task" [ 1030.235337] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1030.251189] env[62070]: DEBUG oslo_vmware.api [None req-1c62a5a1-6f00-412a-ada6-05392ff9718d tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] Task: {'id': task-1122313, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1030.251969] env[62070]: INFO nova.compute.manager [None req-4c27c27b-0ed3-4e75-bd51-ec4e9e507275 tempest-AttachInterfacesUnderV243Test-1418654099 tempest-AttachInterfacesUnderV243Test-1418654099-project-member] [instance: 7bfda953-ac95-4dce-b7a7-c570eae35582] Took 17.33 seconds to build instance. [ 1030.425516] env[62070]: DEBUG nova.network.neutron [None req-a65cb8ad-84af-4b6e-a9a1-bf2808585ba4 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: 6b6e22b9-71fb-4139-993a-7b9fcf89d8e0] Instance cache missing network info. {{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1030.456742] env[62070]: DEBUG oslo_vmware.api [None req-516d037f-acfd-4f04-ae39-a1a5f0a01610 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Task: {'id': task-1122312, 'name': PowerOffVM_Task, 'duration_secs': 0.356933} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1030.457118] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-516d037f-acfd-4f04-ae39-a1a5f0a01610 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] [instance: 3d22f50a-e1b7-48f9-a044-df64d01dfeb4] Powered off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1030.457308] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-516d037f-acfd-4f04-ae39-a1a5f0a01610 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] [instance: 3d22f50a-e1b7-48f9-a044-df64d01dfeb4] Unregistering the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1030.457687] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-31a8926b-1397-455f-bd4f-35bb0a46e753 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.589595] env[62070]: DEBUG nova.network.neutron [None req-a65cb8ad-84af-4b6e-a9a1-bf2808585ba4 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: 6b6e22b9-71fb-4139-993a-7b9fcf89d8e0] Updating instance_info_cache with network_info: [{"id": "eca3debc-4e8e-4e1b-af00-7eca703ad396", "address": "fa:16:3e:e5:7c:d2", "network": {"id": "516790be-56b8-409d-b1c0-a8683a45a9ec", "bridge": "br-int", "label": "tempest-ServersTestJSON-693737631-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "91e246e32f29422e90fae974cfee9d8f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "359850cc-b061-4c9c-a61c-eb42e0f7c359", "external-id": "nsx-vlan-transportzone-113", "segmentation_id": 113, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeca3debc-4e", "ovs_interfaceid": "eca3debc-4e8e-4e1b-af00-7eca703ad396", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", 
"profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1030.644454] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-516d037f-acfd-4f04-ae39-a1a5f0a01610 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] [instance: 3d22f50a-e1b7-48f9-a044-df64d01dfeb4] Unregistered the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1030.644853] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-516d037f-acfd-4f04-ae39-a1a5f0a01610 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] [instance: 3d22f50a-e1b7-48f9-a044-df64d01dfeb4] Deleting contents of the VM from datastore datastore1 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1030.645224] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-516d037f-acfd-4f04-ae39-a1a5f0a01610 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Deleting the datastore file [datastore1] 3d22f50a-e1b7-48f9-a044-df64d01dfeb4 {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1030.645579] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-611977ba-3319-49c5-a624-c7303d6836e4 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.652641] env[62070]: DEBUG oslo_vmware.api [None req-516d037f-acfd-4f04-ae39-a1a5f0a01610 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Waiting for the task: (returnval){ [ 1030.652641] env[62070]: value = "task-1122315" [ 1030.652641] env[62070]: _type = "Task" [ 1030.652641] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1030.660763] env[62070]: DEBUG oslo_vmware.api [None req-516d037f-acfd-4f04-ae39-a1a5f0a01610 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Task: {'id': task-1122315, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1030.721473] env[62070]: DEBUG oslo_concurrency.lockutils [None req-2dd24fa0-f731-48ce-8a8d-1fa6fec29bbf tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.335s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1030.722091] env[62070]: DEBUG nova.compute.manager [None req-2dd24fa0-f731-48ce-8a8d-1fa6fec29bbf tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: d65b4f74-7df5-4fb8-baa3-4f5b9b480cb2] Start building networks asynchronously for instance. 
{{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1030.724973] env[62070]: DEBUG oslo_concurrency.lockutils [None req-18f82941-8172-482c-8da4-c91042c7d115 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 8.827s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1030.725511] env[62070]: DEBUG oslo_concurrency.lockutils [None req-18f82941-8172-482c-8da4-c91042c7d115 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1030.728315] env[62070]: DEBUG oslo_concurrency.lockutils [None req-c1634d99-99d1-47c7-a295-f741e79f98f2 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 8.638s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1030.728770] env[62070]: DEBUG nova.objects.instance [None req-c1634d99-99d1-47c7-a295-f741e79f98f2 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Lazy-loading 'resources' on Instance uuid 84c00e4a-20d3-4739-8535-e27076d85a89 {{(pid=62070) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1030.749620] env[62070]: DEBUG oslo_vmware.api [None req-1c62a5a1-6f00-412a-ada6-05392ff9718d tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] Task: {'id': task-1122313, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066978} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1030.749620] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-1c62a5a1-6f00-412a-ada6-05392ff9718d tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] [instance: 1ababba6-838c-4ba6-bd83-e2b15aaf4b97] Extended root virtual disk {{(pid=62070) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1030.749620] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-166c1d3b-1382-476d-b763-8adf8301ee43 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.754515] env[62070]: DEBUG oslo_concurrency.lockutils [None req-4c27c27b-0ed3-4e75-bd51-ec4e9e507275 tempest-AttachInterfacesUnderV243Test-1418654099 tempest-AttachInterfacesUnderV243Test-1418654099-project-member] Lock "7bfda953-ac95-4dce-b7a7-c570eae35582" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 18.838s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1030.774977] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-1c62a5a1-6f00-412a-ada6-05392ff9718d tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] [instance: 1ababba6-838c-4ba6-bd83-e2b15aaf4b97] Reconfiguring VM instance instance-00000062 to attach disk [datastore1] 1ababba6-838c-4ba6-bd83-e2b15aaf4b97/1ababba6-838c-4ba6-bd83-e2b15aaf4b97.vmdk or device None with type sparse {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1030.776223] env[62070]: INFO nova.scheduler.client.report [None req-18f82941-8172-482c-8da4-c91042c7d115 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Deleted allocations for instance b101c79a-abfd-4104-aaed-096995fb2337 [ 1030.777538] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-854d4552-2377-475c-8511-0dc18da491df {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.803191] env[62070]: DEBUG oslo_vmware.api [None req-1c62a5a1-6f00-412a-ada6-05392ff9718d tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] Waiting for the task: (returnval){ [ 1030.803191] env[62070]: value = "task-1122316" [ 1030.803191] env[62070]: _type = "Task" [ 1030.803191] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1030.811409] env[62070]: DEBUG oslo_vmware.api [None req-1c62a5a1-6f00-412a-ada6-05392ff9718d tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] Task: {'id': task-1122316, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1030.992703] env[62070]: DEBUG nova.compute.manager [req-bcb49941-995a-4dfd-a7a9-306bc076cc60 req-b5d927d2-5173-4316-98ff-90ac2ff758a6 service nova] [instance: 7bfda953-ac95-4dce-b7a7-c570eae35582] Received event network-changed-8372f59a-de9e-4062-be0b-39996b8f6c9e {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1030.993115] env[62070]: DEBUG nova.compute.manager [req-bcb49941-995a-4dfd-a7a9-306bc076cc60 req-b5d927d2-5173-4316-98ff-90ac2ff758a6 service nova] [instance: 7bfda953-ac95-4dce-b7a7-c570eae35582] Refreshing instance network info cache due to event network-changed-8372f59a-de9e-4062-be0b-39996b8f6c9e. {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1030.993434] env[62070]: DEBUG oslo_concurrency.lockutils [req-bcb49941-995a-4dfd-a7a9-306bc076cc60 req-b5d927d2-5173-4316-98ff-90ac2ff758a6 service nova] Acquiring lock "refresh_cache-7bfda953-ac95-4dce-b7a7-c570eae35582" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1030.993698] env[62070]: DEBUG oslo_concurrency.lockutils [req-bcb49941-995a-4dfd-a7a9-306bc076cc60 req-b5d927d2-5173-4316-98ff-90ac2ff758a6 service nova] Acquired lock "refresh_cache-7bfda953-ac95-4dce-b7a7-c570eae35582" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1030.993926] env[62070]: DEBUG nova.network.neutron [req-bcb49941-995a-4dfd-a7a9-306bc076cc60 req-b5d927d2-5173-4316-98ff-90ac2ff758a6 service nova] [instance: 7bfda953-ac95-4dce-b7a7-c570eae35582] Refreshing network info cache for port 8372f59a-de9e-4062-be0b-39996b8f6c9e {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1031.095022] env[62070]: DEBUG oslo_concurrency.lockutils [None req-a65cb8ad-84af-4b6e-a9a1-bf2808585ba4 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Releasing lock "refresh_cache-6b6e22b9-71fb-4139-993a-7b9fcf89d8e0" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1031.095022] env[62070]: DEBUG nova.compute.manager [None req-a65cb8ad-84af-4b6e-a9a1-bf2808585ba4 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: 6b6e22b9-71fb-4139-993a-7b9fcf89d8e0] Instance network_info: |[{"id": "eca3debc-4e8e-4e1b-af00-7eca703ad396", "address": "fa:16:3e:e5:7c:d2", "network": {"id": "516790be-56b8-409d-b1c0-a8683a45a9ec", "bridge": "br-int", "label": "tempest-ServersTestJSON-693737631-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "91e246e32f29422e90fae974cfee9d8f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "359850cc-b061-4c9c-a61c-eb42e0f7c359", "external-id": "nsx-vlan-transportzone-113", "segmentation_id": 113, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeca3debc-4e", "ovs_interfaceid": "eca3debc-4e8e-4e1b-af00-7eca703ad396", "qbh_params": null, "qbg_params": null, "active": 
true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1031.095022] env[62070]: DEBUG oslo_concurrency.lockutils [req-6ed8943d-419d-423a-8b53-472c892cc56c req-d750ea88-7028-4010-bbc8-779213ec01b9 service nova] Acquired lock "refresh_cache-6b6e22b9-71fb-4139-993a-7b9fcf89d8e0" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1031.095022] env[62070]: DEBUG nova.network.neutron [req-6ed8943d-419d-423a-8b53-472c892cc56c req-d750ea88-7028-4010-bbc8-779213ec01b9 service nova] [instance: 6b6e22b9-71fb-4139-993a-7b9fcf89d8e0] Refreshing network info cache for port eca3debc-4e8e-4e1b-af00-7eca703ad396 {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1031.095022] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-a65cb8ad-84af-4b6e-a9a1-bf2808585ba4 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: 6b6e22b9-71fb-4139-993a-7b9fcf89d8e0] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e5:7c:d2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '359850cc-b061-4c9c-a61c-eb42e0f7c359', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'eca3debc-4e8e-4e1b-af00-7eca703ad396', 'vif_model': 'vmxnet3'}] {{(pid=62070) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1031.110853] env[62070]: DEBUG oslo.service.loopingcall [None req-a65cb8ad-84af-4b6e-a9a1-bf2808585ba4 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1031.112258] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6b6e22b9-71fb-4139-993a-7b9fcf89d8e0] Creating VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1031.112652] env[62070]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5b9bc9b5-ba03-4be6-981f-dd5c059eab4e {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.137052] env[62070]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1031.137052] env[62070]: value = "task-1122317" [ 1031.137052] env[62070]: _type = "Task" [ 1031.137052] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1031.145787] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1122317, 'name': CreateVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1031.162693] env[62070]: DEBUG oslo_vmware.api [None req-516d037f-acfd-4f04-ae39-a1a5f0a01610 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Task: {'id': task-1122315, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.142929} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1031.163151] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-516d037f-acfd-4f04-ae39-a1a5f0a01610 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Deleted the datastore file {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1031.163440] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-516d037f-acfd-4f04-ae39-a1a5f0a01610 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] [instance: 3d22f50a-e1b7-48f9-a044-df64d01dfeb4] Deleted contents of the VM from datastore datastore1 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1031.163690] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-516d037f-acfd-4f04-ae39-a1a5f0a01610 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] [instance: 3d22f50a-e1b7-48f9-a044-df64d01dfeb4] Instance destroyed {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1031.163928] env[62070]: INFO nova.compute.manager [None req-516d037f-acfd-4f04-ae39-a1a5f0a01610 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] [instance: 3d22f50a-e1b7-48f9-a044-df64d01dfeb4] Took 1.24 seconds to destroy the instance on the hypervisor. [ 1031.164247] env[62070]: DEBUG oslo.service.loopingcall [None req-516d037f-acfd-4f04-ae39-a1a5f0a01610 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1031.164515] env[62070]: DEBUG nova.compute.manager [-] [instance: 3d22f50a-e1b7-48f9-a044-df64d01dfeb4] Deallocating network for instance {{(pid=62070) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1031.164659] env[62070]: DEBUG nova.network.neutron [-] [instance: 3d22f50a-e1b7-48f9-a044-df64d01dfeb4] deallocate_for_instance() {{(pid=62070) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1031.232910] env[62070]: DEBUG nova.compute.utils [None req-2dd24fa0-f731-48ce-8a8d-1fa6fec29bbf tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Using /dev/sd instead of None {{(pid=62070) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1031.240310] env[62070]: DEBUG nova.compute.manager [None req-2dd24fa0-f731-48ce-8a8d-1fa6fec29bbf tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: d65b4f74-7df5-4fb8-baa3-4f5b9b480cb2] Allocating IP information in the background. 
{{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1031.240310] env[62070]: DEBUG nova.network.neutron [None req-2dd24fa0-f731-48ce-8a8d-1fa6fec29bbf tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: d65b4f74-7df5-4fb8-baa3-4f5b9b480cb2] allocate_for_instance() {{(pid=62070) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1031.302942] env[62070]: DEBUG oslo_concurrency.lockutils [None req-18f82941-8172-482c-8da4-c91042c7d115 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Lock "b101c79a-abfd-4104-aaed-096995fb2337" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 12.326s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1031.315625] env[62070]: DEBUG oslo_vmware.api [None req-1c62a5a1-6f00-412a-ada6-05392ff9718d tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] Task: {'id': task-1122316, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1031.351164] env[62070]: DEBUG nova.policy [None req-2dd24fa0-f731-48ce-8a8d-1fa6fec29bbf tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '85707399ddf04d03bfb487560df1212e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd079c0ef3ed745fcaf69dc728dca4466', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62070) authorize /opt/stack/nova/nova/policy.py:201}} [ 1031.537780] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdacab63-5eac-4b8d-b80b-001e64a8d703 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.549379] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aafeb36a-65ec-40d3-97c3-517bcfd30475 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.587079] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a78b49a-bed3-45f1-b7d3-41c4e2333934 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.597598] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-330e3f0a-5ff5-49e2-8365-417396d25601 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.617915] env[62070]: DEBUG nova.compute.provider_tree [None req-c1634d99-99d1-47c7-a295-f741e79f98f2 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1031.648528] env[62070]: DEBUG oslo_vmware.api 
[-] Task: {'id': task-1122317, 'name': CreateVM_Task, 'duration_secs': 0.508841} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1031.648883] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6b6e22b9-71fb-4139-993a-7b9fcf89d8e0] Created VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1031.649831] env[62070]: DEBUG oslo_concurrency.lockutils [None req-a65cb8ad-84af-4b6e-a9a1-bf2808585ba4 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1031.650129] env[62070]: DEBUG oslo_concurrency.lockutils [None req-a65cb8ad-84af-4b6e-a9a1-bf2808585ba4 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Acquired lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1031.650559] env[62070]: DEBUG oslo_concurrency.lockutils [None req-a65cb8ad-84af-4b6e-a9a1-bf2808585ba4 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1031.651044] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7f4a09fe-163e-4381-aaa8-7574d40f6b89 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.656594] env[62070]: DEBUG oslo_vmware.api [None req-a65cb8ad-84af-4b6e-a9a1-bf2808585ba4 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Waiting for the task: (returnval){ [ 1031.656594] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]529d8df5-3231-1be8-871a-2588d43f9164" [ 1031.656594] env[62070]: _type = "Task" [ 1031.656594] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1031.666602] env[62070]: DEBUG oslo_vmware.api [None req-a65cb8ad-84af-4b6e-a9a1-bf2808585ba4 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]529d8df5-3231-1be8-871a-2588d43f9164, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1031.743019] env[62070]: DEBUG nova.compute.manager [None req-2dd24fa0-f731-48ce-8a8d-1fa6fec29bbf tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: d65b4f74-7df5-4fb8-baa3-4f5b9b480cb2] Start building block device mappings for instance. {{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1031.817665] env[62070]: DEBUG oslo_vmware.api [None req-1c62a5a1-6f00-412a-ada6-05392ff9718d tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] Task: {'id': task-1122316, 'name': ReconfigVM_Task, 'duration_secs': 0.660637} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1031.818115] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-1c62a5a1-6f00-412a-ada6-05392ff9718d tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] [instance: 1ababba6-838c-4ba6-bd83-e2b15aaf4b97] Reconfigured VM instance instance-00000062 to attach disk [datastore1] 1ababba6-838c-4ba6-bd83-e2b15aaf4b97/1ababba6-838c-4ba6-bd83-e2b15aaf4b97.vmdk or device None with type sparse {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1031.818894] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-72ac871b-2e0a-4c8a-87dc-9f69cd4ef1b7 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.825621] env[62070]: DEBUG nova.network.neutron [None req-2dd24fa0-f731-48ce-8a8d-1fa6fec29bbf tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: d65b4f74-7df5-4fb8-baa3-4f5b9b480cb2] Successfully created port: e07c1b27-68ca-4e3b-8554-fb5fabd9c0f4 {{(pid=62070) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1031.829575] env[62070]: DEBUG oslo_vmware.api [None req-1c62a5a1-6f00-412a-ada6-05392ff9718d tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] Waiting for the task: (returnval){ [ 1031.829575] env[62070]: value = "task-1122318" [ 1031.829575] env[62070]: _type = "Task" [ 1031.829575] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1031.838835] env[62070]: DEBUG oslo_vmware.api [None req-1c62a5a1-6f00-412a-ada6-05392ff9718d tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] Task: {'id': task-1122318, 'name': Rename_Task} progress is 5%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1031.907480] env[62070]: DEBUG nova.network.neutron [req-bcb49941-995a-4dfd-a7a9-306bc076cc60 req-b5d927d2-5173-4316-98ff-90ac2ff758a6 service nova] [instance: 7bfda953-ac95-4dce-b7a7-c570eae35582] Updated VIF entry in instance network info cache for port 8372f59a-de9e-4062-be0b-39996b8f6c9e. 
{{(pid=62070) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1031.907948] env[62070]: DEBUG nova.network.neutron [req-bcb49941-995a-4dfd-a7a9-306bc076cc60 req-b5d927d2-5173-4316-98ff-90ac2ff758a6 service nova] [instance: 7bfda953-ac95-4dce-b7a7-c570eae35582] Updating instance_info_cache with network_info: [{"id": "8372f59a-de9e-4062-be0b-39996b8f6c9e", "address": "fa:16:3e:5b:b2:cf", "network": {"id": "b8a611af-59c0-4ce4-9889-55902a16c816", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-841643028-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.187", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3587df64b22e4cfc8220532cdda18c28", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f4a795c-8718-4a7c-aafe-9da231df10f8", "external-id": "nsx-vlan-transportzone-162", "segmentation_id": 162, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8372f59a-de", "ovs_interfaceid": "8372f59a-de9e-4062-be0b-39996b8f6c9e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1031.966072] env[62070]: DEBUG nova.network.neutron [req-6ed8943d-419d-423a-8b53-472c892cc56c req-d750ea88-7028-4010-bbc8-779213ec01b9 service nova] [instance: 6b6e22b9-71fb-4139-993a-7b9fcf89d8e0] Updated VIF entry in instance network info cache for port eca3debc-4e8e-4e1b-af00-7eca703ad396. 
{{(pid=62070) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1031.966072] env[62070]: DEBUG nova.network.neutron [req-6ed8943d-419d-423a-8b53-472c892cc56c req-d750ea88-7028-4010-bbc8-779213ec01b9 service nova] [instance: 6b6e22b9-71fb-4139-993a-7b9fcf89d8e0] Updating instance_info_cache with network_info: [{"id": "eca3debc-4e8e-4e1b-af00-7eca703ad396", "address": "fa:16:3e:e5:7c:d2", "network": {"id": "516790be-56b8-409d-b1c0-a8683a45a9ec", "bridge": "br-int", "label": "tempest-ServersTestJSON-693737631-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "91e246e32f29422e90fae974cfee9d8f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "359850cc-b061-4c9c-a61c-eb42e0f7c359", "external-id": "nsx-vlan-transportzone-113", "segmentation_id": 113, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeca3debc-4e", "ovs_interfaceid": "eca3debc-4e8e-4e1b-af00-7eca703ad396", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1032.093590] env[62070]: DEBUG nova.compute.manager [req-6512bafa-7155-4c38-81f1-e62de8476756 req-1b395d07-c5ec-4dac-82cf-9b9c19180ba0 service nova] [instance: 3d22f50a-e1b7-48f9-a044-df64d01dfeb4] Received event network-vif-deleted-aef8b9b0-4bbd-4af6-b65d-f7e964775fd4 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1032.093590] env[62070]: INFO nova.compute.manager [req-6512bafa-7155-4c38-81f1-e62de8476756 req-1b395d07-c5ec-4dac-82cf-9b9c19180ba0 service nova] [instance: 3d22f50a-e1b7-48f9-a044-df64d01dfeb4] Neutron deleted interface aef8b9b0-4bbd-4af6-b65d-f7e964775fd4; detaching it from the instance and deleting it from the info cache [ 1032.093590] env[62070]: DEBUG nova.network.neutron [req-6512bafa-7155-4c38-81f1-e62de8476756 req-1b395d07-c5ec-4dac-82cf-9b9c19180ba0 service nova] [instance: 3d22f50a-e1b7-48f9-a044-df64d01dfeb4] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1032.123117] env[62070]: DEBUG nova.scheduler.client.report [None req-c1634d99-99d1-47c7-a295-f741e79f98f2 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1032.133270] env[62070]: DEBUG nova.network.neutron [-] [instance: 3d22f50a-e1b7-48f9-a044-df64d01dfeb4] Updating instance_info_cache with network_info: [] 
{{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1032.168931] env[62070]: DEBUG oslo_vmware.api [None req-a65cb8ad-84af-4b6e-a9a1-bf2808585ba4 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]529d8df5-3231-1be8-871a-2588d43f9164, 'name': SearchDatastore_Task, 'duration_secs': 0.010146} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1032.169287] env[62070]: DEBUG oslo_concurrency.lockutils [None req-a65cb8ad-84af-4b6e-a9a1-bf2808585ba4 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Releasing lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1032.169538] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-a65cb8ad-84af-4b6e-a9a1-bf2808585ba4 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: 6b6e22b9-71fb-4139-993a-7b9fcf89d8e0] Processing image 43ea607c-7ece-4601-9b11-75c6a16aa7dd {{(pid=62070) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1032.169784] env[62070]: DEBUG oslo_concurrency.lockutils [None req-a65cb8ad-84af-4b6e-a9a1-bf2808585ba4 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1032.169935] env[62070]: DEBUG oslo_concurrency.lockutils [None req-a65cb8ad-84af-4b6e-a9a1-bf2808585ba4 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Acquired lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1032.170143] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-a65cb8ad-84af-4b6e-a9a1-bf2808585ba4 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1032.170457] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-805e19c5-b6ef-4003-b645-2e1f9d51192f {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.179705] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-a65cb8ad-84af-4b6e-a9a1-bf2808585ba4 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1032.179927] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-a65cb8ad-84af-4b6e-a9a1-bf2808585ba4 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62070) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1032.180656] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-318c94dd-1f09-48f2-b03e-55b8066914f6 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.186251] env[62070]: DEBUG oslo_vmware.api [None req-a65cb8ad-84af-4b6e-a9a1-bf2808585ba4 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Waiting for the task: (returnval){ [ 1032.186251] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]522a4d38-4f2f-d5bb-d27d-7f4545b60e66" [ 1032.186251] env[62070]: _type = "Task" [ 1032.186251] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1032.195284] env[62070]: DEBUG oslo_vmware.api [None req-a65cb8ad-84af-4b6e-a9a1-bf2808585ba4 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]522a4d38-4f2f-d5bb-d27d-7f4545b60e66, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1032.340759] env[62070]: DEBUG oslo_vmware.api [None req-1c62a5a1-6f00-412a-ada6-05392ff9718d tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] Task: {'id': task-1122318, 'name': Rename_Task, 'duration_secs': 0.2129} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1032.341055] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-1c62a5a1-6f00-412a-ada6-05392ff9718d tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] [instance: 1ababba6-838c-4ba6-bd83-e2b15aaf4b97] Powering on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1032.341319] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e05c77f1-fd48-442f-b6b2-6b0557a2f510 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.348210] env[62070]: DEBUG oslo_vmware.api [None req-1c62a5a1-6f00-412a-ada6-05392ff9718d tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] Waiting for the task: (returnval){ [ 1032.348210] env[62070]: value = "task-1122319" [ 1032.348210] env[62070]: _type = "Task" [ 1032.348210] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1032.358718] env[62070]: DEBUG oslo_vmware.api [None req-1c62a5a1-6f00-412a-ada6-05392ff9718d tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] Task: {'id': task-1122319, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1032.411280] env[62070]: DEBUG oslo_concurrency.lockutils [req-bcb49941-995a-4dfd-a7a9-306bc076cc60 req-b5d927d2-5173-4316-98ff-90ac2ff758a6 service nova] Releasing lock "refresh_cache-7bfda953-ac95-4dce-b7a7-c570eae35582" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1032.467135] env[62070]: DEBUG oslo_concurrency.lockutils [req-6ed8943d-419d-423a-8b53-472c892cc56c req-d750ea88-7028-4010-bbc8-779213ec01b9 service nova] Releasing lock "refresh_cache-6b6e22b9-71fb-4139-993a-7b9fcf89d8e0" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1032.595835] env[62070]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ccb4ad5b-d0c2-47a7-8dd6-38fd928e4881 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.606424] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df23bbbc-c100-4a8f-8e29-63b64c7565f8 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.627208] env[62070]: DEBUG oslo_concurrency.lockutils [None req-c1634d99-99d1-47c7-a295-f741e79f98f2 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.899s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1032.640535] env[62070]: DEBUG oslo_concurrency.lockutils [None req-d0ef37a9-6db8-476c-932a-937fc6ece311 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 9.320s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1032.642284] env[62070]: INFO nova.compute.claims [None req-d0ef37a9-6db8-476c-932a-937fc6ece311 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] [instance: 000a67eb-9535-4da6-816a-b61126f11509] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1032.645232] env[62070]: INFO nova.compute.manager [-] [instance: 3d22f50a-e1b7-48f9-a044-df64d01dfeb4] Took 1.48 seconds to deallocate network for instance. [ 1032.645538] env[62070]: DEBUG nova.compute.manager [req-6512bafa-7155-4c38-81f1-e62de8476756 req-1b395d07-c5ec-4dac-82cf-9b9c19180ba0 service nova] [instance: 3d22f50a-e1b7-48f9-a044-df64d01dfeb4] Detach interface failed, port_id=aef8b9b0-4bbd-4af6-b65d-f7e964775fd4, reason: Instance 3d22f50a-e1b7-48f9-a044-df64d01dfeb4 could not be found. 
{{(pid=62070) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1032.659310] env[62070]: INFO nova.scheduler.client.report [None req-c1634d99-99d1-47c7-a295-f741e79f98f2 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Deleted allocations for instance 84c00e4a-20d3-4739-8535-e27076d85a89 [ 1032.698465] env[62070]: DEBUG oslo_vmware.api [None req-a65cb8ad-84af-4b6e-a9a1-bf2808585ba4 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]522a4d38-4f2f-d5bb-d27d-7f4545b60e66, 'name': SearchDatastore_Task, 'duration_secs': 0.009217} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1032.699971] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9f3cd9fd-57ff-4639-ac4d-d582144f696a {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.706810] env[62070]: DEBUG oslo_vmware.api [None req-a65cb8ad-84af-4b6e-a9a1-bf2808585ba4 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Waiting for the task: (returnval){ [ 1032.706810] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]52d75129-85d8-e649-19ce-68160da37804" [ 1032.706810] env[62070]: _type = "Task" [ 1032.706810] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1032.718199] env[62070]: DEBUG oslo_vmware.api [None req-a65cb8ad-84af-4b6e-a9a1-bf2808585ba4 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52d75129-85d8-e649-19ce-68160da37804, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1032.757640] env[62070]: DEBUG nova.compute.manager [None req-2dd24fa0-f731-48ce-8a8d-1fa6fec29bbf tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: d65b4f74-7df5-4fb8-baa3-4f5b9b480cb2] Start spawning the instance on the hypervisor. 
{{(pid=62070) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1032.783469] env[62070]: DEBUG nova.virt.hardware [None req-2dd24fa0-f731-48ce-8a8d-1fa6fec29bbf tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T09:21:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T09:21:17Z,direct_url=,disk_format='vmdk',id=43ea607c-7ece-4601-9b11-75c6a16aa7dd,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9d42cb2bbadf40d6b35f237f71234611',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T09:21:18Z,virtual_size=,visibility=), allow threads: False {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1032.783826] env[62070]: DEBUG nova.virt.hardware [None req-2dd24fa0-f731-48ce-8a8d-1fa6fec29bbf tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Flavor limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1032.784045] env[62070]: DEBUG nova.virt.hardware [None req-2dd24fa0-f731-48ce-8a8d-1fa6fec29bbf tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Image limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1032.784266] env[62070]: DEBUG nova.virt.hardware [None req-2dd24fa0-f731-48ce-8a8d-1fa6fec29bbf tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Flavor pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1032.784493] env[62070]: DEBUG nova.virt.hardware [None req-2dd24fa0-f731-48ce-8a8d-1fa6fec29bbf tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Image pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1032.784726] env[62070]: DEBUG nova.virt.hardware [None req-2dd24fa0-f731-48ce-8a8d-1fa6fec29bbf tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1032.784986] env[62070]: DEBUG nova.virt.hardware [None req-2dd24fa0-f731-48ce-8a8d-1fa6fec29bbf tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1032.785314] env[62070]: DEBUG nova.virt.hardware [None req-2dd24fa0-f731-48ce-8a8d-1fa6fec29bbf tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1032.785497] 
env[62070]: DEBUG nova.virt.hardware [None req-2dd24fa0-f731-48ce-8a8d-1fa6fec29bbf tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Got 1 possible topologies {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1032.785644] env[62070]: DEBUG nova.virt.hardware [None req-2dd24fa0-f731-48ce-8a8d-1fa6fec29bbf tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1032.785830] env[62070]: DEBUG nova.virt.hardware [None req-2dd24fa0-f731-48ce-8a8d-1fa6fec29bbf tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1032.787195] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e36bc5a-8cfb-4c88-9d1d-6ca9b6ad61f7 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.797428] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c904c30b-4c9f-4ea8-89ca-de8a2b4ea0c1 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.858704] env[62070]: DEBUG oslo_vmware.api [None req-1c62a5a1-6f00-412a-ada6-05392ff9718d tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] Task: {'id': task-1122319, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1033.152512] env[62070]: DEBUG oslo_concurrency.lockutils [None req-516d037f-acfd-4f04-ae39-a1a5f0a01610 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1033.168675] env[62070]: DEBUG oslo_concurrency.lockutils [None req-c1634d99-99d1-47c7-a295-f741e79f98f2 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Lock "84c00e4a-20d3-4739-8535-e27076d85a89" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 15.573s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1033.220467] env[62070]: DEBUG oslo_vmware.api [None req-a65cb8ad-84af-4b6e-a9a1-bf2808585ba4 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52d75129-85d8-e649-19ce-68160da37804, 'name': SearchDatastore_Task, 'duration_secs': 0.013476} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1033.220773] env[62070]: DEBUG oslo_concurrency.lockutils [None req-a65cb8ad-84af-4b6e-a9a1-bf2808585ba4 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Releasing lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1033.221513] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-a65cb8ad-84af-4b6e-a9a1-bf2808585ba4 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore2] 6b6e22b9-71fb-4139-993a-7b9fcf89d8e0/6b6e22b9-71fb-4139-993a-7b9fcf89d8e0.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1033.221513] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-40bfacbc-5926-40e9-a5de-dca3745bdc75 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.227929] env[62070]: DEBUG oslo_vmware.api [None req-a65cb8ad-84af-4b6e-a9a1-bf2808585ba4 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Waiting for the task: (returnval){ [ 1033.227929] env[62070]: value = "task-1122320" [ 1033.227929] env[62070]: _type = "Task" [ 1033.227929] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1033.236250] env[62070]: DEBUG oslo_vmware.api [None req-a65cb8ad-84af-4b6e-a9a1-bf2808585ba4 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Task: {'id': task-1122320, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1033.360836] env[62070]: DEBUG oslo_vmware.api [None req-1c62a5a1-6f00-412a-ada6-05392ff9718d tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] Task: {'id': task-1122319, 'name': PowerOnVM_Task, 'duration_secs': 0.807152} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1033.361323] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-1c62a5a1-6f00-412a-ada6-05392ff9718d tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] [instance: 1ababba6-838c-4ba6-bd83-e2b15aaf4b97] Powered on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1033.362104] env[62070]: INFO nova.compute.manager [None req-1c62a5a1-6f00-412a-ada6-05392ff9718d tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] [instance: 1ababba6-838c-4ba6-bd83-e2b15aaf4b97] Took 9.54 seconds to spawn the instance on the hypervisor. 
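The entries above repeat two timing signals: oslo.vmware task completions (each logged with a duration_secs once the poll loop sees the task finish) and oslo.concurrency lock releases (each logged with how long the lock was held and how long the caller waited). A minimal sketch for summarizing both, assuming only the default line format shown in this log; the script and helper names below are illustrative and not part of Nova or oslo:

    # summarize_nova_log.py -- illustrative sketch; reads nova-compute log lines
    # like the ones above from stdin and prints per-task and per-lock timing stats.
    import re
    import sys
    from collections import defaultdict

    # Matches e.g. "Task: {'id': task-1122319, 'name': PowerOnVM_Task,
    # 'duration_secs': 0.807152} completed successfully."
    TASK_DONE = re.compile(
        r"Task: \{'id': (?P<id>[^,]+), 'name': (?P<name>\w+), "
        r"'duration_secs': (?P<secs>[\d.]+)\} completed successfully"
    )
    # Matches e.g. 'Lock "compute_resources" "released" by "..." :: held 1.899s'
    LOCK_HELD = re.compile(
        r'Lock "(?P<lock>[^"]+)" "released" by "[^"]+" :: held (?P<secs>[\d.]+)s'
    )

    def summarize(lines):
        """Collect completed-task and lock-hold durations keyed by name."""
        tasks, locks = defaultdict(list), defaultdict(list)
        for line in lines:
            m = TASK_DONE.search(line)
            if m:
                tasks[m.group("name")].append(float(m.group("secs")))
            m = LOCK_HELD.search(line)
            if m:
                locks[m.group("lock")].append(float(m.group("secs")))
        return tasks, locks

    if __name__ == "__main__":
        tasks, locks = summarize(sys.stdin)
        for name, secs in sorted(tasks.items()):
            print(f"{name}: n={len(secs)} max={max(secs):.3f}s total={sum(secs):.3f}s")
        for name, secs in sorted(locks.items()):
            print(f'lock "{name}": n={len(secs)} max={max(secs):.3f}s')

Run against the preceding entries this would report, for example, PowerOnVM_Task peaking at about 0.81 s and the compute_resources lock held for about 1.9 s, which is a quick way to spot slow vCenter tasks or lock contention in a long run of output like this.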
[ 1033.362461] env[62070]: DEBUG nova.compute.manager [None req-1c62a5a1-6f00-412a-ada6-05392ff9718d tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] [instance: 1ababba6-838c-4ba6-bd83-e2b15aaf4b97] Checking state {{(pid=62070) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1033.364101] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18f67650-02a3-416f-806b-cccf97039bb1 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.509081] env[62070]: DEBUG nova.network.neutron [None req-2dd24fa0-f731-48ce-8a8d-1fa6fec29bbf tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: d65b4f74-7df5-4fb8-baa3-4f5b9b480cb2] Successfully updated port: e07c1b27-68ca-4e3b-8554-fb5fabd9c0f4 {{(pid=62070) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1033.740459] env[62070]: DEBUG oslo_vmware.api [None req-a65cb8ad-84af-4b6e-a9a1-bf2808585ba4 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Task: {'id': task-1122320, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.453738} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1033.740765] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-a65cb8ad-84af-4b6e-a9a1-bf2808585ba4 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore2] 6b6e22b9-71fb-4139-993a-7b9fcf89d8e0/6b6e22b9-71fb-4139-993a-7b9fcf89d8e0.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 1033.741516] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-a65cb8ad-84af-4b6e-a9a1-bf2808585ba4 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: 6b6e22b9-71fb-4139-993a-7b9fcf89d8e0] Extending root virtual disk to 1048576 {{(pid=62070) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1033.741516] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8cdcdb53-67e2-49aa-a521-3e9371e59e00 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.748170] env[62070]: DEBUG oslo_vmware.api [None req-a65cb8ad-84af-4b6e-a9a1-bf2808585ba4 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Waiting for the task: (returnval){ [ 1033.748170] env[62070]: value = "task-1122321" [ 1033.748170] env[62070]: _type = "Task" [ 1033.748170] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1033.756773] env[62070]: DEBUG oslo_vmware.api [None req-a65cb8ad-84af-4b6e-a9a1-bf2808585ba4 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Task: {'id': task-1122321, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1033.867128] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-928cd8b0-6c34-4f97-b051-42eb43730068 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.882593] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-401e6e4a-81c2-47b2-88eb-b474cc2f0738 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.914027] env[62070]: INFO nova.compute.manager [None req-1c62a5a1-6f00-412a-ada6-05392ff9718d tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] [instance: 1ababba6-838c-4ba6-bd83-e2b15aaf4b97] Took 19.06 seconds to build instance. [ 1033.915664] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b026fb5-1538-4608-b8f5-51365d12d616 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.923655] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83d65f55-d908-4ff0-9398-9a58351920f6 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.940457] env[62070]: DEBUG nova.compute.provider_tree [None req-d0ef37a9-6db8-476c-932a-937fc6ece311 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1034.011834] env[62070]: DEBUG oslo_concurrency.lockutils [None req-2dd24fa0-f731-48ce-8a8d-1fa6fec29bbf tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Acquiring lock "refresh_cache-d65b4f74-7df5-4fb8-baa3-4f5b9b480cb2" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1034.012200] env[62070]: DEBUG oslo_concurrency.lockutils [None req-2dd24fa0-f731-48ce-8a8d-1fa6fec29bbf tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Acquired lock "refresh_cache-d65b4f74-7df5-4fb8-baa3-4f5b9b480cb2" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1034.012200] env[62070]: DEBUG nova.network.neutron [None req-2dd24fa0-f731-48ce-8a8d-1fa6fec29bbf tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: d65b4f74-7df5-4fb8-baa3-4f5b9b480cb2] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1034.120580] env[62070]: DEBUG nova.compute.manager [req-0da6f0be-bd7d-4010-aaed-0a92eb8bbf1e req-59e6e7bd-2243-47cf-9588-f7c9e64cafd4 service nova] [instance: d65b4f74-7df5-4fb8-baa3-4f5b9b480cb2] Received event network-vif-plugged-e07c1b27-68ca-4e3b-8554-fb5fabd9c0f4 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1034.120828] env[62070]: DEBUG oslo_concurrency.lockutils [req-0da6f0be-bd7d-4010-aaed-0a92eb8bbf1e req-59e6e7bd-2243-47cf-9588-f7c9e64cafd4 service nova] Acquiring lock 
"d65b4f74-7df5-4fb8-baa3-4f5b9b480cb2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1034.121096] env[62070]: DEBUG oslo_concurrency.lockutils [req-0da6f0be-bd7d-4010-aaed-0a92eb8bbf1e req-59e6e7bd-2243-47cf-9588-f7c9e64cafd4 service nova] Lock "d65b4f74-7df5-4fb8-baa3-4f5b9b480cb2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1034.121277] env[62070]: DEBUG oslo_concurrency.lockutils [req-0da6f0be-bd7d-4010-aaed-0a92eb8bbf1e req-59e6e7bd-2243-47cf-9588-f7c9e64cafd4 service nova] Lock "d65b4f74-7df5-4fb8-baa3-4f5b9b480cb2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1034.121429] env[62070]: DEBUG nova.compute.manager [req-0da6f0be-bd7d-4010-aaed-0a92eb8bbf1e req-59e6e7bd-2243-47cf-9588-f7c9e64cafd4 service nova] [instance: d65b4f74-7df5-4fb8-baa3-4f5b9b480cb2] No waiting events found dispatching network-vif-plugged-e07c1b27-68ca-4e3b-8554-fb5fabd9c0f4 {{(pid=62070) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1034.121620] env[62070]: WARNING nova.compute.manager [req-0da6f0be-bd7d-4010-aaed-0a92eb8bbf1e req-59e6e7bd-2243-47cf-9588-f7c9e64cafd4 service nova] [instance: d65b4f74-7df5-4fb8-baa3-4f5b9b480cb2] Received unexpected event network-vif-plugged-e07c1b27-68ca-4e3b-8554-fb5fabd9c0f4 for instance with vm_state building and task_state spawning. [ 1034.121799] env[62070]: DEBUG nova.compute.manager [req-0da6f0be-bd7d-4010-aaed-0a92eb8bbf1e req-59e6e7bd-2243-47cf-9588-f7c9e64cafd4 service nova] [instance: d65b4f74-7df5-4fb8-baa3-4f5b9b480cb2] Received event network-changed-e07c1b27-68ca-4e3b-8554-fb5fabd9c0f4 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1034.121956] env[62070]: DEBUG nova.compute.manager [req-0da6f0be-bd7d-4010-aaed-0a92eb8bbf1e req-59e6e7bd-2243-47cf-9588-f7c9e64cafd4 service nova] [instance: d65b4f74-7df5-4fb8-baa3-4f5b9b480cb2] Refreshing instance network info cache due to event network-changed-e07c1b27-68ca-4e3b-8554-fb5fabd9c0f4. {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1034.122535] env[62070]: DEBUG oslo_concurrency.lockutils [req-0da6f0be-bd7d-4010-aaed-0a92eb8bbf1e req-59e6e7bd-2243-47cf-9588-f7c9e64cafd4 service nova] Acquiring lock "refresh_cache-d65b4f74-7df5-4fb8-baa3-4f5b9b480cb2" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1034.215955] env[62070]: DEBUG nova.compute.manager [req-e2329471-3c01-48ca-a0f1-d3082847a167 req-f61e8c32-58ca-4135-8cc5-ecfe52433d23 service nova] [instance: 1ababba6-838c-4ba6-bd83-e2b15aaf4b97] Received event network-changed-df67c123-2618-45ce-8175-66a34206293a {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1034.216220] env[62070]: DEBUG nova.compute.manager [req-e2329471-3c01-48ca-a0f1-d3082847a167 req-f61e8c32-58ca-4135-8cc5-ecfe52433d23 service nova] [instance: 1ababba6-838c-4ba6-bd83-e2b15aaf4b97] Refreshing instance network info cache due to event network-changed-df67c123-2618-45ce-8175-66a34206293a. 
{{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1034.216915] env[62070]: DEBUG oslo_concurrency.lockutils [req-e2329471-3c01-48ca-a0f1-d3082847a167 req-f61e8c32-58ca-4135-8cc5-ecfe52433d23 service nova] Acquiring lock "refresh_cache-1ababba6-838c-4ba6-bd83-e2b15aaf4b97" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1034.216915] env[62070]: DEBUG oslo_concurrency.lockutils [req-e2329471-3c01-48ca-a0f1-d3082847a167 req-f61e8c32-58ca-4135-8cc5-ecfe52433d23 service nova] Acquired lock "refresh_cache-1ababba6-838c-4ba6-bd83-e2b15aaf4b97" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1034.216915] env[62070]: DEBUG nova.network.neutron [req-e2329471-3c01-48ca-a0f1-d3082847a167 req-f61e8c32-58ca-4135-8cc5-ecfe52433d23 service nova] [instance: 1ababba6-838c-4ba6-bd83-e2b15aaf4b97] Refreshing network info cache for port df67c123-2618-45ce-8175-66a34206293a {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1034.258685] env[62070]: DEBUG oslo_vmware.api [None req-a65cb8ad-84af-4b6e-a9a1-bf2808585ba4 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Task: {'id': task-1122321, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.063687} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1034.258975] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-a65cb8ad-84af-4b6e-a9a1-bf2808585ba4 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: 6b6e22b9-71fb-4139-993a-7b9fcf89d8e0] Extended root virtual disk {{(pid=62070) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1034.260091] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6276312-4817-41d0-bee9-4e095b63d68f {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.284824] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-a65cb8ad-84af-4b6e-a9a1-bf2808585ba4 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: 6b6e22b9-71fb-4139-993a-7b9fcf89d8e0] Reconfiguring VM instance instance-00000063 to attach disk [datastore2] 6b6e22b9-71fb-4139-993a-7b9fcf89d8e0/6b6e22b9-71fb-4139-993a-7b9fcf89d8e0.vmdk or device None with type sparse {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1034.285625] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-76c7d21c-0044-49c3-97b8-c76831b292e9 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.306054] env[62070]: DEBUG oslo_vmware.api [None req-a65cb8ad-84af-4b6e-a9a1-bf2808585ba4 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Waiting for the task: (returnval){ [ 1034.306054] env[62070]: value = "task-1122322" [ 1034.306054] env[62070]: _type = "Task" [ 1034.306054] env[62070]: } to complete. 
{{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1034.316088] env[62070]: DEBUG oslo_vmware.api [None req-a65cb8ad-84af-4b6e-a9a1-bf2808585ba4 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Task: {'id': task-1122322, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1034.419446] env[62070]: DEBUG oslo_concurrency.lockutils [None req-1c62a5a1-6f00-412a-ada6-05392ff9718d tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] Lock "1ababba6-838c-4ba6-bd83-e2b15aaf4b97" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 20.571s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1034.443350] env[62070]: DEBUG nova.scheduler.client.report [None req-d0ef37a9-6db8-476c-932a-937fc6ece311 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1034.543988] env[62070]: DEBUG nova.network.neutron [None req-2dd24fa0-f731-48ce-8a8d-1fa6fec29bbf tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: d65b4f74-7df5-4fb8-baa3-4f5b9b480cb2] Instance cache missing network info. 
{{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1034.751347] env[62070]: DEBUG nova.network.neutron [None req-2dd24fa0-f731-48ce-8a8d-1fa6fec29bbf tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: d65b4f74-7df5-4fb8-baa3-4f5b9b480cb2] Updating instance_info_cache with network_info: [{"id": "e07c1b27-68ca-4e3b-8554-fb5fabd9c0f4", "address": "fa:16:3e:80:68:50", "network": {"id": "4888f989-958d-49ff-bf5a-06873e4cc624", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-906255456-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d079c0ef3ed745fcaf69dc728dca4466", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7e0240aa-a694-48fc-a0f9-6f2d3e71aa12", "external-id": "nsx-vlan-transportzone-249", "segmentation_id": 249, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape07c1b27-68", "ovs_interfaceid": "e07c1b27-68ca-4e3b-8554-fb5fabd9c0f4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1034.817873] env[62070]: DEBUG oslo_vmware.api [None req-a65cb8ad-84af-4b6e-a9a1-bf2808585ba4 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Task: {'id': task-1122322, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1034.910237] env[62070]: DEBUG oslo_concurrency.lockutils [None req-bbf70519-3941-46c2-b3bc-225e0e39f47a tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] Acquiring lock "1ababba6-838c-4ba6-bd83-e2b15aaf4b97" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1034.910237] env[62070]: DEBUG oslo_concurrency.lockutils [None req-bbf70519-3941-46c2-b3bc-225e0e39f47a tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] Lock "1ababba6-838c-4ba6-bd83-e2b15aaf4b97" acquired by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1034.910237] env[62070]: INFO nova.compute.manager [None req-bbf70519-3941-46c2-b3bc-225e0e39f47a tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] [instance: 1ababba6-838c-4ba6-bd83-e2b15aaf4b97] Rebooting instance [ 1034.948651] env[62070]: DEBUG oslo_concurrency.lockutils [None req-d0ef37a9-6db8-476c-932a-937fc6ece311 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.308s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1034.949323] env[62070]: DEBUG nova.compute.manager [None req-d0ef37a9-6db8-476c-932a-937fc6ece311 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] [instance: 000a67eb-9535-4da6-816a-b61126f11509] Start building networks asynchronously for instance. {{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1034.956101] env[62070]: DEBUG oslo_concurrency.lockutils [None req-bf2c8c8b-db77-4ce2-a07d-2a2532257024 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 10.711s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1034.996758] env[62070]: DEBUG nova.network.neutron [req-e2329471-3c01-48ca-a0f1-d3082847a167 req-f61e8c32-58ca-4135-8cc5-ecfe52433d23 service nova] [instance: 1ababba6-838c-4ba6-bd83-e2b15aaf4b97] Updated VIF entry in instance network info cache for port df67c123-2618-45ce-8175-66a34206293a. 
{{(pid=62070) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1034.997166] env[62070]: DEBUG nova.network.neutron [req-e2329471-3c01-48ca-a0f1-d3082847a167 req-f61e8c32-58ca-4135-8cc5-ecfe52433d23 service nova] [instance: 1ababba6-838c-4ba6-bd83-e2b15aaf4b97] Updating instance_info_cache with network_info: [{"id": "df67c123-2618-45ce-8175-66a34206293a", "address": "fa:16:3e:38:62:7a", "network": {"id": "fd8b220c-f20c-489e-9c20-28b886709536", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-452165452-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e1960779e94c4e119497a0c1117f54fc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7bcd9d2d-25c8-41ad-9a4a-93b9029ba993", "external-id": "nsx-vlan-transportzone-734", "segmentation_id": 734, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdf67c123-26", "ovs_interfaceid": "df67c123-2618-45ce-8175-66a34206293a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1035.253834] env[62070]: DEBUG oslo_concurrency.lockutils [None req-2dd24fa0-f731-48ce-8a8d-1fa6fec29bbf tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Releasing lock "refresh_cache-d65b4f74-7df5-4fb8-baa3-4f5b9b480cb2" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1035.254041] env[62070]: DEBUG nova.compute.manager [None req-2dd24fa0-f731-48ce-8a8d-1fa6fec29bbf tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: d65b4f74-7df5-4fb8-baa3-4f5b9b480cb2] Instance network_info: |[{"id": "e07c1b27-68ca-4e3b-8554-fb5fabd9c0f4", "address": "fa:16:3e:80:68:50", "network": {"id": "4888f989-958d-49ff-bf5a-06873e4cc624", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-906255456-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d079c0ef3ed745fcaf69dc728dca4466", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7e0240aa-a694-48fc-a0f9-6f2d3e71aa12", "external-id": "nsx-vlan-transportzone-249", "segmentation_id": 249, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape07c1b27-68", "ovs_interfaceid": "e07c1b27-68ca-4e3b-8554-fb5fabd9c0f4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 
1035.254329] env[62070]: DEBUG oslo_concurrency.lockutils [req-0da6f0be-bd7d-4010-aaed-0a92eb8bbf1e req-59e6e7bd-2243-47cf-9588-f7c9e64cafd4 service nova] Acquired lock "refresh_cache-d65b4f74-7df5-4fb8-baa3-4f5b9b480cb2" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1035.254647] env[62070]: DEBUG nova.network.neutron [req-0da6f0be-bd7d-4010-aaed-0a92eb8bbf1e req-59e6e7bd-2243-47cf-9588-f7c9e64cafd4 service nova] [instance: d65b4f74-7df5-4fb8-baa3-4f5b9b480cb2] Refreshing network info cache for port e07c1b27-68ca-4e3b-8554-fb5fabd9c0f4 {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1035.255820] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-2dd24fa0-f731-48ce-8a8d-1fa6fec29bbf tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: d65b4f74-7df5-4fb8-baa3-4f5b9b480cb2] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:80:68:50', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7e0240aa-a694-48fc-a0f9-6f2d3e71aa12', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e07c1b27-68ca-4e3b-8554-fb5fabd9c0f4', 'vif_model': 'vmxnet3'}] {{(pid=62070) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1035.263392] env[62070]: DEBUG oslo.service.loopingcall [None req-2dd24fa0-f731-48ce-8a8d-1fa6fec29bbf tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1035.266442] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d65b4f74-7df5-4fb8-baa3-4f5b9b480cb2] Creating VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1035.266910] env[62070]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9e5e2902-c207-49ce-868b-634503440c91 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.286865] env[62070]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1035.286865] env[62070]: value = "task-1122323" [ 1035.286865] env[62070]: _type = "Task" [ 1035.286865] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1035.294581] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1122323, 'name': CreateVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1035.319925] env[62070]: DEBUG oslo_vmware.api [None req-a65cb8ad-84af-4b6e-a9a1-bf2808585ba4 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Task: {'id': task-1122322, 'name': ReconfigVM_Task, 'duration_secs': 0.558442} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1035.320270] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-a65cb8ad-84af-4b6e-a9a1-bf2808585ba4 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: 6b6e22b9-71fb-4139-993a-7b9fcf89d8e0] Reconfigured VM instance instance-00000063 to attach disk [datastore2] 6b6e22b9-71fb-4139-993a-7b9fcf89d8e0/6b6e22b9-71fb-4139-993a-7b9fcf89d8e0.vmdk or device None with type sparse {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1035.320949] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c01223c8-c927-43df-84a8-5d1cbac1271b {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.327366] env[62070]: DEBUG oslo_vmware.api [None req-a65cb8ad-84af-4b6e-a9a1-bf2808585ba4 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Waiting for the task: (returnval){ [ 1035.327366] env[62070]: value = "task-1122324" [ 1035.327366] env[62070]: _type = "Task" [ 1035.327366] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1035.336272] env[62070]: DEBUG oslo_vmware.api [None req-a65cb8ad-84af-4b6e-a9a1-bf2808585ba4 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Task: {'id': task-1122324, 'name': Rename_Task} progress is 5%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1035.440411] env[62070]: DEBUG oslo_concurrency.lockutils [None req-bbf70519-3941-46c2-b3bc-225e0e39f47a tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] Acquiring lock "refresh_cache-1ababba6-838c-4ba6-bd83-e2b15aaf4b97" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1035.468620] env[62070]: DEBUG nova.compute.utils [None req-d0ef37a9-6db8-476c-932a-937fc6ece311 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Using /dev/sd instead of None {{(pid=62070) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1035.472922] env[62070]: INFO nova.compute.claims [None req-bf2c8c8b-db77-4ce2-a07d-2a2532257024 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 33d04e59-da01-4ba3-ac42-ab93372a332d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1035.477512] env[62070]: DEBUG nova.compute.manager [None req-d0ef37a9-6db8-476c-932a-937fc6ece311 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] [instance: 000a67eb-9535-4da6-816a-b61126f11509] Allocating IP information in the background. 
{{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1035.477792] env[62070]: DEBUG nova.network.neutron [None req-d0ef37a9-6db8-476c-932a-937fc6ece311 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] [instance: 000a67eb-9535-4da6-816a-b61126f11509] allocate_for_instance() {{(pid=62070) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1035.503697] env[62070]: DEBUG oslo_concurrency.lockutils [req-e2329471-3c01-48ca-a0f1-d3082847a167 req-f61e8c32-58ca-4135-8cc5-ecfe52433d23 service nova] Releasing lock "refresh_cache-1ababba6-838c-4ba6-bd83-e2b15aaf4b97" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1035.503697] env[62070]: DEBUG oslo_concurrency.lockutils [None req-bbf70519-3941-46c2-b3bc-225e0e39f47a tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] Acquired lock "refresh_cache-1ababba6-838c-4ba6-bd83-e2b15aaf4b97" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1035.503697] env[62070]: DEBUG nova.network.neutron [None req-bbf70519-3941-46c2-b3bc-225e0e39f47a tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] [instance: 1ababba6-838c-4ba6-bd83-e2b15aaf4b97] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1035.539544] env[62070]: DEBUG nova.policy [None req-d0ef37a9-6db8-476c-932a-937fc6ece311 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a91eca948b964f1885f1effb82ea35dc', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '735d24ccc5614660a5b34d77af648f94', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62070) authorize /opt/stack/nova/nova/policy.py:201}} [ 1035.759667] env[62070]: DEBUG nova.network.neutron [req-0da6f0be-bd7d-4010-aaed-0a92eb8bbf1e req-59e6e7bd-2243-47cf-9588-f7c9e64cafd4 service nova] [instance: d65b4f74-7df5-4fb8-baa3-4f5b9b480cb2] Updated VIF entry in instance network info cache for port e07c1b27-68ca-4e3b-8554-fb5fabd9c0f4. 
{{(pid=62070) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1035.760068] env[62070]: DEBUG nova.network.neutron [req-0da6f0be-bd7d-4010-aaed-0a92eb8bbf1e req-59e6e7bd-2243-47cf-9588-f7c9e64cafd4 service nova] [instance: d65b4f74-7df5-4fb8-baa3-4f5b9b480cb2] Updating instance_info_cache with network_info: [{"id": "e07c1b27-68ca-4e3b-8554-fb5fabd9c0f4", "address": "fa:16:3e:80:68:50", "network": {"id": "4888f989-958d-49ff-bf5a-06873e4cc624", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-906255456-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d079c0ef3ed745fcaf69dc728dca4466", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7e0240aa-a694-48fc-a0f9-6f2d3e71aa12", "external-id": "nsx-vlan-transportzone-249", "segmentation_id": 249, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape07c1b27-68", "ovs_interfaceid": "e07c1b27-68ca-4e3b-8554-fb5fabd9c0f4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1035.761514] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6eb3f18f-50a0-4a78-8a7f-eefd7473ff66 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Acquiring lock "d8284a01-bbf6-4607-b2db-33bf2cd5457d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1035.761745] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6eb3f18f-50a0-4a78-8a7f-eefd7473ff66 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Lock "d8284a01-bbf6-4607-b2db-33bf2cd5457d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1035.796961] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1122323, 'name': CreateVM_Task, 'duration_secs': 0.346927} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1035.797199] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d65b4f74-7df5-4fb8-baa3-4f5b9b480cb2] Created VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1035.797859] env[62070]: DEBUG oslo_concurrency.lockutils [None req-2dd24fa0-f731-48ce-8a8d-1fa6fec29bbf tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1035.798033] env[62070]: DEBUG oslo_concurrency.lockutils [None req-2dd24fa0-f731-48ce-8a8d-1fa6fec29bbf tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Acquired lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1035.798400] env[62070]: DEBUG oslo_concurrency.lockutils [None req-2dd24fa0-f731-48ce-8a8d-1fa6fec29bbf tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1035.798661] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ac2f521e-37fc-41d7-9efa-efb24c74f7ee {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.803898] env[62070]: DEBUG oslo_vmware.api [None req-2dd24fa0-f731-48ce-8a8d-1fa6fec29bbf tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Waiting for the task: (returnval){ [ 1035.803898] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]52d69558-de60-e464-e177-6e413c428f9f" [ 1035.803898] env[62070]: _type = "Task" [ 1035.803898] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1035.812470] env[62070]: DEBUG oslo_vmware.api [None req-2dd24fa0-f731-48ce-8a8d-1fa6fec29bbf tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52d69558-de60-e464-e177-6e413c428f9f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1035.836782] env[62070]: DEBUG oslo_vmware.api [None req-a65cb8ad-84af-4b6e-a9a1-bf2808585ba4 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Task: {'id': task-1122324, 'name': Rename_Task, 'duration_secs': 0.195129} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1035.837068] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-a65cb8ad-84af-4b6e-a9a1-bf2808585ba4 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: 6b6e22b9-71fb-4139-993a-7b9fcf89d8e0] Powering on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1035.837332] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e8af04e6-7994-4a8b-8754-df5cce1ab17b {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.845232] env[62070]: DEBUG oslo_vmware.api [None req-a65cb8ad-84af-4b6e-a9a1-bf2808585ba4 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Waiting for the task: (returnval){ [ 1035.845232] env[62070]: value = "task-1122325" [ 1035.845232] env[62070]: _type = "Task" [ 1035.845232] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1035.853566] env[62070]: DEBUG oslo_vmware.api [None req-a65cb8ad-84af-4b6e-a9a1-bf2808585ba4 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Task: {'id': task-1122325, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1035.981023] env[62070]: DEBUG nova.compute.manager [None req-d0ef37a9-6db8-476c-932a-937fc6ece311 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] [instance: 000a67eb-9535-4da6-816a-b61126f11509] Start building block device mappings for instance. 
{{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1035.983292] env[62070]: INFO nova.compute.resource_tracker [None req-bf2c8c8b-db77-4ce2-a07d-2a2532257024 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 33d04e59-da01-4ba3-ac42-ab93372a332d] Updating resource usage from migration 947ffd09-2ce3-4070-89c6-6ce3369d0dd2 [ 1036.192886] env[62070]: DEBUG nova.network.neutron [None req-d0ef37a9-6db8-476c-932a-937fc6ece311 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] [instance: 000a67eb-9535-4da6-816a-b61126f11509] Successfully created port: 57c0b4a0-14e4-4b19-b7ee-16842d63fd74 {{(pid=62070) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1036.253981] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e77dc285-fe01-4f65-9583-b1a837aa746f {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.261897] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6a809bf-1b96-4a90-9141-86f5ed46181d {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.265790] env[62070]: DEBUG oslo_concurrency.lockutils [req-0da6f0be-bd7d-4010-aaed-0a92eb8bbf1e req-59e6e7bd-2243-47cf-9588-f7c9e64cafd4 service nova] Releasing lock "refresh_cache-d65b4f74-7df5-4fb8-baa3-4f5b9b480cb2" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1036.266326] env[62070]: DEBUG nova.compute.manager [None req-6eb3f18f-50a0-4a78-8a7f-eefd7473ff66 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: d8284a01-bbf6-4607-b2db-33bf2cd5457d] Starting instance... 
{{(pid=62070) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1036.297244] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8be26d2-b712-4eb3-bbb8-589cc3736d9e {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.305423] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5acfefd0-b53d-4238-aed8-d83af0eb453c {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.310898] env[62070]: DEBUG nova.network.neutron [None req-bbf70519-3941-46c2-b3bc-225e0e39f47a tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] [instance: 1ababba6-838c-4ba6-bd83-e2b15aaf4b97] Updating instance_info_cache with network_info: [{"id": "df67c123-2618-45ce-8175-66a34206293a", "address": "fa:16:3e:38:62:7a", "network": {"id": "fd8b220c-f20c-489e-9c20-28b886709536", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-452165452-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e1960779e94c4e119497a0c1117f54fc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7bcd9d2d-25c8-41ad-9a4a-93b9029ba993", "external-id": "nsx-vlan-transportzone-734", "segmentation_id": 734, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdf67c123-26", "ovs_interfaceid": "df67c123-2618-45ce-8175-66a34206293a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1036.329817] env[62070]: DEBUG nova.compute.provider_tree [None req-bf2c8c8b-db77-4ce2-a07d-2a2532257024 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1036.334948] env[62070]: DEBUG oslo_vmware.api [None req-2dd24fa0-f731-48ce-8a8d-1fa6fec29bbf tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52d69558-de60-e464-e177-6e413c428f9f, 'name': SearchDatastore_Task, 'duration_secs': 0.009871} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1036.335591] env[62070]: DEBUG oslo_concurrency.lockutils [None req-2dd24fa0-f731-48ce-8a8d-1fa6fec29bbf tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Releasing lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1036.335814] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-2dd24fa0-f731-48ce-8a8d-1fa6fec29bbf tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: d65b4f74-7df5-4fb8-baa3-4f5b9b480cb2] Processing image 43ea607c-7ece-4601-9b11-75c6a16aa7dd {{(pid=62070) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1036.336099] env[62070]: DEBUG oslo_concurrency.lockutils [None req-2dd24fa0-f731-48ce-8a8d-1fa6fec29bbf tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1036.336255] env[62070]: DEBUG oslo_concurrency.lockutils [None req-2dd24fa0-f731-48ce-8a8d-1fa6fec29bbf tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Acquired lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1036.336437] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-2dd24fa0-f731-48ce-8a8d-1fa6fec29bbf tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1036.336708] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-37eefba4-9214-4079-a78b-bf7479978ab7 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.345259] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-2dd24fa0-f731-48ce-8a8d-1fa6fec29bbf tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1036.345445] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-2dd24fa0-f731-48ce-8a8d-1fa6fec29bbf tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62070) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1036.346246] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-73ad42da-8aae-4bef-98fc-0dcf136cd0a2 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.354541] env[62070]: DEBUG oslo_vmware.api [None req-2dd24fa0-f731-48ce-8a8d-1fa6fec29bbf tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Waiting for the task: (returnval){ [ 1036.354541] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]52bdb212-1946-483f-c9ce-c5d178e9be21" [ 1036.354541] env[62070]: _type = "Task" [ 1036.354541] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1036.358039] env[62070]: DEBUG oslo_vmware.api [None req-a65cb8ad-84af-4b6e-a9a1-bf2808585ba4 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Task: {'id': task-1122325, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1036.366299] env[62070]: DEBUG oslo_vmware.api [None req-2dd24fa0-f731-48ce-8a8d-1fa6fec29bbf tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52bdb212-1946-483f-c9ce-c5d178e9be21, 'name': SearchDatastore_Task, 'duration_secs': 0.00873} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1036.367066] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8479b2a8-d5d5-4eb7-9b1e-008cae4bb80f {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.371792] env[62070]: DEBUG oslo_vmware.api [None req-2dd24fa0-f731-48ce-8a8d-1fa6fec29bbf tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Waiting for the task: (returnval){ [ 1036.371792] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]528c5674-abde-f75c-8a01-14b1a46d8521" [ 1036.371792] env[62070]: _type = "Task" [ 1036.371792] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1036.381383] env[62070]: DEBUG oslo_vmware.api [None req-2dd24fa0-f731-48ce-8a8d-1fa6fec29bbf tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]528c5674-abde-f75c-8a01-14b1a46d8521, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1036.787322] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6eb3f18f-50a0-4a78-8a7f-eefd7473ff66 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1036.815722] env[62070]: DEBUG oslo_concurrency.lockutils [None req-bbf70519-3941-46c2-b3bc-225e0e39f47a tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] Releasing lock "refresh_cache-1ababba6-838c-4ba6-bd83-e2b15aaf4b97" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1036.820934] env[62070]: DEBUG nova.compute.manager [None req-bbf70519-3941-46c2-b3bc-225e0e39f47a tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] [instance: 1ababba6-838c-4ba6-bd83-e2b15aaf4b97] Checking state {{(pid=62070) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1036.821893] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c52cec88-e57f-45e6-91b5-2926a7f89c9e {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.837063] env[62070]: DEBUG nova.scheduler.client.report [None req-bf2c8c8b-db77-4ce2-a07d-2a2532257024 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1036.856150] env[62070]: DEBUG oslo_vmware.api [None req-a65cb8ad-84af-4b6e-a9a1-bf2808585ba4 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Task: {'id': task-1122325, 'name': PowerOnVM_Task, 'duration_secs': 0.79979} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1036.856441] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-a65cb8ad-84af-4b6e-a9a1-bf2808585ba4 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: 6b6e22b9-71fb-4139-993a-7b9fcf89d8e0] Powered on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1036.857051] env[62070]: INFO nova.compute.manager [None req-a65cb8ad-84af-4b6e-a9a1-bf2808585ba4 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: 6b6e22b9-71fb-4139-993a-7b9fcf89d8e0] Took 8.33 seconds to spawn the instance on the hypervisor. 
[ 1036.857051] env[62070]: DEBUG nova.compute.manager [None req-a65cb8ad-84af-4b6e-a9a1-bf2808585ba4 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: 6b6e22b9-71fb-4139-993a-7b9fcf89d8e0] Checking state {{(pid=62070) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1036.857601] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e32b67c9-09cd-463b-96c8-f90cec3e87c9 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.882554] env[62070]: DEBUG oslo_vmware.api [None req-2dd24fa0-f731-48ce-8a8d-1fa6fec29bbf tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]528c5674-abde-f75c-8a01-14b1a46d8521, 'name': SearchDatastore_Task, 'duration_secs': 0.008638} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1036.882823] env[62070]: DEBUG oslo_concurrency.lockutils [None req-2dd24fa0-f731-48ce-8a8d-1fa6fec29bbf tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Releasing lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1036.883096] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-2dd24fa0-f731-48ce-8a8d-1fa6fec29bbf tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore2] d65b4f74-7df5-4fb8-baa3-4f5b9b480cb2/d65b4f74-7df5-4fb8-baa3-4f5b9b480cb2.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1036.883801] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-99d0804f-ca64-4f75-9f0c-c6a756a1c1eb {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.889969] env[62070]: DEBUG oslo_vmware.api [None req-2dd24fa0-f731-48ce-8a8d-1fa6fec29bbf tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Waiting for the task: (returnval){ [ 1036.889969] env[62070]: value = "task-1122326" [ 1036.889969] env[62070]: _type = "Task" [ 1036.889969] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1036.897790] env[62070]: DEBUG oslo_vmware.api [None req-2dd24fa0-f731-48ce-8a8d-1fa6fec29bbf tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Task: {'id': task-1122326, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1036.994055] env[62070]: DEBUG nova.compute.manager [None req-d0ef37a9-6db8-476c-932a-937fc6ece311 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] [instance: 000a67eb-9535-4da6-816a-b61126f11509] Start spawning the instance on the hypervisor. 
{{(pid=62070) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1037.024759] env[62070]: DEBUG nova.virt.hardware [None req-d0ef37a9-6db8-476c-932a-937fc6ece311 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T09:21:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T09:21:17Z,direct_url=,disk_format='vmdk',id=43ea607c-7ece-4601-9b11-75c6a16aa7dd,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9d42cb2bbadf40d6b35f237f71234611',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T09:21:18Z,virtual_size=,visibility=), allow threads: False {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1037.025072] env[62070]: DEBUG nova.virt.hardware [None req-d0ef37a9-6db8-476c-932a-937fc6ece311 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Flavor limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1037.025276] env[62070]: DEBUG nova.virt.hardware [None req-d0ef37a9-6db8-476c-932a-937fc6ece311 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Image limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1037.025488] env[62070]: DEBUG nova.virt.hardware [None req-d0ef37a9-6db8-476c-932a-937fc6ece311 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Flavor pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1037.025645] env[62070]: DEBUG nova.virt.hardware [None req-d0ef37a9-6db8-476c-932a-937fc6ece311 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Image pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1037.025820] env[62070]: DEBUG nova.virt.hardware [None req-d0ef37a9-6db8-476c-932a-937fc6ece311 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1037.026092] env[62070]: DEBUG nova.virt.hardware [None req-d0ef37a9-6db8-476c-932a-937fc6ece311 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1037.026289] env[62070]: DEBUG nova.virt.hardware [None req-d0ef37a9-6db8-476c-932a-937fc6ece311 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1037.026469] env[62070]: DEBUG 
nova.virt.hardware [None req-d0ef37a9-6db8-476c-932a-937fc6ece311 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Got 1 possible topologies {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1037.026651] env[62070]: DEBUG nova.virt.hardware [None req-d0ef37a9-6db8-476c-932a-937fc6ece311 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1037.026865] env[62070]: DEBUG nova.virt.hardware [None req-d0ef37a9-6db8-476c-932a-937fc6ece311 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1037.027813] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-620ea02d-e3c5-4130-af35-521dd20ef2ca {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.038892] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f536b209-fa47-4b80-93e9-57948902c3fb {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.342987] env[62070]: DEBUG oslo_concurrency.lockutils [None req-bf2c8c8b-db77-4ce2-a07d-2a2532257024 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.390s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1037.343155] env[62070]: INFO nova.compute.manager [None req-bf2c8c8b-db77-4ce2-a07d-2a2532257024 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 33d04e59-da01-4ba3-ac42-ab93372a332d] Migrating [ 1037.349767] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 10.774s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1037.376255] env[62070]: INFO nova.compute.manager [None req-a65cb8ad-84af-4b6e-a9a1-bf2808585ba4 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: 6b6e22b9-71fb-4139-993a-7b9fcf89d8e0] Took 19.98 seconds to build instance. [ 1037.401351] env[62070]: DEBUG oslo_vmware.api [None req-2dd24fa0-f731-48ce-8a8d-1fa6fec29bbf tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Task: {'id': task-1122326, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1037.692576] env[62070]: DEBUG nova.compute.manager [req-844695d9-469f-4f6c-962b-a27a3cf1ed04 req-a74e9c1b-49cc-4b7f-8618-7e827b993c77 service nova] [instance: 000a67eb-9535-4da6-816a-b61126f11509] Received event network-vif-plugged-57c0b4a0-14e4-4b19-b7ee-16842d63fd74 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1037.692806] env[62070]: DEBUG oslo_concurrency.lockutils [req-844695d9-469f-4f6c-962b-a27a3cf1ed04 req-a74e9c1b-49cc-4b7f-8618-7e827b993c77 service nova] Acquiring lock "000a67eb-9535-4da6-816a-b61126f11509-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1037.693301] env[62070]: DEBUG oslo_concurrency.lockutils [req-844695d9-469f-4f6c-962b-a27a3cf1ed04 req-a74e9c1b-49cc-4b7f-8618-7e827b993c77 service nova] Lock "000a67eb-9535-4da6-816a-b61126f11509-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1037.693492] env[62070]: DEBUG oslo_concurrency.lockutils [req-844695d9-469f-4f6c-962b-a27a3cf1ed04 req-a74e9c1b-49cc-4b7f-8618-7e827b993c77 service nova] Lock "000a67eb-9535-4da6-816a-b61126f11509-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1037.693669] env[62070]: DEBUG nova.compute.manager [req-844695d9-469f-4f6c-962b-a27a3cf1ed04 req-a74e9c1b-49cc-4b7f-8618-7e827b993c77 service nova] [instance: 000a67eb-9535-4da6-816a-b61126f11509] No waiting events found dispatching network-vif-plugged-57c0b4a0-14e4-4b19-b7ee-16842d63fd74 {{(pid=62070) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1037.693842] env[62070]: WARNING nova.compute.manager [req-844695d9-469f-4f6c-962b-a27a3cf1ed04 req-a74e9c1b-49cc-4b7f-8618-7e827b993c77 service nova] [instance: 000a67eb-9535-4da6-816a-b61126f11509] Received unexpected event network-vif-plugged-57c0b4a0-14e4-4b19-b7ee-16842d63fd74 for instance with vm_state building and task_state spawning. 
[ 1037.840268] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9aa98ddf-b57b-4e09-a094-13040caeafb3 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.850153] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-bbf70519-3941-46c2-b3bc-225e0e39f47a tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] [instance: 1ababba6-838c-4ba6-bd83-e2b15aaf4b97] Doing hard reboot of VM {{(pid=62070) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1064}} [ 1037.850526] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ResetVM_Task with opID=oslo.vmware-680b12d6-73b6-446b-b347-46fe182141a4 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.858665] env[62070]: DEBUG oslo_vmware.api [None req-bbf70519-3941-46c2-b3bc-225e0e39f47a tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] Waiting for the task: (returnval){ [ 1037.858665] env[62070]: value = "task-1122327" [ 1037.858665] env[62070]: _type = "Task" [ 1037.858665] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1037.859675] env[62070]: DEBUG nova.network.neutron [None req-d0ef37a9-6db8-476c-932a-937fc6ece311 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] [instance: 000a67eb-9535-4da6-816a-b61126f11509] Successfully updated port: 57c0b4a0-14e4-4b19-b7ee-16842d63fd74 {{(pid=62070) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1037.869059] env[62070]: DEBUG oslo_concurrency.lockutils [None req-bf2c8c8b-db77-4ce2-a07d-2a2532257024 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Acquiring lock "refresh_cache-33d04e59-da01-4ba3-ac42-ab93372a332d" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1037.869288] env[62070]: DEBUG oslo_concurrency.lockutils [None req-bf2c8c8b-db77-4ce2-a07d-2a2532257024 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Acquired lock "refresh_cache-33d04e59-da01-4ba3-ac42-ab93372a332d" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1037.869513] env[62070]: DEBUG nova.network.neutron [None req-bf2c8c8b-db77-4ce2-a07d-2a2532257024 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 33d04e59-da01-4ba3-ac42-ab93372a332d] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1037.881899] env[62070]: DEBUG oslo_concurrency.lockutils [None req-a65cb8ad-84af-4b6e-a9a1-bf2808585ba4 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Lock "6b6e22b9-71fb-4139-993a-7b9fcf89d8e0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 21.490s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1037.883284] env[62070]: DEBUG oslo_vmware.api [None req-bbf70519-3941-46c2-b3bc-225e0e39f47a tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] Task: {'id': 
task-1122327, 'name': ResetVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1037.902542] env[62070]: DEBUG oslo_vmware.api [None req-2dd24fa0-f731-48ce-8a8d-1fa6fec29bbf tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Task: {'id': task-1122326, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1038.032639] env[62070]: DEBUG oslo_vmware.rw_handles [None req-e326279a-fafc-42e7-9e62-9e1b8572336d tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/527d396b-e006-c5ea-747b-2ea21c31af58/disk-0.vmdk. {{(pid=62070) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1038.033749] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3025428c-bc5e-42f7-9be9-b37a12eb3839 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.040964] env[62070]: DEBUG oslo_vmware.rw_handles [None req-e326279a-fafc-42e7-9e62-9e1b8572336d tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/527d396b-e006-c5ea-747b-2ea21c31af58/disk-0.vmdk is in state: ready. {{(pid=62070) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1038.041194] env[62070]: ERROR oslo_vmware.rw_handles [None req-e326279a-fafc-42e7-9e62-9e1b8572336d tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/527d396b-e006-c5ea-747b-2ea21c31af58/disk-0.vmdk due to incomplete transfer. [ 1038.041481] env[62070]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-5f6cf141-3c97-4d70-840c-408a8567820a {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.048690] env[62070]: DEBUG oslo_vmware.rw_handles [None req-e326279a-fafc-42e7-9e62-9e1b8572336d tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/527d396b-e006-c5ea-747b-2ea21c31af58/disk-0.vmdk. 
{{(pid=62070) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1038.048938] env[62070]: DEBUG nova.virt.vmwareapi.images [None req-e326279a-fafc-42e7-9e62-9e1b8572336d tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] [instance: 53a1791d-38fd-4721-b82c-2f0922348300] Uploaded image 24ed4c28-b352-4867-857b-17f9624cc455 to the Glance image server {{(pid=62070) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:473}} [ 1038.051457] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-e326279a-fafc-42e7-9e62-9e1b8572336d tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] [instance: 53a1791d-38fd-4721-b82c-2f0922348300] Destroying the VM {{(pid=62070) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1358}} [ 1038.051719] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-03dbea28-4184-4ecf-b3fe-cc079c906aea {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.057084] env[62070]: DEBUG oslo_vmware.api [None req-e326279a-fafc-42e7-9e62-9e1b8572336d tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Waiting for the task: (returnval){ [ 1038.057084] env[62070]: value = "task-1122328" [ 1038.057084] env[62070]: _type = "Task" [ 1038.057084] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1038.065405] env[62070]: DEBUG oslo_vmware.api [None req-e326279a-fafc-42e7-9e62-9e1b8572336d tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Task: {'id': task-1122328, 'name': Destroy_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1038.135743] env[62070]: DEBUG oslo_concurrency.lockutils [None req-44d33cde-4ced-4c6b-a9e1-712161220918 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Acquiring lock "6b6e22b9-71fb-4139-993a-7b9fcf89d8e0" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1038.136529] env[62070]: DEBUG oslo_concurrency.lockutils [None req-44d33cde-4ced-4c6b-a9e1-712161220918 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Lock "6b6e22b9-71fb-4139-993a-7b9fcf89d8e0" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1038.136529] env[62070]: DEBUG oslo_concurrency.lockutils [None req-44d33cde-4ced-4c6b-a9e1-712161220918 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Acquiring lock "6b6e22b9-71fb-4139-993a-7b9fcf89d8e0-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1038.136529] env[62070]: DEBUG oslo_concurrency.lockutils [None req-44d33cde-4ced-4c6b-a9e1-712161220918 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Lock "6b6e22b9-71fb-4139-993a-7b9fcf89d8e0-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1038.136712] env[62070]: DEBUG oslo_concurrency.lockutils [None req-44d33cde-4ced-4c6b-a9e1-712161220918 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Lock "6b6e22b9-71fb-4139-993a-7b9fcf89d8e0-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1038.139130] env[62070]: INFO nova.compute.manager [None req-44d33cde-4ced-4c6b-a9e1-712161220918 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: 6b6e22b9-71fb-4139-993a-7b9fcf89d8e0] Terminating instance [ 1038.141518] env[62070]: DEBUG nova.compute.manager [None req-44d33cde-4ced-4c6b-a9e1-712161220918 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: 6b6e22b9-71fb-4139-993a-7b9fcf89d8e0] Start destroying the instance on the hypervisor. 
{{(pid=62070) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1038.141722] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-44d33cde-4ced-4c6b-a9e1-712161220918 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: 6b6e22b9-71fb-4139-993a-7b9fcf89d8e0] Destroying instance {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1038.142558] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdb68bfe-a5ba-424e-9a39-41d7b1f885df {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.149744] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-44d33cde-4ced-4c6b-a9e1-712161220918 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: 6b6e22b9-71fb-4139-993a-7b9fcf89d8e0] Powering off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1038.149982] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-cf77e430-0547-4a57-a5d9-cffdf1f00c97 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.155331] env[62070]: DEBUG oslo_vmware.api [None req-44d33cde-4ced-4c6b-a9e1-712161220918 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Waiting for the task: (returnval){ [ 1038.155331] env[62070]: value = "task-1122329" [ 1038.155331] env[62070]: _type = "Task" [ 1038.155331] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1038.162751] env[62070]: DEBUG oslo_vmware.api [None req-44d33cde-4ced-4c6b-a9e1-712161220918 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Task: {'id': task-1122329, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1038.371021] env[62070]: DEBUG oslo_vmware.api [None req-bbf70519-3941-46c2-b3bc-225e0e39f47a tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] Task: {'id': task-1122327, 'name': ResetVM_Task, 'duration_secs': 0.109872} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1038.371754] env[62070]: DEBUG nova.compute.resource_tracker [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Applying migration context for instance 33d04e59-da01-4ba3-ac42-ab93372a332d as it has an incoming, in-progress migration 947ffd09-2ce3-4070-89c6-6ce3369d0dd2. 
Migration status is pre-migrating {{(pid=62070) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 1038.373343] env[62070]: INFO nova.compute.resource_tracker [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] [instance: 33d04e59-da01-4ba3-ac42-ab93372a332d] Updating resource usage from migration 947ffd09-2ce3-4070-89c6-6ce3369d0dd2 [ 1038.375714] env[62070]: DEBUG oslo_concurrency.lockutils [None req-d0ef37a9-6db8-476c-932a-937fc6ece311 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Acquiring lock "refresh_cache-000a67eb-9535-4da6-816a-b61126f11509" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1038.375849] env[62070]: DEBUG oslo_concurrency.lockutils [None req-d0ef37a9-6db8-476c-932a-937fc6ece311 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Acquired lock "refresh_cache-000a67eb-9535-4da6-816a-b61126f11509" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1038.376021] env[62070]: DEBUG nova.network.neutron [None req-d0ef37a9-6db8-476c-932a-937fc6ece311 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] [instance: 000a67eb-9535-4da6-816a-b61126f11509] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1038.377155] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-bbf70519-3941-46c2-b3bc-225e0e39f47a tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] [instance: 1ababba6-838c-4ba6-bd83-e2b15aaf4b97] Did hard reboot of VM {{(pid=62070) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1068}} [ 1038.377349] env[62070]: DEBUG nova.compute.manager [None req-bbf70519-3941-46c2-b3bc-225e0e39f47a tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] [instance: 1ababba6-838c-4ba6-bd83-e2b15aaf4b97] Checking state {{(pid=62070) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1038.380458] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4b7f369-21a1-4f6d-a434-659b161af464 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.400708] env[62070]: DEBUG oslo_vmware.api [None req-2dd24fa0-f731-48ce-8a8d-1fa6fec29bbf tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Task: {'id': task-1122326, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.03678} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1038.401732] env[62070]: DEBUG nova.compute.resource_tracker [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Instance 67e99ada-a8e6-4034-b19b-5b2cb883b735 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62070) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1038.402104] env[62070]: DEBUG nova.compute.resource_tracker [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Instance a3c42653-9a4b-42d3-bc38-8d46d95c8f64 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62070) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1038.402104] env[62070]: WARNING nova.compute.resource_tracker [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Instance 3d22f50a-e1b7-48f9-a044-df64d01dfeb4 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1038.402230] env[62070]: DEBUG nova.compute.resource_tracker [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Instance 53a1791d-38fd-4721-b82c-2f0922348300 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62070) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1038.402345] env[62070]: DEBUG nova.compute.resource_tracker [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Instance a5cba512-9b50-4ca3-93eb-345be12dc588 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62070) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1038.402474] env[62070]: WARNING nova.compute.resource_tracker [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Instance 71aead12-a182-40a7-b5a9-91c01271b800 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1038.402588] env[62070]: DEBUG nova.compute.resource_tracker [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Instance b9e5a798-9ce9-4e36-8c32-bc32b0dc1eae actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62070) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1038.403763] env[62070]: DEBUG nova.compute.resource_tracker [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Instance 5cccd79d-d243-49db-8581-718dd594f3b3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62070) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1038.403942] env[62070]: DEBUG nova.compute.resource_tracker [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Instance 7bfda953-ac95-4dce-b7a7-c570eae35582 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62070) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1038.404086] env[62070]: DEBUG nova.compute.resource_tracker [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Instance 1ababba6-838c-4ba6-bd83-e2b15aaf4b97 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62070) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1038.404208] env[62070]: DEBUG nova.compute.resource_tracker [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Instance 6b6e22b9-71fb-4139-993a-7b9fcf89d8e0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62070) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1038.404324] env[62070]: DEBUG nova.compute.resource_tracker [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Instance d65b4f74-7df5-4fb8-baa3-4f5b9b480cb2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62070) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1038.404481] env[62070]: DEBUG nova.compute.resource_tracker [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Instance 000a67eb-9535-4da6-816a-b61126f11509 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62070) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1038.404621] env[62070]: DEBUG nova.compute.resource_tracker [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Migration 947ffd09-2ce3-4070-89c6-6ce3369d0dd2 is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62070) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 1038.404736] env[62070]: DEBUG nova.compute.resource_tracker [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Instance 33d04e59-da01-4ba3-ac42-ab93372a332d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. 
{{(pid=62070) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1038.406239] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-2dd24fa0-f731-48ce-8a8d-1fa6fec29bbf tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore2] d65b4f74-7df5-4fb8-baa3-4f5b9b480cb2/d65b4f74-7df5-4fb8-baa3-4f5b9b480cb2.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 1038.406448] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-2dd24fa0-f731-48ce-8a8d-1fa6fec29bbf tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: d65b4f74-7df5-4fb8-baa3-4f5b9b480cb2] Extending root virtual disk to 1048576 {{(pid=62070) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1038.407215] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-be5e5d61-54d3-42f9-b690-a65fdb0b32d4 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.414242] env[62070]: DEBUG oslo_vmware.api [None req-2dd24fa0-f731-48ce-8a8d-1fa6fec29bbf tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Waiting for the task: (returnval){ [ 1038.414242] env[62070]: value = "task-1122330" [ 1038.414242] env[62070]: _type = "Task" [ 1038.414242] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1038.428927] env[62070]: DEBUG oslo_vmware.api [None req-2dd24fa0-f731-48ce-8a8d-1fa6fec29bbf tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Task: {'id': task-1122330, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1038.568934] env[62070]: DEBUG oslo_vmware.api [None req-e326279a-fafc-42e7-9e62-9e1b8572336d tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Task: {'id': task-1122328, 'name': Destroy_Task} progress is 33%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1038.651158] env[62070]: DEBUG nova.network.neutron [None req-bf2c8c8b-db77-4ce2-a07d-2a2532257024 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 33d04e59-da01-4ba3-ac42-ab93372a332d] Updating instance_info_cache with network_info: [{"id": "222fba36-759a-41f7-a82a-cb4047bd3725", "address": "fa:16:3e:7a:4c:a1", "network": {"id": "5ea0fffc-372c-450e-b27b-10959077d58f", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1853458988-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9191f0e6c2ee401abca64c0780e230bf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f3c995e9-7f2f-420c-880a-d60da6e708ad", "external-id": "nsx-vlan-transportzone-166", "segmentation_id": 166, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap222fba36-75", "ovs_interfaceid": "222fba36-759a-41f7-a82a-cb4047bd3725", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1038.665583] env[62070]: DEBUG oslo_vmware.api [None req-44d33cde-4ced-4c6b-a9e1-712161220918 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Task: {'id': task-1122329, 'name': PowerOffVM_Task, 'duration_secs': 0.340334} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1038.666514] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-44d33cde-4ced-4c6b-a9e1-712161220918 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: 6b6e22b9-71fb-4139-993a-7b9fcf89d8e0] Powered off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1038.666679] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-44d33cde-4ced-4c6b-a9e1-712161220918 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: 6b6e22b9-71fb-4139-993a-7b9fcf89d8e0] Unregistering the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1038.666943] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-223a2006-2a81-4a35-9d38-a36d150b0f43 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.757821] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-44d33cde-4ced-4c6b-a9e1-712161220918 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: 6b6e22b9-71fb-4139-993a-7b9fcf89d8e0] Unregistered the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1038.758069] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-44d33cde-4ced-4c6b-a9e1-712161220918 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: 6b6e22b9-71fb-4139-993a-7b9fcf89d8e0] Deleting contents of the VM from datastore datastore2 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1038.758307] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-44d33cde-4ced-4c6b-a9e1-712161220918 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Deleting the datastore file [datastore2] 6b6e22b9-71fb-4139-993a-7b9fcf89d8e0 {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1038.758579] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d0b46b61-a8d2-4d50-b23a-97a0c520ad7d {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.765011] env[62070]: DEBUG oslo_vmware.api [None req-44d33cde-4ced-4c6b-a9e1-712161220918 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Waiting for the task: (returnval){ [ 1038.765011] env[62070]: value = "task-1122332" [ 1038.765011] env[62070]: _type = "Task" [ 1038.765011] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1038.773132] env[62070]: DEBUG oslo_vmware.api [None req-44d33cde-4ced-4c6b-a9e1-712161220918 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Task: {'id': task-1122332, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1038.894787] env[62070]: DEBUG oslo_concurrency.lockutils [None req-bbf70519-3941-46c2-b3bc-225e0e39f47a tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] Lock "1ababba6-838c-4ba6-bd83-e2b15aaf4b97" "released" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: held 3.986s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1038.908766] env[62070]: DEBUG nova.compute.resource_tracker [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Instance d8284a01-bbf6-4607-b2db-33bf2cd5457d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62070) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1038.909058] env[62070]: DEBUG nova.compute.resource_tracker [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Total usable vcpus: 48, total allocated vcpus: 13 {{(pid=62070) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1038.909324] env[62070]: DEBUG nova.compute.resource_tracker [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=3072MB phys_disk=200GB used_disk=13GB total_vcpus=48 used_vcpus=13 pci_stats=[] {{(pid=62070) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1038.912413] env[62070]: DEBUG nova.network.neutron [None req-d0ef37a9-6db8-476c-932a-937fc6ece311 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] [instance: 000a67eb-9535-4da6-816a-b61126f11509] Instance cache missing network info. {{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1038.926294] env[62070]: DEBUG oslo_vmware.api [None req-2dd24fa0-f731-48ce-8a8d-1fa6fec29bbf tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Task: {'id': task-1122330, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.329818} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1038.926828] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-2dd24fa0-f731-48ce-8a8d-1fa6fec29bbf tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: d65b4f74-7df5-4fb8-baa3-4f5b9b480cb2] Extended root virtual disk {{(pid=62070) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1038.928186] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bdfe850-6640-4933-bfd6-393a9419f33e {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.972353] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-2dd24fa0-f731-48ce-8a8d-1fa6fec29bbf tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: d65b4f74-7df5-4fb8-baa3-4f5b9b480cb2] Reconfiguring VM instance instance-00000064 to attach disk [datastore2] d65b4f74-7df5-4fb8-baa3-4f5b9b480cb2/d65b4f74-7df5-4fb8-baa3-4f5b9b480cb2.vmdk or device None with type sparse {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1038.979673] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c4c4abcf-78af-419a-b08b-732e6ecfe2f8 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.012182] env[62070]: DEBUG oslo_vmware.api [None req-2dd24fa0-f731-48ce-8a8d-1fa6fec29bbf tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Waiting for the task: (returnval){ [ 1039.012182] env[62070]: value = "task-1122333" [ 1039.012182] env[62070]: _type = "Task" [ 1039.012182] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1039.028622] env[62070]: DEBUG oslo_vmware.api [None req-2dd24fa0-f731-48ce-8a8d-1fa6fec29bbf tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Task: {'id': task-1122333, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1039.070041] env[62070]: DEBUG oslo_vmware.api [None req-e326279a-fafc-42e7-9e62-9e1b8572336d tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Task: {'id': task-1122328, 'name': Destroy_Task, 'duration_secs': 0.60502} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1039.070173] env[62070]: INFO nova.virt.vmwareapi.vm_util [None req-e326279a-fafc-42e7-9e62-9e1b8572336d tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] [instance: 53a1791d-38fd-4721-b82c-2f0922348300] Destroyed the VM [ 1039.070366] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-e326279a-fafc-42e7-9e62-9e1b8572336d tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] [instance: 53a1791d-38fd-4721-b82c-2f0922348300] Deleting Snapshot of the VM instance {{(pid=62070) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1039.070631] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-bebb8cae-a3c4-448b-aeee-084bf46a5be3 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.079582] env[62070]: DEBUG oslo_vmware.api [None req-e326279a-fafc-42e7-9e62-9e1b8572336d tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Waiting for the task: (returnval){ [ 1039.079582] env[62070]: value = "task-1122334" [ 1039.079582] env[62070]: _type = "Task" [ 1039.079582] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1039.092031] env[62070]: DEBUG oslo_vmware.api [None req-e326279a-fafc-42e7-9e62-9e1b8572336d tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Task: {'id': task-1122334, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1039.156602] env[62070]: DEBUG oslo_concurrency.lockutils [None req-bf2c8c8b-db77-4ce2-a07d-2a2532257024 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Releasing lock "refresh_cache-33d04e59-da01-4ba3-ac42-ab93372a332d" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1039.169939] env[62070]: DEBUG nova.network.neutron [None req-d0ef37a9-6db8-476c-932a-937fc6ece311 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] [instance: 000a67eb-9535-4da6-816a-b61126f11509] Updating instance_info_cache with network_info: [{"id": "57c0b4a0-14e4-4b19-b7ee-16842d63fd74", "address": "fa:16:3e:87:a3:a2", "network": {"id": "6ea9aade-1b40-4ce8-a502-14ff09a4ab40", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1617295069-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "735d24ccc5614660a5b34d77af648f94", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5b8af79a-31d5-4d78-93d7-3919aa1d9186", "external-id": "nsx-vlan-transportzone-324", "segmentation_id": 324, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap57c0b4a0-14", "ovs_interfaceid": 
"57c0b4a0-14e4-4b19-b7ee-16842d63fd74", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1039.246745] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b099f92-7b7c-4a45-9698-b44339dca2b2 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.255195] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-daf9b197-d94f-43a8-9fe1-19d3c07874a6 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.293572] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f009b59-2520-4f91-9f40-c40db6ee7db6 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.302225] env[62070]: DEBUG oslo_vmware.api [None req-44d33cde-4ced-4c6b-a9e1-712161220918 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Task: {'id': task-1122332, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.440996} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1039.304491] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-44d33cde-4ced-4c6b-a9e1-712161220918 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Deleted the datastore file {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1039.304990] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-44d33cde-4ced-4c6b-a9e1-712161220918 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: 6b6e22b9-71fb-4139-993a-7b9fcf89d8e0] Deleted contents of the VM from datastore datastore2 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1039.305217] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-44d33cde-4ced-4c6b-a9e1-712161220918 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: 6b6e22b9-71fb-4139-993a-7b9fcf89d8e0] Instance destroyed {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1039.305430] env[62070]: INFO nova.compute.manager [None req-44d33cde-4ced-4c6b-a9e1-712161220918 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: 6b6e22b9-71fb-4139-993a-7b9fcf89d8e0] Took 1.16 seconds to destroy the instance on the hypervisor. [ 1039.305693] env[62070]: DEBUG oslo.service.loopingcall [None req-44d33cde-4ced-4c6b-a9e1-712161220918 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1039.305958] env[62070]: DEBUG nova.compute.manager [-] [instance: 6b6e22b9-71fb-4139-993a-7b9fcf89d8e0] Deallocating network for instance {{(pid=62070) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1039.306068] env[62070]: DEBUG nova.network.neutron [-] [instance: 6b6e22b9-71fb-4139-993a-7b9fcf89d8e0] deallocate_for_instance() {{(pid=62070) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1039.308929] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c989be2c-3aaf-4604-a0fd-96046116232a {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.325301] env[62070]: DEBUG nova.compute.provider_tree [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1039.531245] env[62070]: DEBUG oslo_vmware.api [None req-2dd24fa0-f731-48ce-8a8d-1fa6fec29bbf tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Task: {'id': task-1122333, 'name': ReconfigVM_Task, 'duration_secs': 0.361399} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1039.531818] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-2dd24fa0-f731-48ce-8a8d-1fa6fec29bbf tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: d65b4f74-7df5-4fb8-baa3-4f5b9b480cb2] Reconfigured VM instance instance-00000064 to attach disk [datastore2] d65b4f74-7df5-4fb8-baa3-4f5b9b480cb2/d65b4f74-7df5-4fb8-baa3-4f5b9b480cb2.vmdk or device None with type sparse {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1039.532981] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6ec7cd0e-8e8c-4421-bc0a-74b9392303ad {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.540638] env[62070]: DEBUG oslo_vmware.api [None req-2dd24fa0-f731-48ce-8a8d-1fa6fec29bbf tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Waiting for the task: (returnval){ [ 1039.540638] env[62070]: value = "task-1122335" [ 1039.540638] env[62070]: _type = "Task" [ 1039.540638] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1039.551227] env[62070]: DEBUG oslo_vmware.api [None req-2dd24fa0-f731-48ce-8a8d-1fa6fec29bbf tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Task: {'id': task-1122335, 'name': Rename_Task} progress is 5%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1039.593816] env[62070]: DEBUG oslo_vmware.api [None req-e326279a-fafc-42e7-9e62-9e1b8572336d tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Task: {'id': task-1122334, 'name': RemoveSnapshot_Task} progress is 100%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1039.677652] env[62070]: DEBUG oslo_concurrency.lockutils [None req-d0ef37a9-6db8-476c-932a-937fc6ece311 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Releasing lock "refresh_cache-000a67eb-9535-4da6-816a-b61126f11509" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1039.678024] env[62070]: DEBUG nova.compute.manager [None req-d0ef37a9-6db8-476c-932a-937fc6ece311 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] [instance: 000a67eb-9535-4da6-816a-b61126f11509] Instance network_info: |[{"id": "57c0b4a0-14e4-4b19-b7ee-16842d63fd74", "address": "fa:16:3e:87:a3:a2", "network": {"id": "6ea9aade-1b40-4ce8-a502-14ff09a4ab40", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1617295069-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "735d24ccc5614660a5b34d77af648f94", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5b8af79a-31d5-4d78-93d7-3919aa1d9186", "external-id": "nsx-vlan-transportzone-324", "segmentation_id": 324, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap57c0b4a0-14", "ovs_interfaceid": "57c0b4a0-14e4-4b19-b7ee-16842d63fd74", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1039.678512] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-d0ef37a9-6db8-476c-932a-937fc6ece311 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] [instance: 000a67eb-9535-4da6-816a-b61126f11509] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:87:a3:a2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '5b8af79a-31d5-4d78-93d7-3919aa1d9186', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '57c0b4a0-14e4-4b19-b7ee-16842d63fd74', 'vif_model': 'vmxnet3'}] {{(pid=62070) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1039.692778] env[62070]: DEBUG oslo.service.loopingcall [None req-d0ef37a9-6db8-476c-932a-937fc6ece311 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1039.693818] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 000a67eb-9535-4da6-816a-b61126f11509] Creating VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1039.693818] env[62070]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-62256313-4905-4da3-89a6-ac568189f7db {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.716764] env[62070]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1039.716764] env[62070]: value = "task-1122336" [ 1039.716764] env[62070]: _type = "Task" [ 1039.716764] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1039.727814] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1122336, 'name': CreateVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1039.828818] env[62070]: DEBUG nova.scheduler.client.report [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1039.854618] env[62070]: DEBUG nova.compute.manager [req-a1792ec0-21f8-447c-b3ce-adc85d573952 req-9bc50cf2-ca8e-40de-9729-4bbc069b30ae service nova] [instance: 000a67eb-9535-4da6-816a-b61126f11509] Received event network-changed-57c0b4a0-14e4-4b19-b7ee-16842d63fd74 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1039.854708] env[62070]: DEBUG nova.compute.manager [req-a1792ec0-21f8-447c-b3ce-adc85d573952 req-9bc50cf2-ca8e-40de-9729-4bbc069b30ae service nova] [instance: 000a67eb-9535-4da6-816a-b61126f11509] Refreshing instance network info cache due to event network-changed-57c0b4a0-14e4-4b19-b7ee-16842d63fd74. 
{{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1039.854888] env[62070]: DEBUG oslo_concurrency.lockutils [req-a1792ec0-21f8-447c-b3ce-adc85d573952 req-9bc50cf2-ca8e-40de-9729-4bbc069b30ae service nova] Acquiring lock "refresh_cache-000a67eb-9535-4da6-816a-b61126f11509" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1039.855106] env[62070]: DEBUG oslo_concurrency.lockutils [req-a1792ec0-21f8-447c-b3ce-adc85d573952 req-9bc50cf2-ca8e-40de-9729-4bbc069b30ae service nova] Acquired lock "refresh_cache-000a67eb-9535-4da6-816a-b61126f11509" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1039.855310] env[62070]: DEBUG nova.network.neutron [req-a1792ec0-21f8-447c-b3ce-adc85d573952 req-9bc50cf2-ca8e-40de-9729-4bbc069b30ae service nova] [instance: 000a67eb-9535-4da6-816a-b61126f11509] Refreshing network info cache for port 57c0b4a0-14e4-4b19-b7ee-16842d63fd74 {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1040.053496] env[62070]: DEBUG oslo_vmware.api [None req-2dd24fa0-f731-48ce-8a8d-1fa6fec29bbf tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Task: {'id': task-1122335, 'name': Rename_Task, 'duration_secs': 0.143178} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1040.053873] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-2dd24fa0-f731-48ce-8a8d-1fa6fec29bbf tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: d65b4f74-7df5-4fb8-baa3-4f5b9b480cb2] Powering on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1040.054017] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-52750163-b2e2-4bde-af98-f60e490dc9b9 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.059875] env[62070]: DEBUG oslo_vmware.api [None req-2dd24fa0-f731-48ce-8a8d-1fa6fec29bbf tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Waiting for the task: (returnval){ [ 1040.059875] env[62070]: value = "task-1122337" [ 1040.059875] env[62070]: _type = "Task" [ 1040.059875] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1040.067551] env[62070]: DEBUG oslo_vmware.api [None req-2dd24fa0-f731-48ce-8a8d-1fa6fec29bbf tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Task: {'id': task-1122337, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1040.091509] env[62070]: DEBUG oslo_vmware.api [None req-e326279a-fafc-42e7-9e62-9e1b8572336d tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Task: {'id': task-1122334, 'name': RemoveSnapshot_Task, 'duration_secs': 0.58179} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1040.091873] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-e326279a-fafc-42e7-9e62-9e1b8572336d tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] [instance: 53a1791d-38fd-4721-b82c-2f0922348300] Deleted Snapshot of the VM instance {{(pid=62070) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1040.092204] env[62070]: DEBUG nova.compute.manager [None req-e326279a-fafc-42e7-9e62-9e1b8572336d tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] [instance: 53a1791d-38fd-4721-b82c-2f0922348300] Checking state {{(pid=62070) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1040.093027] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4c335dd-6bea-412d-a7d4-aad5d2baa490 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.227280] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1122336, 'name': CreateVM_Task, 'duration_secs': 0.337309} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1040.227464] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 000a67eb-9535-4da6-816a-b61126f11509] Created VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1040.228166] env[62070]: DEBUG oslo_concurrency.lockutils [None req-d0ef37a9-6db8-476c-932a-937fc6ece311 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1040.228383] env[62070]: DEBUG oslo_concurrency.lockutils [None req-d0ef37a9-6db8-476c-932a-937fc6ece311 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Acquired lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1040.228759] env[62070]: DEBUG oslo_concurrency.lockutils [None req-d0ef37a9-6db8-476c-932a-937fc6ece311 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1040.228985] env[62070]: DEBUG nova.network.neutron [-] [instance: 6b6e22b9-71fb-4139-993a-7b9fcf89d8e0] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1040.230224] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e9cb47e1-aba3-48ee-a9c3-c46d5f59d0b6 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.234865] env[62070]: DEBUG oslo_vmware.api [None req-d0ef37a9-6db8-476c-932a-937fc6ece311 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Waiting for the task: 
(returnval){ [ 1040.234865] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]52550677-6135-5c1a-56c6-0a782e267ed5" [ 1040.234865] env[62070]: _type = "Task" [ 1040.234865] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1040.243678] env[62070]: DEBUG oslo_vmware.api [None req-d0ef37a9-6db8-476c-932a-937fc6ece311 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52550677-6135-5c1a-56c6-0a782e267ed5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1040.333663] env[62070]: DEBUG nova.compute.resource_tracker [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62070) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1040.334025] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.984s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1040.334409] env[62070]: DEBUG oslo_concurrency.lockutils [None req-eebc804e-1b9f-461a-b6cc-fd58e5463e88 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 10.825s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1040.334674] env[62070]: DEBUG oslo_concurrency.lockutils [None req-eebc804e-1b9f-461a-b6cc-fd58e5463e88 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1040.336960] env[62070]: DEBUG oslo_concurrency.lockutils [None req-516d037f-acfd-4f04-ae39-a1a5f0a01610 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 7.185s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1040.337172] env[62070]: DEBUG oslo_concurrency.lockutils [None req-516d037f-acfd-4f04-ae39-a1a5f0a01610 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1040.339332] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6eb3f18f-50a0-4a78-8a7f-eefd7473ff66 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.552s {{(pid=62070) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1040.340813] env[62070]: INFO nova.compute.claims [None req-6eb3f18f-50a0-4a78-8a7f-eefd7473ff66 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: d8284a01-bbf6-4607-b2db-33bf2cd5457d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1040.366198] env[62070]: INFO nova.scheduler.client.report [None req-eebc804e-1b9f-461a-b6cc-fd58e5463e88 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Deleted allocations for instance 71aead12-a182-40a7-b5a9-91c01271b800 [ 1040.367885] env[62070]: INFO nova.scheduler.client.report [None req-516d037f-acfd-4f04-ae39-a1a5f0a01610 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Deleted allocations for instance 3d22f50a-e1b7-48f9-a044-df64d01dfeb4 [ 1040.502060] env[62070]: DEBUG oslo_concurrency.lockutils [None req-1748f89f-2272-4737-a3fc-d0537c700b0b tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] Acquiring lock "1ababba6-838c-4ba6-bd83-e2b15aaf4b97" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1040.502432] env[62070]: DEBUG oslo_concurrency.lockutils [None req-1748f89f-2272-4737-a3fc-d0537c700b0b tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] Lock "1ababba6-838c-4ba6-bd83-e2b15aaf4b97" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1040.502723] env[62070]: DEBUG oslo_concurrency.lockutils [None req-1748f89f-2272-4737-a3fc-d0537c700b0b tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] Acquiring lock "1ababba6-838c-4ba6-bd83-e2b15aaf4b97-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1040.502982] env[62070]: DEBUG oslo_concurrency.lockutils [None req-1748f89f-2272-4737-a3fc-d0537c700b0b tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] Lock "1ababba6-838c-4ba6-bd83-e2b15aaf4b97-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1040.503242] env[62070]: DEBUG oslo_concurrency.lockutils [None req-1748f89f-2272-4737-a3fc-d0537c700b0b tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] Lock "1ababba6-838c-4ba6-bd83-e2b15aaf4b97-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1040.506634] env[62070]: INFO nova.compute.manager [None req-1748f89f-2272-4737-a3fc-d0537c700b0b tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] [instance: 1ababba6-838c-4ba6-bd83-e2b15aaf4b97] Terminating instance 
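[editor's note] The lockutils records above ("Acquiring lock"/"Acquired lock" at lockutils.py:310/313, and the "acquired by ... :: waited Ns" / '"released" ... :: held Ns' pairs from the inner wrapper at :407/:421) are produced by oslo.concurrency's named-lock helpers around critical sections such as "compute_resources", the per-instance "refresh_cache-<uuid>" caches, and the terminate_instance lock. A minimal sketch of that pattern, assuming oslo.concurrency is installed; the function and lock names below are illustrative, not Nova's actual code:

    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def update_usage():
        # Only one thread/greenthread enters this block at a time; the
        # decorator's wrapper logs how long the caller waited for the lock
        # and, on exit, how long it was held -- the "waited"/"held" lines.
        pass

    # The same helper is also a context manager for ad-hoc scopes, which is
    # what emits the plain "Acquiring/Acquired/Releasing lock" records for
    # the refresh_cache-<uuid> locks seen above.
    with lockutils.lock('refresh_cache-000a67eb-9535-4da6-816a-b61126f11509'):
        pass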
[ 1040.509456] env[62070]: DEBUG nova.compute.manager [None req-1748f89f-2272-4737-a3fc-d0537c700b0b tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] [instance: 1ababba6-838c-4ba6-bd83-e2b15aaf4b97] Start destroying the instance on the hypervisor. {{(pid=62070) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1040.509730] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-1748f89f-2272-4737-a3fc-d0537c700b0b tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] [instance: 1ababba6-838c-4ba6-bd83-e2b15aaf4b97] Destroying instance {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1040.511045] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00a06d9c-f381-4709-a39e-2b748b1b14e8 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.523019] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-1748f89f-2272-4737-a3fc-d0537c700b0b tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] [instance: 1ababba6-838c-4ba6-bd83-e2b15aaf4b97] Powering off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1040.523312] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-162e3862-1e9c-4e0c-bb95-a260751f6579 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.530101] env[62070]: DEBUG oslo_vmware.api [None req-1748f89f-2272-4737-a3fc-d0537c700b0b tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] Waiting for the task: (returnval){ [ 1040.530101] env[62070]: value = "task-1122338" [ 1040.530101] env[62070]: _type = "Task" [ 1040.530101] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1040.538382] env[62070]: DEBUG oslo_vmware.api [None req-1748f89f-2272-4737-a3fc-d0537c700b0b tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] Task: {'id': task-1122338, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1040.569394] env[62070]: DEBUG oslo_vmware.api [None req-2dd24fa0-f731-48ce-8a8d-1fa6fec29bbf tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Task: {'id': task-1122337, 'name': PowerOnVM_Task, 'duration_secs': 0.491034} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1040.569568] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-2dd24fa0-f731-48ce-8a8d-1fa6fec29bbf tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: d65b4f74-7df5-4fb8-baa3-4f5b9b480cb2] Powered on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1040.569805] env[62070]: INFO nova.compute.manager [None req-2dd24fa0-f731-48ce-8a8d-1fa6fec29bbf tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: d65b4f74-7df5-4fb8-baa3-4f5b9b480cb2] Took 7.81 seconds to spawn the instance on the hypervisor. 
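[editor's note] The CopyVirtualDisk / ExtendVirtualDisk / ReconfigVM / Rename / PowerOnVM steps above all follow the same oslo.vmware pattern: the driver invokes a vSphere *_Task method through the API session, gets a task reference back, and polls it, which yields the "Waiting for the task", "progress is N%" and "completed successfully" records. A rough sketch of that call pattern, assuming oslo.vmware is installed; the endpoint, credentials and vm_ref below are placeholders, not values from this log:

    from oslo_vmware import api as vmware_api

    # Placeholder vCenter endpoint and credentials.
    session = vmware_api.VMwareAPISession(
        'vcenter.example.test', 'user', 'secret',
        api_retry_count=10, task_poll_interval=0.5)

    # vm_ref would normally come from a prior PropertyCollector lookup;
    # shown here only as a placeholder.
    vm_ref = ...

    # invoke_api() issues the SOAP call (the "Invoking
    # VirtualMachine.PowerOnVM_Task" lines); wait_for_task() polls the
    # returned task at task_poll_interval and logs its progress until it
    # completes or raises on error.
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    session.wait_for_task(task)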
[ 1040.569996] env[62070]: DEBUG nova.compute.manager [None req-2dd24fa0-f731-48ce-8a8d-1fa6fec29bbf tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: d65b4f74-7df5-4fb8-baa3-4f5b9b480cb2] Checking state {{(pid=62070) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1040.570798] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77fd0a71-062e-46ea-931f-e7299a1ad75f {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.590697] env[62070]: DEBUG nova.network.neutron [req-a1792ec0-21f8-447c-b3ce-adc85d573952 req-9bc50cf2-ca8e-40de-9729-4bbc069b30ae service nova] [instance: 000a67eb-9535-4da6-816a-b61126f11509] Updated VIF entry in instance network info cache for port 57c0b4a0-14e4-4b19-b7ee-16842d63fd74. {{(pid=62070) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1040.591092] env[62070]: DEBUG nova.network.neutron [req-a1792ec0-21f8-447c-b3ce-adc85d573952 req-9bc50cf2-ca8e-40de-9729-4bbc069b30ae service nova] [instance: 000a67eb-9535-4da6-816a-b61126f11509] Updating instance_info_cache with network_info: [{"id": "57c0b4a0-14e4-4b19-b7ee-16842d63fd74", "address": "fa:16:3e:87:a3:a2", "network": {"id": "6ea9aade-1b40-4ce8-a502-14ff09a4ab40", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1617295069-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "735d24ccc5614660a5b34d77af648f94", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5b8af79a-31d5-4d78-93d7-3919aa1d9186", "external-id": "nsx-vlan-transportzone-324", "segmentation_id": 324, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap57c0b4a0-14", "ovs_interfaceid": "57c0b4a0-14e4-4b19-b7ee-16842d63fd74", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1040.606140] env[62070]: INFO nova.compute.manager [None req-e326279a-fafc-42e7-9e62-9e1b8572336d tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] [instance: 53a1791d-38fd-4721-b82c-2f0922348300] Shelve offloading [ 1040.608340] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-e326279a-fafc-42e7-9e62-9e1b8572336d tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] [instance: 53a1791d-38fd-4721-b82c-2f0922348300] Powering off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1040.608953] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-cd32b350-c1fe-437e-89c0-611d6294a0ad {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.617753] env[62070]: DEBUG oslo_vmware.api [None req-e326279a-fafc-42e7-9e62-9e1b8572336d 
tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Waiting for the task: (returnval){ [ 1040.617753] env[62070]: value = "task-1122339" [ 1040.617753] env[62070]: _type = "Task" [ 1040.617753] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1040.629736] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-e326279a-fafc-42e7-9e62-9e1b8572336d tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] [instance: 53a1791d-38fd-4721-b82c-2f0922348300] VM already powered off {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1509}} [ 1040.630297] env[62070]: DEBUG nova.compute.manager [None req-e326279a-fafc-42e7-9e62-9e1b8572336d tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] [instance: 53a1791d-38fd-4721-b82c-2f0922348300] Checking state {{(pid=62070) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1040.632226] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86a9e07b-2404-4cfd-a584-3fe44fceff8a {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.640254] env[62070]: DEBUG oslo_concurrency.lockutils [None req-e326279a-fafc-42e7-9e62-9e1b8572336d tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Acquiring lock "refresh_cache-53a1791d-38fd-4721-b82c-2f0922348300" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1040.640508] env[62070]: DEBUG oslo_concurrency.lockutils [None req-e326279a-fafc-42e7-9e62-9e1b8572336d tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Acquired lock "refresh_cache-53a1791d-38fd-4721-b82c-2f0922348300" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1040.640767] env[62070]: DEBUG nova.network.neutron [None req-e326279a-fafc-42e7-9e62-9e1b8572336d tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] [instance: 53a1791d-38fd-4721-b82c-2f0922348300] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1040.677022] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac84e6aa-3892-41ba-937a-708e741ffe39 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.699529] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-bf2c8c8b-db77-4ce2-a07d-2a2532257024 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 33d04e59-da01-4ba3-ac42-ab93372a332d] Updating instance '33d04e59-da01-4ba3-ac42-ab93372a332d' progress to 0 {{(pid=62070) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 1040.733462] env[62070]: INFO nova.compute.manager [-] [instance: 6b6e22b9-71fb-4139-993a-7b9fcf89d8e0] Took 1.43 seconds to deallocate network for instance. 
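[editor's note] The "Waiting for function ... to return" records (loopingcall.py:435) earlier in this window come from oslo.service's looping-call helper, which Nova uses to drive retried or asynchronously completed steps such as network deallocation and CreateVM. A small sketch of that mechanism, assuming oslo.service is installed; the retry function is illustrative and the exact looping-call variant differs per Nova call site:

    from oslo_service import loopingcall

    attempts = {'n': 0}

    def _deallocate_with_retries():
        # Illustrative body: pretend the operation succeeds on the third try.
        attempts['n'] += 1
        if attempts['n'] < 3:
            return  # returning normally means "run me again after `interval`"
        # Raising LoopingCallDone stops the loop; its argument becomes the
        # value returned by .wait() below.
        raise loopingcall.LoopingCallDone(True)

    timer = loopingcall.FixedIntervalLoopingCall(_deallocate_with_retries)
    result = timer.start(interval=0.5).wait()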
[ 1040.749059] env[62070]: DEBUG oslo_vmware.api [None req-d0ef37a9-6db8-476c-932a-937fc6ece311 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52550677-6135-5c1a-56c6-0a782e267ed5, 'name': SearchDatastore_Task, 'duration_secs': 0.015854} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1040.749059] env[62070]: DEBUG oslo_concurrency.lockutils [None req-d0ef37a9-6db8-476c-932a-937fc6ece311 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Releasing lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1040.749251] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-d0ef37a9-6db8-476c-932a-937fc6ece311 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] [instance: 000a67eb-9535-4da6-816a-b61126f11509] Processing image 43ea607c-7ece-4601-9b11-75c6a16aa7dd {{(pid=62070) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1040.749498] env[62070]: DEBUG oslo_concurrency.lockutils [None req-d0ef37a9-6db8-476c-932a-937fc6ece311 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1040.749709] env[62070]: DEBUG oslo_concurrency.lockutils [None req-d0ef37a9-6db8-476c-932a-937fc6ece311 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Acquired lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1040.749938] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-d0ef37a9-6db8-476c-932a-937fc6ece311 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1040.750252] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-613506f1-b467-4eb2-b3bd-e2347866296b {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.760511] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-d0ef37a9-6db8-476c-932a-937fc6ece311 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1040.760721] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-d0ef37a9-6db8-476c-932a-937fc6ece311 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62070) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1040.761701] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1bb8d62e-a5b5-4852-b705-7f7201e786e4 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.767596] env[62070]: DEBUG oslo_vmware.api [None req-d0ef37a9-6db8-476c-932a-937fc6ece311 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Waiting for the task: (returnval){ [ 1040.767596] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]52406070-ce46-0f19-86a2-b205f9b55d8d" [ 1040.767596] env[62070]: _type = "Task" [ 1040.767596] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1040.776466] env[62070]: DEBUG oslo_vmware.api [None req-d0ef37a9-6db8-476c-932a-937fc6ece311 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52406070-ce46-0f19-86a2-b205f9b55d8d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1040.879141] env[62070]: DEBUG oslo_concurrency.lockutils [None req-eebc804e-1b9f-461a-b6cc-fd58e5463e88 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Lock "71aead12-a182-40a7-b5a9-91c01271b800" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 14.836s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1040.880681] env[62070]: DEBUG oslo_concurrency.lockutils [None req-516d037f-acfd-4f04-ae39-a1a5f0a01610 tempest-ServerRescueNegativeTestJSON-883366746 tempest-ServerRescueNegativeTestJSON-883366746-project-member] Lock "3d22f50a-e1b7-48f9-a044-df64d01dfeb4" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 10.961s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1041.042024] env[62070]: DEBUG oslo_vmware.api [None req-1748f89f-2272-4737-a3fc-d0537c700b0b tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] Task: {'id': task-1122338, 'name': PowerOffVM_Task, 'duration_secs': 0.435952} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1041.042354] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-1748f89f-2272-4737-a3fc-d0537c700b0b tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] [instance: 1ababba6-838c-4ba6-bd83-e2b15aaf4b97] Powered off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1041.042610] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-1748f89f-2272-4737-a3fc-d0537c700b0b tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] [instance: 1ababba6-838c-4ba6-bd83-e2b15aaf4b97] Unregistering the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1041.042924] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b6d6606f-e636-4b42-8d60-dd3fe239e3c8 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.087739] env[62070]: INFO nova.compute.manager [None req-2dd24fa0-f731-48ce-8a8d-1fa6fec29bbf tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: d65b4f74-7df5-4fb8-baa3-4f5b9b480cb2] Took 20.44 seconds to build instance. [ 1041.093382] env[62070]: DEBUG oslo_concurrency.lockutils [req-a1792ec0-21f8-447c-b3ce-adc85d573952 req-9bc50cf2-ca8e-40de-9729-4bbc069b30ae service nova] Releasing lock "refresh_cache-000a67eb-9535-4da6-816a-b61126f11509" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1041.111531] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-1748f89f-2272-4737-a3fc-d0537c700b0b tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] [instance: 1ababba6-838c-4ba6-bd83-e2b15aaf4b97] Unregistered the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1041.111879] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-1748f89f-2272-4737-a3fc-d0537c700b0b tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] [instance: 1ababba6-838c-4ba6-bd83-e2b15aaf4b97] Deleting contents of the VM from datastore datastore1 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1041.112109] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-1748f89f-2272-4737-a3fc-d0537c700b0b tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] Deleting the datastore file [datastore1] 1ababba6-838c-4ba6-bd83-e2b15aaf4b97 {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1041.112437] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-10707d89-77f5-4a4e-90e9-7b836b888438 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.119272] env[62070]: DEBUG oslo_vmware.api [None req-1748f89f-2272-4737-a3fc-d0537c700b0b tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] Waiting for the task: (returnval){ [ 1041.119272] env[62070]: value = "task-1122341" [ 1041.119272] env[62070]: _type = "Task" [ 1041.119272] env[62070]: } to complete. 
{{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1041.130611] env[62070]: DEBUG oslo_vmware.api [None req-1748f89f-2272-4737-a3fc-d0537c700b0b tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] Task: {'id': task-1122341, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1041.206158] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf2c8c8b-db77-4ce2-a07d-2a2532257024 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 33d04e59-da01-4ba3-ac42-ab93372a332d] Powering off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1041.209171] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-349ee576-5304-4826-bcc5-d2b8ce46bb21 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.215336] env[62070]: DEBUG oslo_vmware.api [None req-bf2c8c8b-db77-4ce2-a07d-2a2532257024 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Waiting for the task: (returnval){ [ 1041.215336] env[62070]: value = "task-1122342" [ 1041.215336] env[62070]: _type = "Task" [ 1041.215336] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1041.227326] env[62070]: DEBUG oslo_vmware.api [None req-bf2c8c8b-db77-4ce2-a07d-2a2532257024 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Task: {'id': task-1122342, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1041.242938] env[62070]: DEBUG oslo_concurrency.lockutils [None req-44d33cde-4ced-4c6b-a9e1-712161220918 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1041.279060] env[62070]: DEBUG oslo_vmware.api [None req-d0ef37a9-6db8-476c-932a-937fc6ece311 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52406070-ce46-0f19-86a2-b205f9b55d8d, 'name': SearchDatastore_Task, 'duration_secs': 0.015086} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1041.279878] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b5740499-0658-4523-a18e-01c0b9dbcc6a {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.289147] env[62070]: DEBUG oslo_vmware.api [None req-d0ef37a9-6db8-476c-932a-937fc6ece311 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Waiting for the task: (returnval){ [ 1041.289147] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]527e3165-8b4d-6912-d09f-fdcb4ab6d25c" [ 1041.289147] env[62070]: _type = "Task" [ 1041.289147] env[62070]: } to complete. 
{{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1041.297451] env[62070]: DEBUG oslo_vmware.api [None req-d0ef37a9-6db8-476c-932a-937fc6ece311 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]527e3165-8b4d-6912-d09f-fdcb4ab6d25c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1041.570789] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f4a10c2-4fbc-4d83-9c33-6b2a56e0ed50 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.580104] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-526dd3db-4278-487c-93f1-aa1e08d1a7bd {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.611364] env[62070]: DEBUG oslo_concurrency.lockutils [None req-2dd24fa0-f731-48ce-8a8d-1fa6fec29bbf tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Lock "d65b4f74-7df5-4fb8-baa3-4f5b9b480cb2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 21.970s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1041.612392] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6b53ac7-9a3e-41b3-b9ee-3ffecbe4475a {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.620881] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bad68cc-c03a-4643-997d-fe20dd560a00 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.640551] env[62070]: DEBUG nova.compute.provider_tree [None req-6eb3f18f-50a0-4a78-8a7f-eefd7473ff66 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1041.645022] env[62070]: DEBUG nova.network.neutron [None req-e326279a-fafc-42e7-9e62-9e1b8572336d tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] [instance: 53a1791d-38fd-4721-b82c-2f0922348300] Updating instance_info_cache with network_info: [{"id": "2c6759e4-b6e7-4b67-b06d-d38d6043d3b2", "address": "fa:16:3e:11:96:b8", "network": {"id": "6a62b79f-a98b-4518-86cb-facc7b77da1d", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-2107556336-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "772f7fcee5f44b899b6df797e1ed5ddd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", 
"port_filter": true, "nsx-logical-switch-id": "78e1ebb0-0130-446b-bf73-a0e59bbb95cc", "external-id": "nsx-vlan-transportzone-414", "segmentation_id": 414, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2c6759e4-b6", "ovs_interfaceid": "2c6759e4-b6e7-4b67-b06d-d38d6043d3b2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1041.647167] env[62070]: DEBUG oslo_vmware.api [None req-1748f89f-2272-4737-a3fc-d0537c700b0b tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] Task: {'id': task-1122341, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.391514} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1041.647770] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-1748f89f-2272-4737-a3fc-d0537c700b0b tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] Deleted the datastore file {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1041.649147] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-1748f89f-2272-4737-a3fc-d0537c700b0b tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] [instance: 1ababba6-838c-4ba6-bd83-e2b15aaf4b97] Deleted contents of the VM from datastore datastore1 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1041.649353] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-1748f89f-2272-4737-a3fc-d0537c700b0b tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] [instance: 1ababba6-838c-4ba6-bd83-e2b15aaf4b97] Instance destroyed {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1041.649541] env[62070]: INFO nova.compute.manager [None req-1748f89f-2272-4737-a3fc-d0537c700b0b tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] [instance: 1ababba6-838c-4ba6-bd83-e2b15aaf4b97] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1041.649792] env[62070]: DEBUG oslo.service.loopingcall [None req-1748f89f-2272-4737-a3fc-d0537c700b0b tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1041.649986] env[62070]: DEBUG nova.compute.manager [-] [instance: 1ababba6-838c-4ba6-bd83-e2b15aaf4b97] Deallocating network for instance {{(pid=62070) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1041.651421] env[62070]: DEBUG nova.network.neutron [-] [instance: 1ababba6-838c-4ba6-bd83-e2b15aaf4b97] deallocate_for_instance() {{(pid=62070) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1041.727153] env[62070]: DEBUG oslo_vmware.api [None req-bf2c8c8b-db77-4ce2-a07d-2a2532257024 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Task: {'id': task-1122342, 'name': PowerOffVM_Task, 'duration_secs': 0.21204} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1041.729018] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf2c8c8b-db77-4ce2-a07d-2a2532257024 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 33d04e59-da01-4ba3-ac42-ab93372a332d] Powered off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1041.729018] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-bf2c8c8b-db77-4ce2-a07d-2a2532257024 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 33d04e59-da01-4ba3-ac42-ab93372a332d] Updating instance '33d04e59-da01-4ba3-ac42-ab93372a332d' progress to 17 {{(pid=62070) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 1041.800454] env[62070]: DEBUG oslo_vmware.api [None req-d0ef37a9-6db8-476c-932a-937fc6ece311 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]527e3165-8b4d-6912-d09f-fdcb4ab6d25c, 'name': SearchDatastore_Task, 'duration_secs': 0.024989} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1041.803184] env[62070]: DEBUG oslo_concurrency.lockutils [None req-d0ef37a9-6db8-476c-932a-937fc6ece311 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Releasing lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1041.803184] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-d0ef37a9-6db8-476c-932a-937fc6ece311 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore2] 000a67eb-9535-4da6-816a-b61126f11509/000a67eb-9535-4da6-816a-b61126f11509.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1041.803184] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-fa6d2ec4-97db-4a10-881e-d572840ab8cf {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.809332] env[62070]: DEBUG oslo_vmware.api [None req-d0ef37a9-6db8-476c-932a-937fc6ece311 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Waiting for the task: (returnval){ [ 1041.809332] env[62070]: value = "task-1122343" [ 1041.809332] env[62070]: _type = "Task" [ 1041.809332] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1041.817332] env[62070]: DEBUG oslo_vmware.api [None req-d0ef37a9-6db8-476c-932a-937fc6ece311 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Task: {'id': task-1122343, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1041.899398] env[62070]: DEBUG nova.compute.manager [req-7f7f3bd6-1b18-4fcb-a4f8-8f4dddb26f22 req-a7f64bff-c1e5-4a76-8438-fd5b145d2e2d service nova] [instance: 6b6e22b9-71fb-4139-993a-7b9fcf89d8e0] Received event network-vif-deleted-eca3debc-4e8e-4e1b-af00-7eca703ad396 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1041.899642] env[62070]: DEBUG nova.compute.manager [req-7f7f3bd6-1b18-4fcb-a4f8-8f4dddb26f22 req-a7f64bff-c1e5-4a76-8438-fd5b145d2e2d service nova] [instance: 1ababba6-838c-4ba6-bd83-e2b15aaf4b97] Received event network-changed-df67c123-2618-45ce-8175-66a34206293a {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1041.899816] env[62070]: DEBUG nova.compute.manager [req-7f7f3bd6-1b18-4fcb-a4f8-8f4dddb26f22 req-a7f64bff-c1e5-4a76-8438-fd5b145d2e2d service nova] [instance: 1ababba6-838c-4ba6-bd83-e2b15aaf4b97] Refreshing instance network info cache due to event network-changed-df67c123-2618-45ce-8175-66a34206293a. {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1041.900053] env[62070]: DEBUG oslo_concurrency.lockutils [req-7f7f3bd6-1b18-4fcb-a4f8-8f4dddb26f22 req-a7f64bff-c1e5-4a76-8438-fd5b145d2e2d service nova] Acquiring lock "refresh_cache-1ababba6-838c-4ba6-bd83-e2b15aaf4b97" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1041.900246] env[62070]: DEBUG oslo_concurrency.lockutils [req-7f7f3bd6-1b18-4fcb-a4f8-8f4dddb26f22 req-a7f64bff-c1e5-4a76-8438-fd5b145d2e2d service nova] Acquired lock "refresh_cache-1ababba6-838c-4ba6-bd83-e2b15aaf4b97" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1041.900472] env[62070]: DEBUG nova.network.neutron [req-7f7f3bd6-1b18-4fcb-a4f8-8f4dddb26f22 req-a7f64bff-c1e5-4a76-8438-fd5b145d2e2d service nova] [instance: 1ababba6-838c-4ba6-bd83-e2b15aaf4b97] Refreshing network info cache for port df67c123-2618-45ce-8175-66a34206293a {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1042.149209] env[62070]: DEBUG oslo_concurrency.lockutils [None req-e326279a-fafc-42e7-9e62-9e1b8572336d tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Releasing lock "refresh_cache-53a1791d-38fd-4721-b82c-2f0922348300" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1042.152195] env[62070]: DEBUG nova.scheduler.client.report [None req-6eb3f18f-50a0-4a78-8a7f-eefd7473ff66 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1042.242168] env[62070]: DEBUG nova.virt.hardware [None req-bf2c8c8b-db77-4ce2-a07d-2a2532257024 tempest-DeleteServersTestJSON-2112665073 
tempest-DeleteServersTestJSON-2112665073-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T09:21:37Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=43ea607c-7ece-4601-9b11-75c6a16aa7dd,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1042.242168] env[62070]: DEBUG nova.virt.hardware [None req-bf2c8c8b-db77-4ce2-a07d-2a2532257024 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Flavor limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1042.242168] env[62070]: DEBUG nova.virt.hardware [None req-bf2c8c8b-db77-4ce2-a07d-2a2532257024 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Image limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1042.242168] env[62070]: DEBUG nova.virt.hardware [None req-bf2c8c8b-db77-4ce2-a07d-2a2532257024 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Flavor pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1042.242168] env[62070]: DEBUG nova.virt.hardware [None req-bf2c8c8b-db77-4ce2-a07d-2a2532257024 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Image pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1042.242168] env[62070]: DEBUG nova.virt.hardware [None req-bf2c8c8b-db77-4ce2-a07d-2a2532257024 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1042.242168] env[62070]: DEBUG nova.virt.hardware [None req-bf2c8c8b-db77-4ce2-a07d-2a2532257024 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1042.242168] env[62070]: DEBUG nova.virt.hardware [None req-bf2c8c8b-db77-4ce2-a07d-2a2532257024 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1042.242168] env[62070]: DEBUG nova.virt.hardware [None req-bf2c8c8b-db77-4ce2-a07d-2a2532257024 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Got 1 possible topologies {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1042.242168] env[62070]: DEBUG nova.virt.hardware [None req-bf2c8c8b-db77-4ce2-a07d-2a2532257024 tempest-DeleteServersTestJSON-2112665073 
tempest-DeleteServersTestJSON-2112665073-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1042.242729] env[62070]: DEBUG nova.virt.hardware [None req-bf2c8c8b-db77-4ce2-a07d-2a2532257024 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1042.248838] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3c725cad-9e39-4f1e-948c-8b56a7bcc70b {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.270025] env[62070]: DEBUG oslo_vmware.api [None req-bf2c8c8b-db77-4ce2-a07d-2a2532257024 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Waiting for the task: (returnval){ [ 1042.270025] env[62070]: value = "task-1122344" [ 1042.270025] env[62070]: _type = "Task" [ 1042.270025] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1042.280910] env[62070]: DEBUG oslo_vmware.api [None req-bf2c8c8b-db77-4ce2-a07d-2a2532257024 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Task: {'id': task-1122344, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1042.324963] env[62070]: DEBUG oslo_vmware.api [None req-d0ef37a9-6db8-476c-932a-937fc6ece311 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Task: {'id': task-1122343, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1042.398354] env[62070]: DEBUG oslo_concurrency.lockutils [None req-8668c353-fb38-4717-97b5-397aa4b133d7 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Acquiring lock "fec23dd4-e956-42dd-b9a2-c8577f77cd81" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1042.398354] env[62070]: DEBUG oslo_concurrency.lockutils [None req-8668c353-fb38-4717-97b5-397aa4b133d7 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Lock "fec23dd4-e956-42dd-b9a2-c8577f77cd81" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1042.436848] env[62070]: INFO nova.network.neutron [req-7f7f3bd6-1b18-4fcb-a4f8-8f4dddb26f22 req-a7f64bff-c1e5-4a76-8438-fd5b145d2e2d service nova] [instance: 1ababba6-838c-4ba6-bd83-e2b15aaf4b97] Port df67c123-2618-45ce-8175-66a34206293a from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. 
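The "Acquiring lock ... by ..." / "Lock ... acquired ... waited N.NNNs" / "Lock ... released ... held N.NNNs" triples throughout this log are emitted by oslo.concurrency's lockutils: the decorator form logs the acquired/released lines with wait and hold times, while the explicit context-manager form logs the Acquiring/Acquired/Releasing lines used for the "refresh_cache-<uuid>" locks. A minimal sketch of both forms, using only the public lockutils API; the function bodies are placeholders, not Nova's code.

# Minimal sketch of the oslo.concurrency locking patterns that produce the
# lock acquire/wait/hold entries above. Function bodies are placeholders.
from oslo_concurrency import lockutils


@lockutils.synchronized('fec23dd4-e956-42dd-b9a2-c8577f77cd81', external=False)
def build_instance():
    # Critical section: only one caller in this process runs it at a time;
    # the decorator's wrapper logs the "acquired ... waited" and
    # "released ... held" lines around it.
    pass


def refresh_cache(instance_uuid):
    # Explicit context-manager form, as used for the per-instance
    # "refresh_cache-<uuid>" locks seen in this log.
    with lockutils.lock('refresh_cache-%s' % instance_uuid):
        pass  # rebuild the instance's network info cache here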
[ 1042.436848] env[62070]: DEBUG nova.network.neutron [req-7f7f3bd6-1b18-4fcb-a4f8-8f4dddb26f22 req-a7f64bff-c1e5-4a76-8438-fd5b145d2e2d service nova] [instance: 1ababba6-838c-4ba6-bd83-e2b15aaf4b97] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1042.574612] env[62070]: DEBUG nova.compute.manager [req-79a6ab7c-3f15-48c5-8cd1-4292da7e7741 req-ef7cf625-5098-46fc-a2e6-04bb16680f0b service nova] [instance: 53a1791d-38fd-4721-b82c-2f0922348300] Received event network-vif-unplugged-2c6759e4-b6e7-4b67-b06d-d38d6043d3b2 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1042.574851] env[62070]: DEBUG oslo_concurrency.lockutils [req-79a6ab7c-3f15-48c5-8cd1-4292da7e7741 req-ef7cf625-5098-46fc-a2e6-04bb16680f0b service nova] Acquiring lock "53a1791d-38fd-4721-b82c-2f0922348300-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1042.575208] env[62070]: DEBUG oslo_concurrency.lockutils [req-79a6ab7c-3f15-48c5-8cd1-4292da7e7741 req-ef7cf625-5098-46fc-a2e6-04bb16680f0b service nova] Lock "53a1791d-38fd-4721-b82c-2f0922348300-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1042.575463] env[62070]: DEBUG oslo_concurrency.lockutils [req-79a6ab7c-3f15-48c5-8cd1-4292da7e7741 req-ef7cf625-5098-46fc-a2e6-04bb16680f0b service nova] Lock "53a1791d-38fd-4721-b82c-2f0922348300-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1042.575697] env[62070]: DEBUG nova.compute.manager [req-79a6ab7c-3f15-48c5-8cd1-4292da7e7741 req-ef7cf625-5098-46fc-a2e6-04bb16680f0b service nova] [instance: 53a1791d-38fd-4721-b82c-2f0922348300] No waiting events found dispatching network-vif-unplugged-2c6759e4-b6e7-4b67-b06d-d38d6043d3b2 {{(pid=62070) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1042.575926] env[62070]: WARNING nova.compute.manager [req-79a6ab7c-3f15-48c5-8cd1-4292da7e7741 req-ef7cf625-5098-46fc-a2e6-04bb16680f0b service nova] [instance: 53a1791d-38fd-4721-b82c-2f0922348300] Received unexpected event network-vif-unplugged-2c6759e4-b6e7-4b67-b06d-d38d6043d3b2 for instance with vm_state shelved and task_state shelving_offloading. 
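The "-events" lock, the "No waiting events found dispatching network-vif-unplugged-..." entry and the WARNING above all belong to the external-event handshake: the compute service can register a waiter for a Neutron port event before an operation, and the incoming event either wakes that waiter or, if nobody registered, is logged as unexpected. A much-simplified, hypothetical sketch of that handshake follows; the class and method names (EventWaiter, expect, dispatch) are illustrative and are not Nova's actual objects.

# Simplified, hypothetical sketch of a wait-for-external-event handshake
# like the one the log lines above refer to. Names are illustrative;
# this is not Nova's implementation.
import threading
from collections import defaultdict


class EventWaiter(object):
    def __init__(self):
        self._lock = threading.Lock()               # stands in for the "-events" lock
        self._pending = defaultdict(threading.Event)

    def expect(self, name):
        """Register interest in an event, e.g. 'network-vif-unplugged-<port>'."""
        with self._lock:
            return self._pending[name]

    def dispatch(self, name):
        """Deliver an incoming event; report it if nobody was waiting."""
        with self._lock:
            event = self._pending.pop(name, None)
        if event is None:
            print('Received unexpected event %s' % name)  # the WARNING case above
        else:
            event.set()                                   # wake the waiter


waiter = EventWaiter()
ev = waiter.expect('network-vif-unplugged-2c6759e4')
waiter.dispatch('network-vif-unplugged-2c6759e4')
ev.wait(timeout=1)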
[ 1042.595062] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-e326279a-fafc-42e7-9e62-9e1b8572336d tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] [instance: 53a1791d-38fd-4721-b82c-2f0922348300] Destroying instance {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1042.595939] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b095c22-f44c-4463-95bb-40d20e67b840 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.605425] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-e326279a-fafc-42e7-9e62-9e1b8572336d tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] [instance: 53a1791d-38fd-4721-b82c-2f0922348300] Unregistering the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1042.605730] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-bceb6722-c00d-4006-a5cb-14d624fbcb3b {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.663027] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6eb3f18f-50a0-4a78-8a7f-eefd7473ff66 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.323s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1042.663027] env[62070]: DEBUG nova.compute.manager [None req-6eb3f18f-50a0-4a78-8a7f-eefd7473ff66 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: d8284a01-bbf6-4607-b2db-33bf2cd5457d] Start building networks asynchronously for instance. {{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1042.665531] env[62070]: DEBUG oslo_concurrency.lockutils [None req-44d33cde-4ced-4c6b-a9e1-712161220918 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.423s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1042.665763] env[62070]: DEBUG nova.objects.instance [None req-44d33cde-4ced-4c6b-a9e1-712161220918 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Lazy-loading 'resources' on Instance uuid 6b6e22b9-71fb-4139-993a-7b9fcf89d8e0 {{(pid=62070) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1042.781453] env[62070]: DEBUG oslo_vmware.api [None req-bf2c8c8b-db77-4ce2-a07d-2a2532257024 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Task: {'id': task-1122344, 'name': ReconfigVM_Task, 'duration_secs': 0.199892} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1042.781677] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-bf2c8c8b-db77-4ce2-a07d-2a2532257024 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 33d04e59-da01-4ba3-ac42-ab93372a332d] Updating instance '33d04e59-da01-4ba3-ac42-ab93372a332d' progress to 33 {{(pid=62070) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 1042.822918] env[62070]: DEBUG oslo_vmware.api [None req-d0ef37a9-6db8-476c-932a-937fc6ece311 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Task: {'id': task-1122343, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.652733} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1042.823210] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-d0ef37a9-6db8-476c-932a-937fc6ece311 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore2] 000a67eb-9535-4da6-816a-b61126f11509/000a67eb-9535-4da6-816a-b61126f11509.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 1042.823641] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-d0ef37a9-6db8-476c-932a-937fc6ece311 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] [instance: 000a67eb-9535-4da6-816a-b61126f11509] Extending root virtual disk to 1048576 {{(pid=62070) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1042.823904] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9dfab255-1e3e-4fe5-9c5f-323400a20d85 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.831844] env[62070]: DEBUG oslo_vmware.api [None req-d0ef37a9-6db8-476c-932a-937fc6ece311 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Waiting for the task: (returnval){ [ 1042.831844] env[62070]: value = "task-1122346" [ 1042.831844] env[62070]: _type = "Task" [ 1042.831844] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1042.840385] env[62070]: DEBUG oslo_vmware.api [None req-d0ef37a9-6db8-476c-932a-937fc6ece311 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Task: {'id': task-1122346, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1042.900242] env[62070]: DEBUG nova.compute.manager [None req-8668c353-fb38-4717-97b5-397aa4b133d7 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: fec23dd4-e956-42dd-b9a2-c8577f77cd81] Starting instance... 
{{(pid=62070) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1042.938189] env[62070]: DEBUG oslo_concurrency.lockutils [req-7f7f3bd6-1b18-4fcb-a4f8-8f4dddb26f22 req-a7f64bff-c1e5-4a76-8438-fd5b145d2e2d service nova] Releasing lock "refresh_cache-1ababba6-838c-4ba6-bd83-e2b15aaf4b97" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1042.977755] env[62070]: DEBUG nova.network.neutron [-] [instance: 1ababba6-838c-4ba6-bd83-e2b15aaf4b97] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1042.994587] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-e326279a-fafc-42e7-9e62-9e1b8572336d tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] [instance: 53a1791d-38fd-4721-b82c-2f0922348300] Unregistered the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1042.994879] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-e326279a-fafc-42e7-9e62-9e1b8572336d tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] [instance: 53a1791d-38fd-4721-b82c-2f0922348300] Deleting contents of the VM from datastore datastore1 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1042.995145] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-e326279a-fafc-42e7-9e62-9e1b8572336d tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Deleting the datastore file [datastore1] 53a1791d-38fd-4721-b82c-2f0922348300 {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1042.995635] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1d00cf81-d256-4f8d-a634-8c5517033914 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.004493] env[62070]: DEBUG oslo_vmware.api [None req-e326279a-fafc-42e7-9e62-9e1b8572336d tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Waiting for the task: (returnval){ [ 1043.004493] env[62070]: value = "task-1122347" [ 1043.004493] env[62070]: _type = "Task" [ 1043.004493] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1043.015507] env[62070]: DEBUG oslo_vmware.api [None req-e326279a-fafc-42e7-9e62-9e1b8572336d tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Task: {'id': task-1122347, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1043.169172] env[62070]: DEBUG nova.compute.utils [None req-6eb3f18f-50a0-4a78-8a7f-eefd7473ff66 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Using /dev/sd instead of None {{(pid=62070) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1043.174086] env[62070]: DEBUG nova.compute.manager [None req-6eb3f18f-50a0-4a78-8a7f-eefd7473ff66 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: d8284a01-bbf6-4607-b2db-33bf2cd5457d] Allocating IP information in the background. 
{{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1043.174205] env[62070]: DEBUG nova.network.neutron [None req-6eb3f18f-50a0-4a78-8a7f-eefd7473ff66 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: d8284a01-bbf6-4607-b2db-33bf2cd5457d] allocate_for_instance() {{(pid=62070) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1043.262097] env[62070]: DEBUG nova.policy [None req-6eb3f18f-50a0-4a78-8a7f-eefd7473ff66 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0aa820b3e16d4d6fbc6bda0b232025fc', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c91e5eeeeb1742f499b2edaf76a93a3b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62070) authorize /opt/stack/nova/nova/policy.py:201}} [ 1043.288970] env[62070]: DEBUG nova.virt.hardware [None req-bf2c8c8b-db77-4ce2-a07d-2a2532257024 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T09:21:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=43ea607c-7ece-4601-9b11-75c6a16aa7dd,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1043.288970] env[62070]: DEBUG nova.virt.hardware [None req-bf2c8c8b-db77-4ce2-a07d-2a2532257024 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Flavor limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1043.288970] env[62070]: DEBUG nova.virt.hardware [None req-bf2c8c8b-db77-4ce2-a07d-2a2532257024 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Image limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1043.288970] env[62070]: DEBUG nova.virt.hardware [None req-bf2c8c8b-db77-4ce2-a07d-2a2532257024 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Flavor pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1043.288970] env[62070]: DEBUG nova.virt.hardware [None req-bf2c8c8b-db77-4ce2-a07d-2a2532257024 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Image pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1043.289430] env[62070]: DEBUG nova.virt.hardware [None req-bf2c8c8b-db77-4ce2-a07d-2a2532257024 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Chose sockets=0, cores=0, threads=0; 
limits were sockets=65536, cores=65536, threads=65536 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1043.289430] env[62070]: DEBUG nova.virt.hardware [None req-bf2c8c8b-db77-4ce2-a07d-2a2532257024 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1043.289573] env[62070]: DEBUG nova.virt.hardware [None req-bf2c8c8b-db77-4ce2-a07d-2a2532257024 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1043.289690] env[62070]: DEBUG nova.virt.hardware [None req-bf2c8c8b-db77-4ce2-a07d-2a2532257024 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Got 1 possible topologies {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1043.290720] env[62070]: DEBUG nova.virt.hardware [None req-bf2c8c8b-db77-4ce2-a07d-2a2532257024 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1043.290720] env[62070]: DEBUG nova.virt.hardware [None req-bf2c8c8b-db77-4ce2-a07d-2a2532257024 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1043.296588] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-bf2c8c8b-db77-4ce2-a07d-2a2532257024 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 33d04e59-da01-4ba3-ac42-ab93372a332d] Reconfiguring VM instance instance-00000060 to detach disk 2000 {{(pid=62070) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1043.300307] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d128d96f-b42d-4f32-9d64-153fc4c2108d {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.323447] env[62070]: DEBUG oslo_vmware.api [None req-bf2c8c8b-db77-4ce2-a07d-2a2532257024 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Waiting for the task: (returnval){ [ 1043.323447] env[62070]: value = "task-1122348" [ 1043.323447] env[62070]: _type = "Task" [ 1043.323447] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1043.334279] env[62070]: DEBUG oslo_vmware.api [None req-bf2c8c8b-db77-4ce2-a07d-2a2532257024 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Task: {'id': task-1122348, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1043.346477] env[62070]: DEBUG oslo_vmware.api [None req-d0ef37a9-6db8-476c-932a-937fc6ece311 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Task: {'id': task-1122346, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.087914} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1043.346767] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-d0ef37a9-6db8-476c-932a-937fc6ece311 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] [instance: 000a67eb-9535-4da6-816a-b61126f11509] Extended root virtual disk {{(pid=62070) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1043.348442] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13068968-60e6-440b-ac09-94b749269c61 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.377571] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-d0ef37a9-6db8-476c-932a-937fc6ece311 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] [instance: 000a67eb-9535-4da6-816a-b61126f11509] Reconfiguring VM instance instance-00000065 to attach disk [datastore2] 000a67eb-9535-4da6-816a-b61126f11509/000a67eb-9535-4da6-816a-b61126f11509.vmdk or device None with type sparse {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1043.380638] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3ad034f4-1ec4-491d-906b-086abedea5c9 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.402233] env[62070]: DEBUG oslo_vmware.api [None req-d0ef37a9-6db8-476c-932a-937fc6ece311 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Waiting for the task: (returnval){ [ 1043.402233] env[62070]: value = "task-1122349" [ 1043.402233] env[62070]: _type = "Task" [ 1043.402233] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1043.415982] env[62070]: DEBUG oslo_vmware.api [None req-d0ef37a9-6db8-476c-932a-937fc6ece311 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Task: {'id': task-1122349, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1043.429588] env[62070]: DEBUG oslo_concurrency.lockutils [None req-8668c353-fb38-4717-97b5-397aa4b133d7 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1043.433425] env[62070]: DEBUG oslo_concurrency.lockutils [None req-42037ebb-c440-40c3-b952-a50cbaace680 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Acquiring lock "d65b4f74-7df5-4fb8-baa3-4f5b9b480cb2" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1043.433722] env[62070]: DEBUG oslo_concurrency.lockutils [None req-42037ebb-c440-40c3-b952-a50cbaace680 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Lock "d65b4f74-7df5-4fb8-baa3-4f5b9b480cb2" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1043.433939] env[62070]: DEBUG oslo_concurrency.lockutils [None req-42037ebb-c440-40c3-b952-a50cbaace680 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Acquiring lock "d65b4f74-7df5-4fb8-baa3-4f5b9b480cb2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1043.434172] env[62070]: DEBUG oslo_concurrency.lockutils [None req-42037ebb-c440-40c3-b952-a50cbaace680 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Lock "d65b4f74-7df5-4fb8-baa3-4f5b9b480cb2-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1043.434367] env[62070]: DEBUG oslo_concurrency.lockutils [None req-42037ebb-c440-40c3-b952-a50cbaace680 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Lock "d65b4f74-7df5-4fb8-baa3-4f5b9b480cb2-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1043.436765] env[62070]: INFO nova.compute.manager [None req-42037ebb-c440-40c3-b952-a50cbaace680 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: d65b4f74-7df5-4fb8-baa3-4f5b9b480cb2] Terminating instance [ 1043.439181] env[62070]: DEBUG nova.compute.manager [None req-42037ebb-c440-40c3-b952-a50cbaace680 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: d65b4f74-7df5-4fb8-baa3-4f5b9b480cb2] Start destroying the instance on the hypervisor. 
{{(pid=62070) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1043.439181] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-42037ebb-c440-40c3-b952-a50cbaace680 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: d65b4f74-7df5-4fb8-baa3-4f5b9b480cb2] Destroying instance {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1043.439934] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6eb4fb03-38e5-40e9-874b-6bf6fd10efb0 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.451245] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-42037ebb-c440-40c3-b952-a50cbaace680 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: d65b4f74-7df5-4fb8-baa3-4f5b9b480cb2] Powering off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1043.451245] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4e8b6603-2c3a-4bae-9c91-e178eee3320e {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.458117] env[62070]: DEBUG oslo_vmware.api [None req-42037ebb-c440-40c3-b952-a50cbaace680 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Waiting for the task: (returnval){ [ 1043.458117] env[62070]: value = "task-1122350" [ 1043.458117] env[62070]: _type = "Task" [ 1043.458117] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1043.471040] env[62070]: DEBUG oslo_vmware.api [None req-42037ebb-c440-40c3-b952-a50cbaace680 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Task: {'id': task-1122350, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1043.480701] env[62070]: INFO nova.compute.manager [-] [instance: 1ababba6-838c-4ba6-bd83-e2b15aaf4b97] Took 1.83 seconds to deallocate network for instance. [ 1043.498939] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-837144ba-8e0c-40e9-8012-2081538288d2 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.510669] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28cd75e2-5097-4934-b68e-e5e872a8dd64 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.519689] env[62070]: DEBUG oslo_vmware.api [None req-e326279a-fafc-42e7-9e62-9e1b8572336d tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Task: {'id': task-1122347, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.350993} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1043.550285] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-e326279a-fafc-42e7-9e62-9e1b8572336d tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Deleted the datastore file {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1043.550441] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-e326279a-fafc-42e7-9e62-9e1b8572336d tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] [instance: 53a1791d-38fd-4721-b82c-2f0922348300] Deleted contents of the VM from datastore datastore1 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1043.551540] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-e326279a-fafc-42e7-9e62-9e1b8572336d tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] [instance: 53a1791d-38fd-4721-b82c-2f0922348300] Instance destroyed {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1043.554795] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd1e27be-cd7d-4cf3-b12c-460a6cb5f805 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.562620] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38f6dcf3-778a-4cf6-a03d-104b75fa2ec0 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.568778] env[62070]: DEBUG nova.network.neutron [None req-6eb3f18f-50a0-4a78-8a7f-eefd7473ff66 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: d8284a01-bbf6-4607-b2db-33bf2cd5457d] Successfully created port: 629028b1-5fa6-4d6e-ba82-8c3c52f44a32 {{(pid=62070) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1043.579767] env[62070]: DEBUG nova.compute.provider_tree [None req-44d33cde-4ced-4c6b-a9e1-712161220918 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1043.582041] env[62070]: INFO nova.scheduler.client.report [None req-e326279a-fafc-42e7-9e62-9e1b8572336d tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Deleted allocations for instance 53a1791d-38fd-4721-b82c-2f0922348300 [ 1043.675291] env[62070]: DEBUG nova.compute.manager [None req-6eb3f18f-50a0-4a78-8a7f-eefd7473ff66 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: d8284a01-bbf6-4607-b2db-33bf2cd5457d] Start building block device mappings for instance. {{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1043.834232] env[62070]: DEBUG oslo_vmware.api [None req-bf2c8c8b-db77-4ce2-a07d-2a2532257024 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Task: {'id': task-1122348, 'name': ReconfigVM_Task, 'duration_secs': 0.192382} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1043.834572] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-bf2c8c8b-db77-4ce2-a07d-2a2532257024 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 33d04e59-da01-4ba3-ac42-ab93372a332d] Reconfigured VM instance instance-00000060 to detach disk 2000 {{(pid=62070) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1043.835462] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4eb2d258-dc3f-4220-beb6-5fd49a6310b2 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.859584] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-bf2c8c8b-db77-4ce2-a07d-2a2532257024 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 33d04e59-da01-4ba3-ac42-ab93372a332d] Reconfiguring VM instance instance-00000060 to attach disk [datastore1] 33d04e59-da01-4ba3-ac42-ab93372a332d/33d04e59-da01-4ba3-ac42-ab93372a332d.vmdk or device None with type thin {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1043.859928] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b17e4ee1-9117-4734-a6f9-6b40e51f3bed {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.880908] env[62070]: DEBUG oslo_vmware.api [None req-bf2c8c8b-db77-4ce2-a07d-2a2532257024 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Waiting for the task: (returnval){ [ 1043.880908] env[62070]: value = "task-1122351" [ 1043.880908] env[62070]: _type = "Task" [ 1043.880908] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1043.892166] env[62070]: DEBUG oslo_vmware.api [None req-bf2c8c8b-db77-4ce2-a07d-2a2532257024 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Task: {'id': task-1122351, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1043.915539] env[62070]: DEBUG oslo_vmware.api [None req-d0ef37a9-6db8-476c-932a-937fc6ece311 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Task: {'id': task-1122349, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1043.967435] env[62070]: DEBUG oslo_vmware.api [None req-42037ebb-c440-40c3-b952-a50cbaace680 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Task: {'id': task-1122350, 'name': PowerOffVM_Task, 'duration_secs': 0.477054} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1043.967794] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-42037ebb-c440-40c3-b952-a50cbaace680 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: d65b4f74-7df5-4fb8-baa3-4f5b9b480cb2] Powered off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1043.968057] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-42037ebb-c440-40c3-b952-a50cbaace680 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: d65b4f74-7df5-4fb8-baa3-4f5b9b480cb2] Unregistering the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1043.968393] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a89a01dc-1fd3-4ea6-877a-3d397511009b {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.989334] env[62070]: DEBUG oslo_concurrency.lockutils [None req-1748f89f-2272-4737-a3fc-d0537c700b0b tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1044.028420] env[62070]: DEBUG nova.compute.manager [req-97c74fcb-75a6-411d-89f9-619d01e65783 req-4a2e8b76-cdd2-428b-94b7-4f1cb2da538a service nova] [instance: 1ababba6-838c-4ba6-bd83-e2b15aaf4b97] Received event network-vif-deleted-df67c123-2618-45ce-8175-66a34206293a {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1044.034415] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-42037ebb-c440-40c3-b952-a50cbaace680 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: d65b4f74-7df5-4fb8-baa3-4f5b9b480cb2] Unregistered the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1044.035145] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-42037ebb-c440-40c3-b952-a50cbaace680 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: d65b4f74-7df5-4fb8-baa3-4f5b9b480cb2] Deleting contents of the VM from datastore datastore2 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1044.035145] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-42037ebb-c440-40c3-b952-a50cbaace680 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Deleting the datastore file [datastore2] d65b4f74-7df5-4fb8-baa3-4f5b9b480cb2 {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1044.035259] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-eff34883-2110-48af-a3f7-dfa60c2a1955 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.042806] env[62070]: DEBUG oslo_vmware.api [None req-42037ebb-c440-40c3-b952-a50cbaace680 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Waiting for the task: (returnval){ [ 1044.042806] env[62070]: value = "task-1122353" [ 
1044.042806] env[62070]: _type = "Task" [ 1044.042806] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1044.053584] env[62070]: DEBUG oslo_vmware.api [None req-42037ebb-c440-40c3-b952-a50cbaace680 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Task: {'id': task-1122353, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1044.090421] env[62070]: DEBUG oslo_concurrency.lockutils [None req-e326279a-fafc-42e7-9e62-9e1b8572336d tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1044.090421] env[62070]: DEBUG nova.scheduler.client.report [None req-44d33cde-4ced-4c6b-a9e1-712161220918 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1044.390784] env[62070]: DEBUG oslo_vmware.api [None req-bf2c8c8b-db77-4ce2-a07d-2a2532257024 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Task: {'id': task-1122351, 'name': ReconfigVM_Task, 'duration_secs': 0.290121} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1044.391283] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-bf2c8c8b-db77-4ce2-a07d-2a2532257024 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 33d04e59-da01-4ba3-ac42-ab93372a332d] Reconfigured VM instance instance-00000060 to attach disk [datastore1] 33d04e59-da01-4ba3-ac42-ab93372a332d/33d04e59-da01-4ba3-ac42-ab93372a332d.vmdk or device None with type thin {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1044.391370] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-bf2c8c8b-db77-4ce2-a07d-2a2532257024 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 33d04e59-da01-4ba3-ac42-ab93372a332d] Updating instance '33d04e59-da01-4ba3-ac42-ab93372a332d' progress to 50 {{(pid=62070) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 1044.413152] env[62070]: DEBUG oslo_vmware.api [None req-d0ef37a9-6db8-476c-932a-937fc6ece311 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Task: {'id': task-1122349, 'name': ReconfigVM_Task, 'duration_secs': 0.697168} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1044.413152] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-d0ef37a9-6db8-476c-932a-937fc6ece311 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] [instance: 000a67eb-9535-4da6-816a-b61126f11509] Reconfigured VM instance instance-00000065 to attach disk [datastore2] 000a67eb-9535-4da6-816a-b61126f11509/000a67eb-9535-4da6-816a-b61126f11509.vmdk or device None with type sparse {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1044.413879] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6abb312f-9799-4f49-ad57-62f935b57ee8 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.420476] env[62070]: DEBUG oslo_vmware.api [None req-d0ef37a9-6db8-476c-932a-937fc6ece311 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Waiting for the task: (returnval){ [ 1044.420476] env[62070]: value = "task-1122354" [ 1044.420476] env[62070]: _type = "Task" [ 1044.420476] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1044.428420] env[62070]: DEBUG oslo_vmware.api [None req-d0ef37a9-6db8-476c-932a-937fc6ece311 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Task: {'id': task-1122354, 'name': Rename_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1044.557626] env[62070]: DEBUG oslo_vmware.api [None req-42037ebb-c440-40c3-b952-a50cbaace680 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Task: {'id': task-1122353, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.161451} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1044.557880] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-42037ebb-c440-40c3-b952-a50cbaace680 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Deleted the datastore file {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1044.558072] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-42037ebb-c440-40c3-b952-a50cbaace680 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: d65b4f74-7df5-4fb8-baa3-4f5b9b480cb2] Deleted contents of the VM from datastore datastore2 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1044.558317] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-42037ebb-c440-40c3-b952-a50cbaace680 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: d65b4f74-7df5-4fb8-baa3-4f5b9b480cb2] Instance destroyed {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1044.558491] env[62070]: INFO nova.compute.manager [None req-42037ebb-c440-40c3-b952-a50cbaace680 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] [instance: d65b4f74-7df5-4fb8-baa3-4f5b9b480cb2] Took 1.12 seconds to destroy the instance on the hypervisor. 
[ 1044.558736] env[62070]: DEBUG oslo.service.loopingcall [None req-42037ebb-c440-40c3-b952-a50cbaace680 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1044.558926] env[62070]: DEBUG nova.compute.manager [-] [instance: d65b4f74-7df5-4fb8-baa3-4f5b9b480cb2] Deallocating network for instance {{(pid=62070) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1044.559032] env[62070]: DEBUG nova.network.neutron [-] [instance: d65b4f74-7df5-4fb8-baa3-4f5b9b480cb2] deallocate_for_instance() {{(pid=62070) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1044.597158] env[62070]: DEBUG oslo_concurrency.lockutils [None req-44d33cde-4ced-4c6b-a9e1-712161220918 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.932s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1044.600466] env[62070]: DEBUG oslo_concurrency.lockutils [None req-8668c353-fb38-4717-97b5-397aa4b133d7 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.171s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1044.602164] env[62070]: INFO nova.compute.claims [None req-8668c353-fb38-4717-97b5-397aa4b133d7 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: fec23dd4-e956-42dd-b9a2-c8577f77cd81] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1044.605770] env[62070]: DEBUG nova.compute.manager [req-9c9d2e0b-02c4-47e9-91cc-29c2d53a4b39 req-c9d48e6a-9bf0-43ab-9b66-13755ed77a0b service nova] [instance: 53a1791d-38fd-4721-b82c-2f0922348300] Received event network-changed-2c6759e4-b6e7-4b67-b06d-d38d6043d3b2 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1044.605950] env[62070]: DEBUG nova.compute.manager [req-9c9d2e0b-02c4-47e9-91cc-29c2d53a4b39 req-c9d48e6a-9bf0-43ab-9b66-13755ed77a0b service nova] [instance: 53a1791d-38fd-4721-b82c-2f0922348300] Refreshing instance network info cache due to event network-changed-2c6759e4-b6e7-4b67-b06d-d38d6043d3b2. 
{{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1044.606177] env[62070]: DEBUG oslo_concurrency.lockutils [req-9c9d2e0b-02c4-47e9-91cc-29c2d53a4b39 req-c9d48e6a-9bf0-43ab-9b66-13755ed77a0b service nova] Acquiring lock "refresh_cache-53a1791d-38fd-4721-b82c-2f0922348300" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1044.606320] env[62070]: DEBUG oslo_concurrency.lockutils [req-9c9d2e0b-02c4-47e9-91cc-29c2d53a4b39 req-c9d48e6a-9bf0-43ab-9b66-13755ed77a0b service nova] Acquired lock "refresh_cache-53a1791d-38fd-4721-b82c-2f0922348300" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1044.606479] env[62070]: DEBUG nova.network.neutron [req-9c9d2e0b-02c4-47e9-91cc-29c2d53a4b39 req-c9d48e6a-9bf0-43ab-9b66-13755ed77a0b service nova] [instance: 53a1791d-38fd-4721-b82c-2f0922348300] Refreshing network info cache for port 2c6759e4-b6e7-4b67-b06d-d38d6043d3b2 {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1044.629780] env[62070]: INFO nova.scheduler.client.report [None req-44d33cde-4ced-4c6b-a9e1-712161220918 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Deleted allocations for instance 6b6e22b9-71fb-4139-993a-7b9fcf89d8e0 [ 1044.684727] env[62070]: DEBUG nova.compute.manager [None req-6eb3f18f-50a0-4a78-8a7f-eefd7473ff66 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: d8284a01-bbf6-4607-b2db-33bf2cd5457d] Start spawning the instance on the hypervisor. {{(pid=62070) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1044.719935] env[62070]: DEBUG nova.virt.hardware [None req-6eb3f18f-50a0-4a78-8a7f-eefd7473ff66 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T09:21:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T09:21:17Z,direct_url=,disk_format='vmdk',id=43ea607c-7ece-4601-9b11-75c6a16aa7dd,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9d42cb2bbadf40d6b35f237f71234611',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T09:21:18Z,virtual_size=,visibility=), allow threads: False {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1044.720211] env[62070]: DEBUG nova.virt.hardware [None req-6eb3f18f-50a0-4a78-8a7f-eefd7473ff66 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Flavor limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1044.720377] env[62070]: DEBUG nova.virt.hardware [None req-6eb3f18f-50a0-4a78-8a7f-eefd7473ff66 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Image limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1044.720563] env[62070]: DEBUG nova.virt.hardware [None req-6eb3f18f-50a0-4a78-8a7f-eefd7473ff66 
tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Flavor pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1044.720759] env[62070]: DEBUG nova.virt.hardware [None req-6eb3f18f-50a0-4a78-8a7f-eefd7473ff66 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Image pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1044.720927] env[62070]: DEBUG nova.virt.hardware [None req-6eb3f18f-50a0-4a78-8a7f-eefd7473ff66 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1044.721150] env[62070]: DEBUG nova.virt.hardware [None req-6eb3f18f-50a0-4a78-8a7f-eefd7473ff66 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1044.721360] env[62070]: DEBUG nova.virt.hardware [None req-6eb3f18f-50a0-4a78-8a7f-eefd7473ff66 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1044.721546] env[62070]: DEBUG nova.virt.hardware [None req-6eb3f18f-50a0-4a78-8a7f-eefd7473ff66 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Got 1 possible topologies {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1044.721738] env[62070]: DEBUG nova.virt.hardware [None req-6eb3f18f-50a0-4a78-8a7f-eefd7473ff66 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1044.721919] env[62070]: DEBUG nova.virt.hardware [None req-6eb3f18f-50a0-4a78-8a7f-eefd7473ff66 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1044.726038] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-398efc66-8943-430d-b222-585008046139 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.731801] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71f1ddd7-3858-4573-859d-decefe3ee577 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.901057] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfa89ee9-a36a-4cad-8461-e0a8f3ab91ac {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.918030] env[62070]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80f96fbd-f546-4921-9ce0-4099537a96cd {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.938586] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-bf2c8c8b-db77-4ce2-a07d-2a2532257024 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 33d04e59-da01-4ba3-ac42-ab93372a332d] Updating instance '33d04e59-da01-4ba3-ac42-ab93372a332d' progress to 67 {{(pid=62070) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 1044.949928] env[62070]: DEBUG oslo_vmware.api [None req-d0ef37a9-6db8-476c-932a-937fc6ece311 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Task: {'id': task-1122354, 'name': Rename_Task, 'duration_secs': 0.146309} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1044.950210] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-d0ef37a9-6db8-476c-932a-937fc6ece311 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] [instance: 000a67eb-9535-4da6-816a-b61126f11509] Powering on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1044.950683] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-30c81e56-c624-469a-b5c3-ae727cf20e9c {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.957087] env[62070]: DEBUG oslo_vmware.api [None req-d0ef37a9-6db8-476c-932a-937fc6ece311 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Waiting for the task: (returnval){ [ 1044.957087] env[62070]: value = "task-1122355" [ 1044.957087] env[62070]: _type = "Task" [ 1044.957087] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1044.967308] env[62070]: DEBUG oslo_vmware.api [None req-d0ef37a9-6db8-476c-932a-937fc6ece311 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Task: {'id': task-1122355, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1045.136560] env[62070]: DEBUG oslo_concurrency.lockutils [None req-44d33cde-4ced-4c6b-a9e1-712161220918 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Lock "6b6e22b9-71fb-4139-993a-7b9fcf89d8e0" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1045.285525] env[62070]: DEBUG nova.network.neutron [None req-6eb3f18f-50a0-4a78-8a7f-eefd7473ff66 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: d8284a01-bbf6-4607-b2db-33bf2cd5457d] Successfully updated port: 629028b1-5fa6-4d6e-ba82-8c3c52f44a32 {{(pid=62070) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1045.378602] env[62070]: DEBUG nova.network.neutron [-] [instance: d65b4f74-7df5-4fb8-baa3-4f5b9b480cb2] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1045.467528] env[62070]: DEBUG oslo_vmware.api [None req-d0ef37a9-6db8-476c-932a-937fc6ece311 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Task: {'id': task-1122355, 'name': PowerOnVM_Task, 'duration_secs': 0.489553} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1045.468193] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-d0ef37a9-6db8-476c-932a-937fc6ece311 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] [instance: 000a67eb-9535-4da6-816a-b61126f11509] Powered on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1045.468562] env[62070]: INFO nova.compute.manager [None req-d0ef37a9-6db8-476c-932a-937fc6ece311 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] [instance: 000a67eb-9535-4da6-816a-b61126f11509] Took 8.47 seconds to spawn the instance on the hypervisor. 
[ 1045.468821] env[62070]: DEBUG nova.compute.manager [None req-d0ef37a9-6db8-476c-932a-937fc6ece311 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] [instance: 000a67eb-9535-4da6-816a-b61126f11509] Checking state {{(pid=62070) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1045.469922] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-317fb09c-2e91-423e-ad69-b54f902e349b {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.496421] env[62070]: DEBUG nova.network.neutron [None req-bf2c8c8b-db77-4ce2-a07d-2a2532257024 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 33d04e59-da01-4ba3-ac42-ab93372a332d] Port 222fba36-759a-41f7-a82a-cb4047bd3725 binding to destination host cpu-1 is already ACTIVE {{(pid=62070) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3171}} [ 1045.508300] env[62070]: DEBUG nova.network.neutron [req-9c9d2e0b-02c4-47e9-91cc-29c2d53a4b39 req-c9d48e6a-9bf0-43ab-9b66-13755ed77a0b service nova] [instance: 53a1791d-38fd-4721-b82c-2f0922348300] Updated VIF entry in instance network info cache for port 2c6759e4-b6e7-4b67-b06d-d38d6043d3b2. {{(pid=62070) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1045.509269] env[62070]: DEBUG nova.network.neutron [req-9c9d2e0b-02c4-47e9-91cc-29c2d53a4b39 req-c9d48e6a-9bf0-43ab-9b66-13755ed77a0b service nova] [instance: 53a1791d-38fd-4721-b82c-2f0922348300] Updating instance_info_cache with network_info: [{"id": "2c6759e4-b6e7-4b67-b06d-d38d6043d3b2", "address": "fa:16:3e:11:96:b8", "network": {"id": "6a62b79f-a98b-4518-86cb-facc7b77da1d", "bridge": null, "label": "tempest-ServersNegativeTestJSON-2107556336-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "772f7fcee5f44b899b6df797e1ed5ddd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tap2c6759e4-b6", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1045.594913] env[62070]: DEBUG oslo_concurrency.lockutils [None req-7aa3c262-5cfe-48c5-be13-33f7281359fe tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Acquiring lock "53a1791d-38fd-4721-b82c-2f0922348300" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1045.793617] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6eb3f18f-50a0-4a78-8a7f-eefd7473ff66 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Acquiring lock "refresh_cache-d8284a01-bbf6-4607-b2db-33bf2cd5457d" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 
1045.793617] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6eb3f18f-50a0-4a78-8a7f-eefd7473ff66 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Acquired lock "refresh_cache-d8284a01-bbf6-4607-b2db-33bf2cd5457d" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1045.793617] env[62070]: DEBUG nova.network.neutron [None req-6eb3f18f-50a0-4a78-8a7f-eefd7473ff66 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: d8284a01-bbf6-4607-b2db-33bf2cd5457d] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1045.848773] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94a22038-de36-40b1-85a1-8c122ab735af {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.857354] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-831b313a-60de-4ef1-82b9-080d2489d497 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.889579] env[62070]: INFO nova.compute.manager [-] [instance: d65b4f74-7df5-4fb8-baa3-4f5b9b480cb2] Took 1.33 seconds to deallocate network for instance. [ 1045.894023] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee1372fd-c1f1-41a7-bd46-23ae6abe41b8 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.905806] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c19725f-b882-4259-b566-757171e2844c {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.921044] env[62070]: DEBUG nova.compute.provider_tree [None req-8668c353-fb38-4717-97b5-397aa4b133d7 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1045.990944] env[62070]: INFO nova.compute.manager [None req-d0ef37a9-6db8-476c-932a-937fc6ece311 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] [instance: 000a67eb-9535-4da6-816a-b61126f11509] Took 22.70 seconds to build instance. 
[ 1046.014299] env[62070]: DEBUG oslo_concurrency.lockutils [req-9c9d2e0b-02c4-47e9-91cc-29c2d53a4b39 req-c9d48e6a-9bf0-43ab-9b66-13755ed77a0b service nova] Releasing lock "refresh_cache-53a1791d-38fd-4721-b82c-2f0922348300" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1046.062176] env[62070]: DEBUG nova.compute.manager [req-26d7a074-326b-48d9-84c9-b943406a9d47 req-f421817d-67d0-430d-8c0a-e0a771c9d5de service nova] [instance: d65b4f74-7df5-4fb8-baa3-4f5b9b480cb2] Received event network-vif-deleted-e07c1b27-68ca-4e3b-8554-fb5fabd9c0f4 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1046.332128] env[62070]: DEBUG nova.network.neutron [None req-6eb3f18f-50a0-4a78-8a7f-eefd7473ff66 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: d8284a01-bbf6-4607-b2db-33bf2cd5457d] Instance cache missing network info. {{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1046.407349] env[62070]: DEBUG oslo_concurrency.lockutils [None req-42037ebb-c440-40c3-b952-a50cbaace680 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1046.425091] env[62070]: DEBUG nova.scheduler.client.report [None req-8668c353-fb38-4717-97b5-397aa4b133d7 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1046.491702] env[62070]: DEBUG oslo_concurrency.lockutils [None req-d0ef37a9-6db8-476c-932a-937fc6ece311 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Lock "000a67eb-9535-4da6-816a-b61126f11509" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 24.214s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1046.495016] env[62070]: DEBUG nova.network.neutron [None req-6eb3f18f-50a0-4a78-8a7f-eefd7473ff66 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: d8284a01-bbf6-4607-b2db-33bf2cd5457d] Updating instance_info_cache with network_info: [{"id": "629028b1-5fa6-4d6e-ba82-8c3c52f44a32", "address": "fa:16:3e:8d:18:bf", "network": {"id": "0d81bd04-b549-4e1f-97a2-0a0b9391dd3f", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-108214409-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": 
"192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c91e5eeeeb1742f499b2edaf76a93a3b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cf5bfbae-a882-4d34-be33-b31e274b3077", "external-id": "nsx-vlan-transportzone-556", "segmentation_id": 556, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap629028b1-5f", "ovs_interfaceid": "629028b1-5fa6-4d6e-ba82-8c3c52f44a32", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1046.499712] env[62070]: DEBUG oslo_concurrency.lockutils [None req-150f7b60-f8d6-4378-88a6-27be0c832d10 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Acquiring lock "27d9b478-7ebb-4313-a314-679ca0292086" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1046.499944] env[62070]: DEBUG oslo_concurrency.lockutils [None req-150f7b60-f8d6-4378-88a6-27be0c832d10 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Lock "27d9b478-7ebb-4313-a314-679ca0292086" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1046.523022] env[62070]: DEBUG oslo_concurrency.lockutils [None req-bf2c8c8b-db77-4ce2-a07d-2a2532257024 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Acquiring lock "33d04e59-da01-4ba3-ac42-ab93372a332d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1046.523022] env[62070]: DEBUG oslo_concurrency.lockutils [None req-bf2c8c8b-db77-4ce2-a07d-2a2532257024 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Lock "33d04e59-da01-4ba3-ac42-ab93372a332d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1046.523022] env[62070]: DEBUG oslo_concurrency.lockutils [None req-bf2c8c8b-db77-4ce2-a07d-2a2532257024 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Lock "33d04e59-da01-4ba3-ac42-ab93372a332d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1046.637429] env[62070]: DEBUG nova.compute.manager [req-e258b125-b125-45a6-8b14-bb5962ef0c7d req-658583ad-7ef0-44e0-b39d-ff4a3181f266 service nova] [instance: d8284a01-bbf6-4607-b2db-33bf2cd5457d] Received event network-vif-plugged-629028b1-5fa6-4d6e-ba82-8c3c52f44a32 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1046.637516] env[62070]: DEBUG oslo_concurrency.lockutils [req-e258b125-b125-45a6-8b14-bb5962ef0c7d 
req-658583ad-7ef0-44e0-b39d-ff4a3181f266 service nova] Acquiring lock "d8284a01-bbf6-4607-b2db-33bf2cd5457d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1046.638011] env[62070]: DEBUG oslo_concurrency.lockutils [req-e258b125-b125-45a6-8b14-bb5962ef0c7d req-658583ad-7ef0-44e0-b39d-ff4a3181f266 service nova] Lock "d8284a01-bbf6-4607-b2db-33bf2cd5457d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1046.638592] env[62070]: DEBUG oslo_concurrency.lockutils [req-e258b125-b125-45a6-8b14-bb5962ef0c7d req-658583ad-7ef0-44e0-b39d-ff4a3181f266 service nova] Lock "d8284a01-bbf6-4607-b2db-33bf2cd5457d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1046.638592] env[62070]: DEBUG nova.compute.manager [req-e258b125-b125-45a6-8b14-bb5962ef0c7d req-658583ad-7ef0-44e0-b39d-ff4a3181f266 service nova] [instance: d8284a01-bbf6-4607-b2db-33bf2cd5457d] No waiting events found dispatching network-vif-plugged-629028b1-5fa6-4d6e-ba82-8c3c52f44a32 {{(pid=62070) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1046.638750] env[62070]: WARNING nova.compute.manager [req-e258b125-b125-45a6-8b14-bb5962ef0c7d req-658583ad-7ef0-44e0-b39d-ff4a3181f266 service nova] [instance: d8284a01-bbf6-4607-b2db-33bf2cd5457d] Received unexpected event network-vif-plugged-629028b1-5fa6-4d6e-ba82-8c3c52f44a32 for instance with vm_state building and task_state spawning. [ 1046.638834] env[62070]: DEBUG nova.compute.manager [req-e258b125-b125-45a6-8b14-bb5962ef0c7d req-658583ad-7ef0-44e0-b39d-ff4a3181f266 service nova] [instance: d8284a01-bbf6-4607-b2db-33bf2cd5457d] Received event network-changed-629028b1-5fa6-4d6e-ba82-8c3c52f44a32 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1046.638997] env[62070]: DEBUG nova.compute.manager [req-e258b125-b125-45a6-8b14-bb5962ef0c7d req-658583ad-7ef0-44e0-b39d-ff4a3181f266 service nova] [instance: d8284a01-bbf6-4607-b2db-33bf2cd5457d] Refreshing instance network info cache due to event network-changed-629028b1-5fa6-4d6e-ba82-8c3c52f44a32. 
{{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1046.639236] env[62070]: DEBUG oslo_concurrency.lockutils [req-e258b125-b125-45a6-8b14-bb5962ef0c7d req-658583ad-7ef0-44e0-b39d-ff4a3181f266 service nova] Acquiring lock "refresh_cache-d8284a01-bbf6-4607-b2db-33bf2cd5457d" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1046.930984] env[62070]: DEBUG oslo_concurrency.lockutils [None req-8668c353-fb38-4717-97b5-397aa4b133d7 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.330s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1046.936017] env[62070]: DEBUG nova.compute.manager [None req-8668c353-fb38-4717-97b5-397aa4b133d7 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: fec23dd4-e956-42dd-b9a2-c8577f77cd81] Start building networks asynchronously for instance. {{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1046.939489] env[62070]: DEBUG oslo_concurrency.lockutils [None req-1748f89f-2272-4737-a3fc-d0537c700b0b tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.950s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1046.940100] env[62070]: DEBUG nova.objects.instance [None req-1748f89f-2272-4737-a3fc-d0537c700b0b tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] Lazy-loading 'resources' on Instance uuid 1ababba6-838c-4ba6-bd83-e2b15aaf4b97 {{(pid=62070) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1047.001065] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6eb3f18f-50a0-4a78-8a7f-eefd7473ff66 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Releasing lock "refresh_cache-d8284a01-bbf6-4607-b2db-33bf2cd5457d" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1047.001563] env[62070]: DEBUG nova.compute.manager [None req-6eb3f18f-50a0-4a78-8a7f-eefd7473ff66 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: d8284a01-bbf6-4607-b2db-33bf2cd5457d] Instance network_info: |[{"id": "629028b1-5fa6-4d6e-ba82-8c3c52f44a32", "address": "fa:16:3e:8d:18:bf", "network": {"id": "0d81bd04-b549-4e1f-97a2-0a0b9391dd3f", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-108214409-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c91e5eeeeb1742f499b2edaf76a93a3b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cf5bfbae-a882-4d34-be33-b31e274b3077", "external-id": "nsx-vlan-transportzone-556", "segmentation_id": 
556, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap629028b1-5f", "ovs_interfaceid": "629028b1-5fa6-4d6e-ba82-8c3c52f44a32", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1047.002736] env[62070]: DEBUG nova.compute.manager [None req-150f7b60-f8d6-4378-88a6-27be0c832d10 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: 27d9b478-7ebb-4313-a314-679ca0292086] Starting instance... {{(pid=62070) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1047.005826] env[62070]: DEBUG oslo_concurrency.lockutils [req-e258b125-b125-45a6-8b14-bb5962ef0c7d req-658583ad-7ef0-44e0-b39d-ff4a3181f266 service nova] Acquired lock "refresh_cache-d8284a01-bbf6-4607-b2db-33bf2cd5457d" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1047.009291] env[62070]: DEBUG nova.network.neutron [req-e258b125-b125-45a6-8b14-bb5962ef0c7d req-658583ad-7ef0-44e0-b39d-ff4a3181f266 service nova] [instance: d8284a01-bbf6-4607-b2db-33bf2cd5457d] Refreshing network info cache for port 629028b1-5fa6-4d6e-ba82-8c3c52f44a32 {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1047.010484] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-6eb3f18f-50a0-4a78-8a7f-eefd7473ff66 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: d8284a01-bbf6-4607-b2db-33bf2cd5457d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:8d:18:bf', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'cf5bfbae-a882-4d34-be33-b31e274b3077', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '629028b1-5fa6-4d6e-ba82-8c3c52f44a32', 'vif_model': 'vmxnet3'}] {{(pid=62070) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1047.018673] env[62070]: DEBUG oslo.service.loopingcall [None req-6eb3f18f-50a0-4a78-8a7f-eefd7473ff66 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1047.020094] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d8284a01-bbf6-4607-b2db-33bf2cd5457d] Creating VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1047.020416] env[62070]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-246b7dcd-88c8-4ae0-abf7-0c8137388743 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.045827] env[62070]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1047.045827] env[62070]: value = "task-1122356" [ 1047.045827] env[62070]: _type = "Task" [ 1047.045827] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1047.061400] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1122356, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1047.443583] env[62070]: DEBUG nova.compute.utils [None req-8668c353-fb38-4717-97b5-397aa4b133d7 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Using /dev/sd instead of None {{(pid=62070) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1047.448819] env[62070]: DEBUG nova.compute.manager [None req-8668c353-fb38-4717-97b5-397aa4b133d7 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: fec23dd4-e956-42dd-b9a2-c8577f77cd81] Allocating IP information in the background. {{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1047.449585] env[62070]: DEBUG nova.network.neutron [None req-8668c353-fb38-4717-97b5-397aa4b133d7 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: fec23dd4-e956-42dd-b9a2-c8577f77cd81] allocate_for_instance() {{(pid=62070) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1047.510180] env[62070]: DEBUG nova.policy [None req-8668c353-fb38-4717-97b5-397aa4b133d7 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0ab707a4862f42199fc2a91733563cde', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f29ac48ab6544ec0bd1d210aec05dbc5', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62070) authorize /opt/stack/nova/nova/policy.py:201}} [ 1047.545937] env[62070]: DEBUG oslo_concurrency.lockutils [None req-150f7b60-f8d6-4378-88a6-27be0c832d10 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1047.559682] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1122356, 'name': CreateVM_Task, 'duration_secs': 0.307094} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1047.560308] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d8284a01-bbf6-4607-b2db-33bf2cd5457d] Created VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1047.561010] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6eb3f18f-50a0-4a78-8a7f-eefd7473ff66 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1047.561589] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6eb3f18f-50a0-4a78-8a7f-eefd7473ff66 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Acquired lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1047.561841] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6eb3f18f-50a0-4a78-8a7f-eefd7473ff66 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1047.564559] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c15b6b2b-d66a-4c99-8d95-a5a323dd394d {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.569895] env[62070]: DEBUG oslo_vmware.api [None req-6eb3f18f-50a0-4a78-8a7f-eefd7473ff66 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Waiting for the task: (returnval){ [ 1047.569895] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]5284a90f-a617-f95a-0ae4-8f0c564a966e" [ 1047.569895] env[62070]: _type = "Task" [ 1047.569895] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1047.579802] env[62070]: DEBUG oslo_vmware.api [None req-6eb3f18f-50a0-4a78-8a7f-eefd7473ff66 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]5284a90f-a617-f95a-0ae4-8f0c564a966e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1047.657528] env[62070]: DEBUG oslo_concurrency.lockutils [None req-bf2c8c8b-db77-4ce2-a07d-2a2532257024 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Acquiring lock "refresh_cache-33d04e59-da01-4ba3-ac42-ab93372a332d" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1047.657728] env[62070]: DEBUG oslo_concurrency.lockutils [None req-bf2c8c8b-db77-4ce2-a07d-2a2532257024 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Acquired lock "refresh_cache-33d04e59-da01-4ba3-ac42-ab93372a332d" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1047.657924] env[62070]: DEBUG nova.network.neutron [None req-bf2c8c8b-db77-4ce2-a07d-2a2532257024 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 33d04e59-da01-4ba3-ac42-ab93372a332d] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1047.708627] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb740341-045c-44b7-81bc-e3498a306e3d {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.718740] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3de1325-e902-41fc-b986-7116cef2e82d {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.761213] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89765e50-0a90-490a-84e0-47b922b184a5 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.769467] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d138a3e9-879c-4e85-8b4c-8f0b84bcdd52 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.786108] env[62070]: DEBUG nova.compute.provider_tree [None req-1748f89f-2272-4737-a3fc-d0537c700b0b tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1047.887085] env[62070]: DEBUG nova.network.neutron [None req-8668c353-fb38-4717-97b5-397aa4b133d7 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: fec23dd4-e956-42dd-b9a2-c8577f77cd81] Successfully created port: 933a577b-8b0c-4c0d-ae12-372e4b70b7c9 {{(pid=62070) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1047.949858] env[62070]: DEBUG nova.compute.manager [None req-8668c353-fb38-4717-97b5-397aa4b133d7 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: fec23dd4-e956-42dd-b9a2-c8577f77cd81] Start building block device mappings for instance. 
{{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1048.038115] env[62070]: DEBUG nova.network.neutron [req-e258b125-b125-45a6-8b14-bb5962ef0c7d req-658583ad-7ef0-44e0-b39d-ff4a3181f266 service nova] [instance: d8284a01-bbf6-4607-b2db-33bf2cd5457d] Updated VIF entry in instance network info cache for port 629028b1-5fa6-4d6e-ba82-8c3c52f44a32. {{(pid=62070) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1048.039539] env[62070]: DEBUG nova.network.neutron [req-e258b125-b125-45a6-8b14-bb5962ef0c7d req-658583ad-7ef0-44e0-b39d-ff4a3181f266 service nova] [instance: d8284a01-bbf6-4607-b2db-33bf2cd5457d] Updating instance_info_cache with network_info: [{"id": "629028b1-5fa6-4d6e-ba82-8c3c52f44a32", "address": "fa:16:3e:8d:18:bf", "network": {"id": "0d81bd04-b549-4e1f-97a2-0a0b9391dd3f", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-108214409-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c91e5eeeeb1742f499b2edaf76a93a3b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cf5bfbae-a882-4d34-be33-b31e274b3077", "external-id": "nsx-vlan-transportzone-556", "segmentation_id": 556, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap629028b1-5f", "ovs_interfaceid": "629028b1-5fa6-4d6e-ba82-8c3c52f44a32", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1048.087150] env[62070]: DEBUG oslo_vmware.api [None req-6eb3f18f-50a0-4a78-8a7f-eefd7473ff66 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]5284a90f-a617-f95a-0ae4-8f0c564a966e, 'name': SearchDatastore_Task, 'duration_secs': 0.00937} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1048.087515] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6eb3f18f-50a0-4a78-8a7f-eefd7473ff66 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Releasing lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1048.087776] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-6eb3f18f-50a0-4a78-8a7f-eefd7473ff66 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: d8284a01-bbf6-4607-b2db-33bf2cd5457d] Processing image 43ea607c-7ece-4601-9b11-75c6a16aa7dd {{(pid=62070) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1048.088030] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6eb3f18f-50a0-4a78-8a7f-eefd7473ff66 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1048.088210] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6eb3f18f-50a0-4a78-8a7f-eefd7473ff66 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Acquired lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1048.088398] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-6eb3f18f-50a0-4a78-8a7f-eefd7473ff66 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1048.088683] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-80a44981-6932-4163-aa7a-81ed7b3670ba {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.099532] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-6eb3f18f-50a0-4a78-8a7f-eefd7473ff66 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1048.099814] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-6eb3f18f-50a0-4a78-8a7f-eefd7473ff66 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Folder [datastore1] devstack-image-cache_base created. 
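The Acquiring/Acquired/Releasing lines around the devstack-image-cache_base entry come from oslo.concurrency's named-lock helper, which serializes access to the cached image within the worker. A minimal sketch of that pattern, with a hypothetical process_cached_image() standing in for the real cached-image handling and a generic lock name in place of the image id:

    from oslo_concurrency import lockutils

    # lockutils.lock() is a context manager; entering and leaving it produces
    # the "Acquiring lock ..." / "Releasing lock ..." debug lines seen above.
    with lockutils.lock('[datastore1] devstack-image-cache_base/<image-id>'):
        process_cached_image()   # hypothetical placeholder for the critical section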
{{(pid=62070) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1048.100830] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d7f779e4-4037-4812-85f3-1e9ec730afab {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.110623] env[62070]: DEBUG oslo_vmware.api [None req-6eb3f18f-50a0-4a78-8a7f-eefd7473ff66 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Waiting for the task: (returnval){ [ 1048.110623] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]524d77a8-1018-2f57-f60a-8623e891d0ed" [ 1048.110623] env[62070]: _type = "Task" [ 1048.110623] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1048.119951] env[62070]: DEBUG oslo_vmware.api [None req-6eb3f18f-50a0-4a78-8a7f-eefd7473ff66 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]524d77a8-1018-2f57-f60a-8623e891d0ed, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1048.292087] env[62070]: DEBUG nova.scheduler.client.report [None req-1748f89f-2272-4737-a3fc-d0537c700b0b tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1048.432908] env[62070]: DEBUG nova.network.neutron [None req-bf2c8c8b-db77-4ce2-a07d-2a2532257024 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 33d04e59-da01-4ba3-ac42-ab93372a332d] Updating instance_info_cache with network_info: [{"id": "222fba36-759a-41f7-a82a-cb4047bd3725", "address": "fa:16:3e:7a:4c:a1", "network": {"id": "5ea0fffc-372c-450e-b27b-10959077d58f", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1853458988-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9191f0e6c2ee401abca64c0780e230bf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f3c995e9-7f2f-420c-880a-d60da6e708ad", "external-id": "nsx-vlan-transportzone-166", "segmentation_id": 166, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap222fba36-75", "ovs_interfaceid": "222fba36-759a-41f7-a82a-cb4047bd3725", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, 
"delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1048.541855] env[62070]: DEBUG oslo_concurrency.lockutils [req-e258b125-b125-45a6-8b14-bb5962ef0c7d req-658583ad-7ef0-44e0-b39d-ff4a3181f266 service nova] Releasing lock "refresh_cache-d8284a01-bbf6-4607-b2db-33bf2cd5457d" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1048.621889] env[62070]: DEBUG oslo_vmware.api [None req-6eb3f18f-50a0-4a78-8a7f-eefd7473ff66 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]524d77a8-1018-2f57-f60a-8623e891d0ed, 'name': SearchDatastore_Task, 'duration_secs': 0.009762} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1048.622752] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-37528603-313b-4102-b6b1-77bea6cc202c {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.628600] env[62070]: DEBUG oslo_vmware.api [None req-6eb3f18f-50a0-4a78-8a7f-eefd7473ff66 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Waiting for the task: (returnval){ [ 1048.628600] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]52413af6-202a-c70c-e264-d1523ba7edf3" [ 1048.628600] env[62070]: _type = "Task" [ 1048.628600] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1048.636674] env[62070]: DEBUG oslo_vmware.api [None req-6eb3f18f-50a0-4a78-8a7f-eefd7473ff66 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52413af6-202a-c70c-e264-d1523ba7edf3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1048.662913] env[62070]: DEBUG nova.compute.manager [req-b8e9db7e-e1e2-4767-9053-d89902296813 req-ddd7ba05-0850-4a8b-b54f-994619ebc5cc service nova] [instance: 000a67eb-9535-4da6-816a-b61126f11509] Received event network-changed-57c0b4a0-14e4-4b19-b7ee-16842d63fd74 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1048.663206] env[62070]: DEBUG nova.compute.manager [req-b8e9db7e-e1e2-4767-9053-d89902296813 req-ddd7ba05-0850-4a8b-b54f-994619ebc5cc service nova] [instance: 000a67eb-9535-4da6-816a-b61126f11509] Refreshing instance network info cache due to event network-changed-57c0b4a0-14e4-4b19-b7ee-16842d63fd74. 
{{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1048.663586] env[62070]: DEBUG oslo_concurrency.lockutils [req-b8e9db7e-e1e2-4767-9053-d89902296813 req-ddd7ba05-0850-4a8b-b54f-994619ebc5cc service nova] Acquiring lock "refresh_cache-000a67eb-9535-4da6-816a-b61126f11509" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1048.663749] env[62070]: DEBUG oslo_concurrency.lockutils [req-b8e9db7e-e1e2-4767-9053-d89902296813 req-ddd7ba05-0850-4a8b-b54f-994619ebc5cc service nova] Acquired lock "refresh_cache-000a67eb-9535-4da6-816a-b61126f11509" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1048.663950] env[62070]: DEBUG nova.network.neutron [req-b8e9db7e-e1e2-4767-9053-d89902296813 req-ddd7ba05-0850-4a8b-b54f-994619ebc5cc service nova] [instance: 000a67eb-9535-4da6-816a-b61126f11509] Refreshing network info cache for port 57c0b4a0-14e4-4b19-b7ee-16842d63fd74 {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1048.796247] env[62070]: DEBUG oslo_concurrency.lockutils [None req-1748f89f-2272-4737-a3fc-d0537c700b0b tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.857s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1048.798600] env[62070]: DEBUG oslo_concurrency.lockutils [None req-e326279a-fafc-42e7-9e62-9e1b8572336d tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 4.710s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1048.798828] env[62070]: DEBUG nova.objects.instance [None req-e326279a-fafc-42e7-9e62-9e1b8572336d tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Lazy-loading 'resources' on Instance uuid 53a1791d-38fd-4721-b82c-2f0922348300 {{(pid=62070) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1048.813078] env[62070]: INFO nova.scheduler.client.report [None req-1748f89f-2272-4737-a3fc-d0537c700b0b tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] Deleted allocations for instance 1ababba6-838c-4ba6-bd83-e2b15aaf4b97 [ 1048.936065] env[62070]: DEBUG oslo_concurrency.lockutils [None req-bf2c8c8b-db77-4ce2-a07d-2a2532257024 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Releasing lock "refresh_cache-33d04e59-da01-4ba3-ac42-ab93372a332d" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1048.962516] env[62070]: DEBUG nova.compute.manager [None req-8668c353-fb38-4717-97b5-397aa4b133d7 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: fec23dd4-e956-42dd-b9a2-c8577f77cd81] Start spawning the instance on the hypervisor. 
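The 'Lock "compute_resources" acquired by ... :: waited 4.710s' and ':: held 1.857s' lines above are emitted by oslo.concurrency's decorator-style wrapper, which times how long a caller waited for and then held a named lock. Roughly (a sketch, not the resource tracker's actual code):

    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def update_usage(context, instance):
        # Runs with the per-worker "compute_resources" lock held; the wrapper's
        # inner function logs how long the caller waited for the lock and how
        # long it held it, producing the timings seen in the log.
        pass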
{{(pid=62070) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1048.982808] env[62070]: DEBUG nova.virt.hardware [None req-8668c353-fb38-4717-97b5-397aa4b133d7 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T09:21:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T09:21:17Z,direct_url=,disk_format='vmdk',id=43ea607c-7ece-4601-9b11-75c6a16aa7dd,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9d42cb2bbadf40d6b35f237f71234611',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T09:21:18Z,virtual_size=,visibility=), allow threads: False {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1048.983092] env[62070]: DEBUG nova.virt.hardware [None req-8668c353-fb38-4717-97b5-397aa4b133d7 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Flavor limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1048.983259] env[62070]: DEBUG nova.virt.hardware [None req-8668c353-fb38-4717-97b5-397aa4b133d7 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Image limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1048.983447] env[62070]: DEBUG nova.virt.hardware [None req-8668c353-fb38-4717-97b5-397aa4b133d7 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Flavor pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1048.983628] env[62070]: DEBUG nova.virt.hardware [None req-8668c353-fb38-4717-97b5-397aa4b133d7 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Image pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1048.983820] env[62070]: DEBUG nova.virt.hardware [None req-8668c353-fb38-4717-97b5-397aa4b133d7 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1048.985811] env[62070]: DEBUG nova.virt.hardware [None req-8668c353-fb38-4717-97b5-397aa4b133d7 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1048.985811] env[62070]: DEBUG nova.virt.hardware [None req-8668c353-fb38-4717-97b5-397aa4b133d7 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1048.985811] env[62070]: DEBUG nova.virt.hardware [None 
req-8668c353-fb38-4717-97b5-397aa4b133d7 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Got 1 possible topologies {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1048.985811] env[62070]: DEBUG nova.virt.hardware [None req-8668c353-fb38-4717-97b5-397aa4b133d7 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1048.985811] env[62070]: DEBUG nova.virt.hardware [None req-8668c353-fb38-4717-97b5-397aa4b133d7 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1048.985811] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3bd6e20-5708-4435-95c6-adecc6b2c942 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.994719] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fae8484-21eb-4b3a-afaf-eb21a9936c7f {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.139134] env[62070]: DEBUG oslo_vmware.api [None req-6eb3f18f-50a0-4a78-8a7f-eefd7473ff66 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52413af6-202a-c70c-e264-d1523ba7edf3, 'name': SearchDatastore_Task, 'duration_secs': 0.011061} completed successfully. 
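With a 1-vCPU flavor and no flavor or image topology limits, the only sockets x cores x threads factorisation is 1:1:1, which is why a single possible topology is reported above. A simplified illustration of that enumeration (not nova.virt.hardware itself):

    vcpus = 1
    # Enumerate every (sockets, cores, threads) triple whose product is the vCPU count.
    topologies = [(s, c, t)
                  for s in range(1, vcpus + 1)
                  for c in range(1, vcpus + 1)
                  for t in range(1, vcpus + 1)
                  if s * c * t == vcpus]
    assert topologies == [(1, 1, 1)]   # matches the single VirtCPUTopology logged above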
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1049.139450] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6eb3f18f-50a0-4a78-8a7f-eefd7473ff66 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Releasing lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1049.139715] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-6eb3f18f-50a0-4a78-8a7f-eefd7473ff66 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore1] d8284a01-bbf6-4607-b2db-33bf2cd5457d/d8284a01-bbf6-4607-b2db-33bf2cd5457d.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1049.141044] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4d1d204d-14b6-4635-8e3e-e18754e58416 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.145744] env[62070]: DEBUG oslo_concurrency.lockutils [None req-fe7daec4-1432-4f51-b956-c8e29c87e358 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Acquiring lock "20c4fabc-fc9b-49c7-ab28-fa092ad66038" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1049.145966] env[62070]: DEBUG oslo_concurrency.lockutils [None req-fe7daec4-1432-4f51-b956-c8e29c87e358 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Lock "20c4fabc-fc9b-49c7-ab28-fa092ad66038" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1049.152354] env[62070]: DEBUG oslo_vmware.api [None req-6eb3f18f-50a0-4a78-8a7f-eefd7473ff66 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Waiting for the task: (returnval){ [ 1049.152354] env[62070]: value = "task-1122357" [ 1049.152354] env[62070]: _type = "Task" [ 1049.152354] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1049.160496] env[62070]: DEBUG oslo_vmware.api [None req-6eb3f18f-50a0-4a78-8a7f-eefd7473ff66 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': task-1122357, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1049.302829] env[62070]: DEBUG nova.objects.instance [None req-e326279a-fafc-42e7-9e62-9e1b8572336d tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Lazy-loading 'numa_topology' on Instance uuid 53a1791d-38fd-4721-b82c-2f0922348300 {{(pid=62070) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1049.320990] env[62070]: DEBUG oslo_concurrency.lockutils [None req-1748f89f-2272-4737-a3fc-d0537c700b0b tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] Lock "1ababba6-838c-4ba6-bd83-e2b15aaf4b97" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.818s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1049.469848] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50e2ba28-c79b-46cd-be5b-e969fb793a38 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.506019] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96a93b18-20bc-460f-a808-be2f1df661f4 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.514039] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-bf2c8c8b-db77-4ce2-a07d-2a2532257024 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 33d04e59-da01-4ba3-ac42-ab93372a332d] Updating instance '33d04e59-da01-4ba3-ac42-ab93372a332d' progress to 83 {{(pid=62070) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 1049.564816] env[62070]: DEBUG nova.network.neutron [req-b8e9db7e-e1e2-4767-9053-d89902296813 req-ddd7ba05-0850-4a8b-b54f-994619ebc5cc service nova] [instance: 000a67eb-9535-4da6-816a-b61126f11509] Updated VIF entry in instance network info cache for port 57c0b4a0-14e4-4b19-b7ee-16842d63fd74. 
{{(pid=62070) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1049.565515] env[62070]: DEBUG nova.network.neutron [req-b8e9db7e-e1e2-4767-9053-d89902296813 req-ddd7ba05-0850-4a8b-b54f-994619ebc5cc service nova] [instance: 000a67eb-9535-4da6-816a-b61126f11509] Updating instance_info_cache with network_info: [{"id": "57c0b4a0-14e4-4b19-b7ee-16842d63fd74", "address": "fa:16:3e:87:a3:a2", "network": {"id": "6ea9aade-1b40-4ce8-a502-14ff09a4ab40", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1617295069-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.186", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "735d24ccc5614660a5b34d77af648f94", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5b8af79a-31d5-4d78-93d7-3919aa1d9186", "external-id": "nsx-vlan-transportzone-324", "segmentation_id": 324, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap57c0b4a0-14", "ovs_interfaceid": "57c0b4a0-14e4-4b19-b7ee-16842d63fd74", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1049.630204] env[62070]: DEBUG nova.compute.manager [req-8cfcf3ef-d12d-48f9-a38c-9ad1fc690d1c req-743d3f84-17e5-451b-9e2d-e8bc3f64aeda service nova] [instance: fec23dd4-e956-42dd-b9a2-c8577f77cd81] Received event network-vif-plugged-933a577b-8b0c-4c0d-ae12-372e4b70b7c9 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1049.630573] env[62070]: DEBUG oslo_concurrency.lockutils [req-8cfcf3ef-d12d-48f9-a38c-9ad1fc690d1c req-743d3f84-17e5-451b-9e2d-e8bc3f64aeda service nova] Acquiring lock "fec23dd4-e956-42dd-b9a2-c8577f77cd81-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1049.630822] env[62070]: DEBUG oslo_concurrency.lockutils [req-8cfcf3ef-d12d-48f9-a38c-9ad1fc690d1c req-743d3f84-17e5-451b-9e2d-e8bc3f64aeda service nova] Lock "fec23dd4-e956-42dd-b9a2-c8577f77cd81-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1049.631012] env[62070]: DEBUG oslo_concurrency.lockutils [req-8cfcf3ef-d12d-48f9-a38c-9ad1fc690d1c req-743d3f84-17e5-451b-9e2d-e8bc3f64aeda service nova] Lock "fec23dd4-e956-42dd-b9a2-c8577f77cd81-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1049.631200] env[62070]: DEBUG nova.compute.manager [req-8cfcf3ef-d12d-48f9-a38c-9ad1fc690d1c req-743d3f84-17e5-451b-9e2d-e8bc3f64aeda service nova] [instance: fec23dd4-e956-42dd-b9a2-c8577f77cd81] No waiting events found dispatching 
network-vif-plugged-933a577b-8b0c-4c0d-ae12-372e4b70b7c9 {{(pid=62070) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1049.631374] env[62070]: WARNING nova.compute.manager [req-8cfcf3ef-d12d-48f9-a38c-9ad1fc690d1c req-743d3f84-17e5-451b-9e2d-e8bc3f64aeda service nova] [instance: fec23dd4-e956-42dd-b9a2-c8577f77cd81] Received unexpected event network-vif-plugged-933a577b-8b0c-4c0d-ae12-372e4b70b7c9 for instance with vm_state building and task_state spawning. [ 1049.648493] env[62070]: DEBUG nova.compute.manager [None req-fe7daec4-1432-4f51-b956-c8e29c87e358 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] [instance: 20c4fabc-fc9b-49c7-ab28-fa092ad66038] Starting instance... {{(pid=62070) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1049.662423] env[62070]: DEBUG oslo_vmware.api [None req-6eb3f18f-50a0-4a78-8a7f-eefd7473ff66 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': task-1122357, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.483568} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1049.662855] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-6eb3f18f-50a0-4a78-8a7f-eefd7473ff66 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore1] d8284a01-bbf6-4607-b2db-33bf2cd5457d/d8284a01-bbf6-4607-b2db-33bf2cd5457d.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 1049.663134] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-6eb3f18f-50a0-4a78-8a7f-eefd7473ff66 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: d8284a01-bbf6-4607-b2db-33bf2cd5457d] Extending root virtual disk to 1048576 {{(pid=62070) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1049.663669] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c59dedb0-73ed-4b61-b3a2-76ec190110e0 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.670494] env[62070]: DEBUG oslo_vmware.api [None req-6eb3f18f-50a0-4a78-8a7f-eefd7473ff66 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Waiting for the task: (returnval){ [ 1049.670494] env[62070]: value = "task-1122358" [ 1049.670494] env[62070]: _type = "Task" [ 1049.670494] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1049.679243] env[62070]: DEBUG oslo_vmware.api [None req-6eb3f18f-50a0-4a78-8a7f-eefd7473ff66 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': task-1122358, 'name': ExtendVirtualDisk_Task} progress is 0%. 
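The copy-then-extend sequence above (CopyVirtualDisk_Task followed by ExtendVirtualDisk_Task growing the root disk to 1048576 KB, i.e. the flavor's 1 GB root_gb) maps to two VirtualDiskManager tasks driven through the same session. A hedged sketch, assuming disk_mgr is the VirtualDiskManager reference and reusing the invoke/wait pattern shown earlier; datacenter and copy-spec arguments are omitted, and the paths are placeholders:

    src = '[datastore1] devstack-image-cache_base/<image-id>/<image-id>.vmdk'
    dst = '[datastore1] <instance-uuid>/<instance-uuid>.vmdk'

    # Copy the cached image disk into the instance directory.
    copy_task = session.invoke_api(session.vim, 'CopyVirtualDisk_Task', disk_mgr,
                                   sourceName=src, destName=dst)
    session.wait_for_task(copy_task)

    # Grow the copied root disk to the flavor size (root_gb in GB -> KB).
    root_gb = 1
    extend_task = session.invoke_api(session.vim, 'ExtendVirtualDisk_Task', disk_mgr,
                                     name=dst, newCapacityKb=root_gb * 1024 * 1024)
    session.wait_for_task(extend_task)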
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1049.725213] env[62070]: DEBUG nova.network.neutron [None req-8668c353-fb38-4717-97b5-397aa4b133d7 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: fec23dd4-e956-42dd-b9a2-c8577f77cd81] Successfully updated port: 933a577b-8b0c-4c0d-ae12-372e4b70b7c9 {{(pid=62070) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1049.805222] env[62070]: DEBUG nova.objects.base [None req-e326279a-fafc-42e7-9e62-9e1b8572336d tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Object Instance<53a1791d-38fd-4721-b82c-2f0922348300> lazy-loaded attributes: resources,numa_topology {{(pid=62070) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 1049.992665] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d060a7c9-7598-4173-84ba-a6b2541d5350 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.001172] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd479fcd-7913-4d6c-a14b-6c647f54f838 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.039099] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf2c8c8b-db77-4ce2-a07d-2a2532257024 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 33d04e59-da01-4ba3-ac42-ab93372a332d] Powering on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1050.039732] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fffe17fa-e00b-48a0-825b-5f1818b96f39 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.042443] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b05a6eb-932e-44d7-be93-1f047dbfcdfd {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.053618] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46d40eb7-a1aa-43bc-b543-39d7b9bea2b2 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.057949] env[62070]: DEBUG oslo_vmware.api [None req-bf2c8c8b-db77-4ce2-a07d-2a2532257024 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Waiting for the task: (returnval){ [ 1050.057949] env[62070]: value = "task-1122359" [ 1050.057949] env[62070]: _type = "Task" [ 1050.057949] env[62070]: } to complete. 
{{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1050.069579] env[62070]: DEBUG oslo_concurrency.lockutils [req-b8e9db7e-e1e2-4767-9053-d89902296813 req-ddd7ba05-0850-4a8b-b54f-994619ebc5cc service nova] Releasing lock "refresh_cache-000a67eb-9535-4da6-816a-b61126f11509" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1050.070199] env[62070]: DEBUG nova.compute.provider_tree [None req-e326279a-fafc-42e7-9e62-9e1b8572336d tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1050.076189] env[62070]: DEBUG oslo_vmware.api [None req-bf2c8c8b-db77-4ce2-a07d-2a2532257024 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Task: {'id': task-1122359, 'name': PowerOnVM_Task} progress is 33%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1050.176477] env[62070]: DEBUG oslo_concurrency.lockutils [None req-fe7daec4-1432-4f51-b956-c8e29c87e358 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1050.181646] env[62070]: DEBUG oslo_vmware.api [None req-6eb3f18f-50a0-4a78-8a7f-eefd7473ff66 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': task-1122358, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.076851} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1050.181871] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-6eb3f18f-50a0-4a78-8a7f-eefd7473ff66 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: d8284a01-bbf6-4607-b2db-33bf2cd5457d] Extended root virtual disk {{(pid=62070) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1050.182650] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04976d95-4301-4f9e-99c6-881546ee8e82 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.206086] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-6eb3f18f-50a0-4a78-8a7f-eefd7473ff66 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: d8284a01-bbf6-4607-b2db-33bf2cd5457d] Reconfiguring VM instance instance-00000066 to attach disk [datastore1] d8284a01-bbf6-4607-b2db-33bf2cd5457d/d8284a01-bbf6-4607-b2db-33bf2cd5457d.vmdk or device None with type sparse {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1050.206372] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a93e02a6-2b21-4a16-9840-f0eae4acd6c5 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.227538] env[62070]: DEBUG oslo_vmware.api [None req-6eb3f18f-50a0-4a78-8a7f-eefd7473ff66 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Waiting for the task: (returnval){ [ 1050.227538] env[62070]: value = "task-1122360" [ 1050.227538] env[62070]: _type = "Task" [ 1050.227538] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1050.231642] env[62070]: DEBUG oslo_concurrency.lockutils [None req-8668c353-fb38-4717-97b5-397aa4b133d7 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Acquiring lock "refresh_cache-fec23dd4-e956-42dd-b9a2-c8577f77cd81" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1050.231642] env[62070]: DEBUG oslo_concurrency.lockutils [None req-8668c353-fb38-4717-97b5-397aa4b133d7 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Acquired lock "refresh_cache-fec23dd4-e956-42dd-b9a2-c8577f77cd81" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1050.231642] env[62070]: DEBUG nova.network.neutron [None req-8668c353-fb38-4717-97b5-397aa4b133d7 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: fec23dd4-e956-42dd-b9a2-c8577f77cd81] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1050.238568] env[62070]: DEBUG oslo_vmware.api [None req-6eb3f18f-50a0-4a78-8a7f-eefd7473ff66 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': task-1122360, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1050.568761] env[62070]: DEBUG oslo_vmware.api [None req-bf2c8c8b-db77-4ce2-a07d-2a2532257024 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Task: {'id': task-1122359, 'name': PowerOnVM_Task, 'duration_secs': 0.407059} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1050.569061] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf2c8c8b-db77-4ce2-a07d-2a2532257024 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 33d04e59-da01-4ba3-ac42-ab93372a332d] Powered on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1050.570436] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-bf2c8c8b-db77-4ce2-a07d-2a2532257024 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 33d04e59-da01-4ba3-ac42-ab93372a332d] Updating instance '33d04e59-da01-4ba3-ac42-ab93372a332d' progress to 100 {{(pid=62070) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 1050.573678] env[62070]: DEBUG nova.scheduler.client.report [None req-e326279a-fafc-42e7-9e62-9e1b8572336d tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1050.738835] env[62070]: DEBUG oslo_vmware.api [None req-6eb3f18f-50a0-4a78-8a7f-eefd7473ff66 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': task-1122360, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1050.781275] env[62070]: DEBUG nova.network.neutron [None req-8668c353-fb38-4717-97b5-397aa4b133d7 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: fec23dd4-e956-42dd-b9a2-c8577f77cd81] Instance cache missing network info. 
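The PowerOnVM_Task / "Powered on the VM" pair above is the same invoke-and-wait pattern applied to the VirtualMachine object. A one-call sketch, assuming vm_ref is the instance's managed-object reference:

    # Power on the VM and block until vCenter reports the task as complete.
    session.wait_for_task(
        session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref))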
{{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1050.783551] env[62070]: DEBUG oslo_concurrency.lockutils [None req-3057a186-31ce-49a6-8c86-ad7afb44d558 tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] Acquiring lock "b9e5a798-9ce9-4e36-8c32-bc32b0dc1eae" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1050.783681] env[62070]: DEBUG oslo_concurrency.lockutils [None req-3057a186-31ce-49a6-8c86-ad7afb44d558 tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] Lock "b9e5a798-9ce9-4e36-8c32-bc32b0dc1eae" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1050.783914] env[62070]: DEBUG oslo_concurrency.lockutils [None req-3057a186-31ce-49a6-8c86-ad7afb44d558 tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] Acquiring lock "b9e5a798-9ce9-4e36-8c32-bc32b0dc1eae-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1050.784123] env[62070]: DEBUG oslo_concurrency.lockutils [None req-3057a186-31ce-49a6-8c86-ad7afb44d558 tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] Lock "b9e5a798-9ce9-4e36-8c32-bc32b0dc1eae-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1050.784298] env[62070]: DEBUG oslo_concurrency.lockutils [None req-3057a186-31ce-49a6-8c86-ad7afb44d558 tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] Lock "b9e5a798-9ce9-4e36-8c32-bc32b0dc1eae-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1050.787855] env[62070]: INFO nova.compute.manager [None req-3057a186-31ce-49a6-8c86-ad7afb44d558 tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] [instance: b9e5a798-9ce9-4e36-8c32-bc32b0dc1eae] Terminating instance [ 1050.789851] env[62070]: DEBUG nova.compute.manager [None req-3057a186-31ce-49a6-8c86-ad7afb44d558 tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] [instance: b9e5a798-9ce9-4e36-8c32-bc32b0dc1eae] Start destroying the instance on the hypervisor. 
{{(pid=62070) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1050.790067] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-3057a186-31ce-49a6-8c86-ad7afb44d558 tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] [instance: b9e5a798-9ce9-4e36-8c32-bc32b0dc1eae] Destroying instance {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1050.790935] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fea0fe03-bafd-462d-b3ff-ff2ce8d2d1e4 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.800542] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-3057a186-31ce-49a6-8c86-ad7afb44d558 tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] [instance: b9e5a798-9ce9-4e36-8c32-bc32b0dc1eae] Powering off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1050.800822] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-65cf226e-2004-4ea6-8d87-97cefa4eda9a {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.806782] env[62070]: DEBUG oslo_vmware.api [None req-3057a186-31ce-49a6-8c86-ad7afb44d558 tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] Waiting for the task: (returnval){ [ 1050.806782] env[62070]: value = "task-1122361" [ 1050.806782] env[62070]: _type = "Task" [ 1050.806782] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1050.817603] env[62070]: DEBUG oslo_vmware.api [None req-3057a186-31ce-49a6-8c86-ad7afb44d558 tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] Task: {'id': task-1122361, 'name': PowerOffVM_Task} progress is 0%. 
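The instance termination that starts above runs the build steps in reverse: the VM is powered off, unregistered from the inventory, and its datastore directory is deleted (the UnregisterVM and DeleteDatastoreFile_Task calls appear in the lines that follow). A hedged sketch of those three calls, assuming vm_ref and file_mgr are the VirtualMachine and FileManager references and the datastore path is a placeholder:

    # Power off the VM (PowerOffVM_Task is a task; UnregisterVM is synchronous).
    session.wait_for_task(
        session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref))

    session.invoke_api(session.vim, 'UnregisterVM', vm_ref)

    # Remove the instance's files from the datastore.
    delete_task = session.invoke_api(
        session.vim, 'DeleteDatastoreFile_Task', file_mgr,
        name='[datastore2] <instance-uuid>')
    session.wait_for_task(delete_task)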
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1050.940069] env[62070]: DEBUG oslo_concurrency.lockutils [None req-e1da95e1-e153-4483-b838-13cc8e0b9b7f tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Acquiring lock "5cccd79d-d243-49db-8581-718dd594f3b3" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1050.940341] env[62070]: DEBUG oslo_concurrency.lockutils [None req-e1da95e1-e153-4483-b838-13cc8e0b9b7f tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Lock "5cccd79d-d243-49db-8581-718dd594f3b3" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1050.963629] env[62070]: DEBUG nova.network.neutron [None req-8668c353-fb38-4717-97b5-397aa4b133d7 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: fec23dd4-e956-42dd-b9a2-c8577f77cd81] Updating instance_info_cache with network_info: [{"id": "933a577b-8b0c-4c0d-ae12-372e4b70b7c9", "address": "fa:16:3e:34:74:6b", "network": {"id": "1ca75409-ae3f-4837-a2b6-ed2445253336", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1883034081-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f29ac48ab6544ec0bd1d210aec05dbc5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1559ce49-7345-443f-bf02-4bfeb88356ef", "external-id": "nsx-vlan-transportzone-670", "segmentation_id": 670, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap933a577b-8b", "ovs_interfaceid": "933a577b-8b0c-4c0d-ae12-372e4b70b7c9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1051.080490] env[62070]: DEBUG oslo_concurrency.lockutils [None req-e326279a-fafc-42e7-9e62-9e1b8572336d tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.282s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1051.082913] env[62070]: DEBUG oslo_concurrency.lockutils [None req-42037ebb-c440-40c3-b952-a50cbaace680 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 4.676s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1051.083173] env[62070]: DEBUG nova.objects.instance [None req-42037ebb-c440-40c3-b952-a50cbaace680 
tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Lazy-loading 'resources' on Instance uuid d65b4f74-7df5-4fb8-baa3-4f5b9b480cb2 {{(pid=62070) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1051.238997] env[62070]: DEBUG oslo_vmware.api [None req-6eb3f18f-50a0-4a78-8a7f-eefd7473ff66 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': task-1122360, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1051.316248] env[62070]: DEBUG oslo_vmware.api [None req-3057a186-31ce-49a6-8c86-ad7afb44d558 tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] Task: {'id': task-1122361, 'name': PowerOffVM_Task, 'duration_secs': 0.249471} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1051.316525] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-3057a186-31ce-49a6-8c86-ad7afb44d558 tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] [instance: b9e5a798-9ce9-4e36-8c32-bc32b0dc1eae] Powered off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1051.316696] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-3057a186-31ce-49a6-8c86-ad7afb44d558 tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] [instance: b9e5a798-9ce9-4e36-8c32-bc32b0dc1eae] Unregistering the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1051.316947] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-16a113ba-6940-4546-9b52-0dde19ea8534 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.382940] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-3057a186-31ce-49a6-8c86-ad7afb44d558 tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] [instance: b9e5a798-9ce9-4e36-8c32-bc32b0dc1eae] Unregistered the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1051.383188] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-3057a186-31ce-49a6-8c86-ad7afb44d558 tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] [instance: b9e5a798-9ce9-4e36-8c32-bc32b0dc1eae] Deleting contents of the VM from datastore datastore2 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1051.383456] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-3057a186-31ce-49a6-8c86-ad7afb44d558 tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] Deleting the datastore file [datastore2] b9e5a798-9ce9-4e36-8c32-bc32b0dc1eae {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1051.383741] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-503909ae-19f7-48a2-8cb1-09da24c7932e {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.394070] env[62070]: DEBUG oslo_vmware.api [None req-3057a186-31ce-49a6-8c86-ad7afb44d558 tempest-SecurityGroupsTestJSON-2059552961 
tempest-SecurityGroupsTestJSON-2059552961-project-member] Waiting for the task: (returnval){ [ 1051.394070] env[62070]: value = "task-1122363" [ 1051.394070] env[62070]: _type = "Task" [ 1051.394070] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1051.401940] env[62070]: DEBUG oslo_vmware.api [None req-3057a186-31ce-49a6-8c86-ad7afb44d558 tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] Task: {'id': task-1122363, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1051.443160] env[62070]: DEBUG nova.compute.utils [None req-e1da95e1-e153-4483-b838-13cc8e0b9b7f tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Using /dev/sd instead of None {{(pid=62070) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1051.466691] env[62070]: DEBUG oslo_concurrency.lockutils [None req-8668c353-fb38-4717-97b5-397aa4b133d7 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Releasing lock "refresh_cache-fec23dd4-e956-42dd-b9a2-c8577f77cd81" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1051.467025] env[62070]: DEBUG nova.compute.manager [None req-8668c353-fb38-4717-97b5-397aa4b133d7 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: fec23dd4-e956-42dd-b9a2-c8577f77cd81] Instance network_info: |[{"id": "933a577b-8b0c-4c0d-ae12-372e4b70b7c9", "address": "fa:16:3e:34:74:6b", "network": {"id": "1ca75409-ae3f-4837-a2b6-ed2445253336", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1883034081-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f29ac48ab6544ec0bd1d210aec05dbc5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1559ce49-7345-443f-bf02-4bfeb88356ef", "external-id": "nsx-vlan-transportzone-670", "segmentation_id": 670, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap933a577b-8b", "ovs_interfaceid": "933a577b-8b0c-4c0d-ae12-372e4b70b7c9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1051.467452] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-8668c353-fb38-4717-97b5-397aa4b133d7 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: fec23dd4-e956-42dd-b9a2-c8577f77cd81] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:34:74:6b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1559ce49-7345-443f-bf02-4bfeb88356ef', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '933a577b-8b0c-4c0d-ae12-372e4b70b7c9', 'vif_model': 'vmxnet3'}] {{(pid=62070) build_virtual_machine 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1051.475602] env[62070]: DEBUG oslo.service.loopingcall [None req-8668c353-fb38-4717-97b5-397aa4b133d7 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1051.476072] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fec23dd4-e956-42dd-b9a2-c8577f77cd81] Creating VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1051.476317] env[62070]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-fc4ee11c-f199-4d10-8d6c-54d2060d0530 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.496646] env[62070]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1051.496646] env[62070]: value = "task-1122364" [ 1051.496646] env[62070]: _type = "Task" [ 1051.496646] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1051.504138] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1122364, 'name': CreateVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1051.594245] env[62070]: DEBUG oslo_concurrency.lockutils [None req-e326279a-fafc-42e7-9e62-9e1b8572336d tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Lock "53a1791d-38fd-4721-b82c-2f0922348300" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 29.351s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1051.595594] env[62070]: DEBUG oslo_concurrency.lockutils [None req-7aa3c262-5cfe-48c5-be13-33f7281359fe tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Lock "53a1791d-38fd-4721-b82c-2f0922348300" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 6.001s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1051.595895] env[62070]: INFO nova.compute.manager [None req-7aa3c262-5cfe-48c5-be13-33f7281359fe tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] [instance: 53a1791d-38fd-4721-b82c-2f0922348300] Unshelving [ 1051.731555] env[62070]: DEBUG nova.compute.manager [req-7915d9a1-c437-4a28-abd8-7c836ed09f6b req-b4691832-ddae-4859-890e-e7dea7f57e4b service nova] [instance: fec23dd4-e956-42dd-b9a2-c8577f77cd81] Received event network-changed-933a577b-8b0c-4c0d-ae12-372e4b70b7c9 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1051.732027] env[62070]: DEBUG nova.compute.manager [req-7915d9a1-c437-4a28-abd8-7c836ed09f6b req-b4691832-ddae-4859-890e-e7dea7f57e4b service nova] [instance: fec23dd4-e956-42dd-b9a2-c8577f77cd81] Refreshing instance network info cache due to event network-changed-933a577b-8b0c-4c0d-ae12-372e4b70b7c9. 
{{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1051.732142] env[62070]: DEBUG oslo_concurrency.lockutils [req-7915d9a1-c437-4a28-abd8-7c836ed09f6b req-b4691832-ddae-4859-890e-e7dea7f57e4b service nova] Acquiring lock "refresh_cache-fec23dd4-e956-42dd-b9a2-c8577f77cd81" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1051.732349] env[62070]: DEBUG oslo_concurrency.lockutils [req-7915d9a1-c437-4a28-abd8-7c836ed09f6b req-b4691832-ddae-4859-890e-e7dea7f57e4b service nova] Acquired lock "refresh_cache-fec23dd4-e956-42dd-b9a2-c8577f77cd81" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1051.732455] env[62070]: DEBUG nova.network.neutron [req-7915d9a1-c437-4a28-abd8-7c836ed09f6b req-b4691832-ddae-4859-890e-e7dea7f57e4b service nova] [instance: fec23dd4-e956-42dd-b9a2-c8577f77cd81] Refreshing network info cache for port 933a577b-8b0c-4c0d-ae12-372e4b70b7c9 {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1051.749901] env[62070]: DEBUG oslo_vmware.api [None req-6eb3f18f-50a0-4a78-8a7f-eefd7473ff66 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': task-1122360, 'name': ReconfigVM_Task, 'duration_secs': 1.179832} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1051.750504] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-6eb3f18f-50a0-4a78-8a7f-eefd7473ff66 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: d8284a01-bbf6-4607-b2db-33bf2cd5457d] Reconfigured VM instance instance-00000066 to attach disk [datastore1] d8284a01-bbf6-4607-b2db-33bf2cd5457d/d8284a01-bbf6-4607-b2db-33bf2cd5457d.vmdk or device None with type sparse {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1051.751164] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a6846227-fcdc-48da-a88c-57793ddc59b7 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.757058] env[62070]: DEBUG oslo_vmware.api [None req-6eb3f18f-50a0-4a78-8a7f-eefd7473ff66 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Waiting for the task: (returnval){ [ 1051.757058] env[62070]: value = "task-1122365" [ 1051.757058] env[62070]: _type = "Task" [ 1051.757058] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1051.765444] env[62070]: DEBUG oslo_vmware.api [None req-6eb3f18f-50a0-4a78-8a7f-eefd7473ff66 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': task-1122365, 'name': Rename_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1051.801944] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9040b524-9536-45bf-b83e-5429d61e004e {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.809788] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54408442-89a6-4793-8428-2c913c2ebde7 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.842304] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8075eaf7-8a47-4bff-9537-7009aed8ca67 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.851034] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ad0a767-6be3-4fb1-a9f4-f7896258186c {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.864649] env[62070]: DEBUG nova.compute.provider_tree [None req-42037ebb-c440-40c3-b952-a50cbaace680 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1051.903369] env[62070]: DEBUG oslo_vmware.api [None req-3057a186-31ce-49a6-8c86-ad7afb44d558 tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] Task: {'id': task-1122363, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.295058} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1051.903644] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-3057a186-31ce-49a6-8c86-ad7afb44d558 tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] Deleted the datastore file {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1051.903884] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-3057a186-31ce-49a6-8c86-ad7afb44d558 tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] [instance: b9e5a798-9ce9-4e36-8c32-bc32b0dc1eae] Deleted contents of the VM from datastore datastore2 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1051.904098] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-3057a186-31ce-49a6-8c86-ad7afb44d558 tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] [instance: b9e5a798-9ce9-4e36-8c32-bc32b0dc1eae] Instance destroyed {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1051.904283] env[62070]: INFO nova.compute.manager [None req-3057a186-31ce-49a6-8c86-ad7afb44d558 tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] [instance: b9e5a798-9ce9-4e36-8c32-bc32b0dc1eae] Took 1.11 seconds to destroy the instance on the hypervisor. 
[ 1051.904530] env[62070]: DEBUG oslo.service.loopingcall [None req-3057a186-31ce-49a6-8c86-ad7afb44d558 tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1051.904729] env[62070]: DEBUG nova.compute.manager [-] [instance: b9e5a798-9ce9-4e36-8c32-bc32b0dc1eae] Deallocating network for instance {{(pid=62070) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1051.904826] env[62070]: DEBUG nova.network.neutron [-] [instance: b9e5a798-9ce9-4e36-8c32-bc32b0dc1eae] deallocate_for_instance() {{(pid=62070) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1051.946846] env[62070]: DEBUG oslo_concurrency.lockutils [None req-e1da95e1-e153-4483-b838-13cc8e0b9b7f tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Lock "5cccd79d-d243-49db-8581-718dd594f3b3" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.006s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1052.011455] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1122364, 'name': CreateVM_Task, 'duration_secs': 0.317827} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1052.011711] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fec23dd4-e956-42dd-b9a2-c8577f77cd81] Created VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1052.012706] env[62070]: DEBUG oslo_concurrency.lockutils [None req-8668c353-fb38-4717-97b5-397aa4b133d7 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1052.012967] env[62070]: DEBUG oslo_concurrency.lockutils [None req-8668c353-fb38-4717-97b5-397aa4b133d7 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Acquired lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1052.013518] env[62070]: DEBUG oslo_concurrency.lockutils [None req-8668c353-fb38-4717-97b5-397aa4b133d7 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1052.013817] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ff5add21-abcb-4648-8c3c-fbbdbb5fbcf7 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.020302] env[62070]: DEBUG oslo_vmware.api [None req-8668c353-fb38-4717-97b5-397aa4b133d7 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Waiting for the task: (returnval){ [ 1052.020302] env[62070]: value = 
"session[52f37560-87ef-95a6-a0aa-393127576bf7]52d48acd-c1b5-93f5-679c-b5aef5076d80" [ 1052.020302] env[62070]: _type = "Task" [ 1052.020302] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1052.029385] env[62070]: DEBUG oslo_vmware.api [None req-8668c353-fb38-4717-97b5-397aa4b133d7 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52d48acd-c1b5-93f5-679c-b5aef5076d80, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1052.267341] env[62070]: DEBUG oslo_vmware.api [None req-6eb3f18f-50a0-4a78-8a7f-eefd7473ff66 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': task-1122365, 'name': Rename_Task, 'duration_secs': 0.31102} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1052.267967] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-6eb3f18f-50a0-4a78-8a7f-eefd7473ff66 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: d8284a01-bbf6-4607-b2db-33bf2cd5457d] Powering on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1052.268262] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9ab2cbc5-684f-42c0-abcd-6f56790ce41a {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.274144] env[62070]: DEBUG oslo_vmware.api [None req-6eb3f18f-50a0-4a78-8a7f-eefd7473ff66 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Waiting for the task: (returnval){ [ 1052.274144] env[62070]: value = "task-1122366" [ 1052.274144] env[62070]: _type = "Task" [ 1052.274144] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1052.281312] env[62070]: DEBUG oslo_vmware.api [None req-6eb3f18f-50a0-4a78-8a7f-eefd7473ff66 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': task-1122366, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1052.368070] env[62070]: DEBUG nova.scheduler.client.report [None req-42037ebb-c440-40c3-b952-a50cbaace680 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1052.437529] env[62070]: DEBUG nova.network.neutron [req-7915d9a1-c437-4a28-abd8-7c836ed09f6b req-b4691832-ddae-4859-890e-e7dea7f57e4b service nova] [instance: fec23dd4-e956-42dd-b9a2-c8577f77cd81] Updated VIF entry in instance network info cache for port 933a577b-8b0c-4c0d-ae12-372e4b70b7c9. {{(pid=62070) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1052.437956] env[62070]: DEBUG nova.network.neutron [req-7915d9a1-c437-4a28-abd8-7c836ed09f6b req-b4691832-ddae-4859-890e-e7dea7f57e4b service nova] [instance: fec23dd4-e956-42dd-b9a2-c8577f77cd81] Updating instance_info_cache with network_info: [{"id": "933a577b-8b0c-4c0d-ae12-372e4b70b7c9", "address": "fa:16:3e:34:74:6b", "network": {"id": "1ca75409-ae3f-4837-a2b6-ed2445253336", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1883034081-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f29ac48ab6544ec0bd1d210aec05dbc5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1559ce49-7345-443f-bf02-4bfeb88356ef", "external-id": "nsx-vlan-transportzone-670", "segmentation_id": 670, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap933a577b-8b", "ovs_interfaceid": "933a577b-8b0c-4c0d-ae12-372e4b70b7c9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1052.533155] env[62070]: DEBUG oslo_vmware.api [None req-8668c353-fb38-4717-97b5-397aa4b133d7 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52d48acd-c1b5-93f5-679c-b5aef5076d80, 'name': SearchDatastore_Task, 'duration_secs': 0.011074} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1052.533513] env[62070]: DEBUG oslo_concurrency.lockutils [None req-8668c353-fb38-4717-97b5-397aa4b133d7 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Releasing lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1052.533785] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-8668c353-fb38-4717-97b5-397aa4b133d7 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: fec23dd4-e956-42dd-b9a2-c8577f77cd81] Processing image 43ea607c-7ece-4601-9b11-75c6a16aa7dd {{(pid=62070) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1052.534059] env[62070]: DEBUG oslo_concurrency.lockutils [None req-8668c353-fb38-4717-97b5-397aa4b133d7 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1052.534234] env[62070]: DEBUG oslo_concurrency.lockutils [None req-8668c353-fb38-4717-97b5-397aa4b133d7 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Acquired lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1052.534443] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-8668c353-fb38-4717-97b5-397aa4b133d7 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1052.534742] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-959d083c-15d3-45ba-920b-5fb0a323c4a6 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.543599] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-8668c353-fb38-4717-97b5-397aa4b133d7 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1052.543820] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-8668c353-fb38-4717-97b5-397aa4b133d7 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62070) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1052.544618] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-47f23d75-3b4e-4daf-828a-7557a54c986c {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.551528] env[62070]: DEBUG oslo_vmware.api [None req-8668c353-fb38-4717-97b5-397aa4b133d7 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Waiting for the task: (returnval){ [ 1052.551528] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]52be261a-5851-c326-6711-81e75e86e8f9" [ 1052.551528] env[62070]: _type = "Task" [ 1052.551528] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1052.560145] env[62070]: DEBUG oslo_vmware.api [None req-8668c353-fb38-4717-97b5-397aa4b133d7 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52be261a-5851-c326-6711-81e75e86e8f9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1052.626470] env[62070]: DEBUG oslo_concurrency.lockutils [None req-7aa3c262-5cfe-48c5-be13-33f7281359fe tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1052.662831] env[62070]: DEBUG nova.network.neutron [-] [instance: b9e5a798-9ce9-4e36-8c32-bc32b0dc1eae] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1052.784543] env[62070]: DEBUG oslo_vmware.api [None req-6eb3f18f-50a0-4a78-8a7f-eefd7473ff66 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': task-1122366, 'name': PowerOnVM_Task} progress is 94%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1052.875357] env[62070]: DEBUG oslo_concurrency.lockutils [None req-42037ebb-c440-40c3-b952-a50cbaace680 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.792s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1052.877742] env[62070]: DEBUG oslo_concurrency.lockutils [None req-150f7b60-f8d6-4378-88a6-27be0c832d10 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.332s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1052.879291] env[62070]: INFO nova.compute.claims [None req-150f7b60-f8d6-4378-88a6-27be0c832d10 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: 27d9b478-7ebb-4313-a314-679ca0292086] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1052.893642] env[62070]: INFO nova.scheduler.client.report [None req-42037ebb-c440-40c3-b952-a50cbaace680 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Deleted allocations for instance d65b4f74-7df5-4fb8-baa3-4f5b9b480cb2 [ 1052.940700] env[62070]: DEBUG oslo_concurrency.lockutils [req-7915d9a1-c437-4a28-abd8-7c836ed09f6b req-b4691832-ddae-4859-890e-e7dea7f57e4b service nova] Releasing lock "refresh_cache-fec23dd4-e956-42dd-b9a2-c8577f77cd81" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1053.016486] env[62070]: DEBUG oslo_concurrency.lockutils [None req-e1da95e1-e153-4483-b838-13cc8e0b9b7f tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Acquiring lock "5cccd79d-d243-49db-8581-718dd594f3b3" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1053.016797] env[62070]: DEBUG oslo_concurrency.lockutils [None req-e1da95e1-e153-4483-b838-13cc8e0b9b7f tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Lock "5cccd79d-d243-49db-8581-718dd594f3b3" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1053.017355] env[62070]: INFO nova.compute.manager [None req-e1da95e1-e153-4483-b838-13cc8e0b9b7f tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] [instance: 5cccd79d-d243-49db-8581-718dd594f3b3] Attaching volume e25cbba2-7a89-41cf-8f0c-a38272da6f0a to /dev/sdb [ 1053.051949] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbdc4f0a-f2c0-4b9c-9c70-a7e610267f5a {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.067874] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25ccbd6c-2a6d-4e29-a217-3ef2399bce17 {{(pid=62070) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.070807] env[62070]: DEBUG oslo_vmware.api [None req-8668c353-fb38-4717-97b5-397aa4b133d7 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52be261a-5851-c326-6711-81e75e86e8f9, 'name': SearchDatastore_Task, 'duration_secs': 0.011194} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1053.071938] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-75884ca1-3134-4f78-995a-501741d51af7 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.077817] env[62070]: DEBUG oslo_vmware.api [None req-8668c353-fb38-4717-97b5-397aa4b133d7 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Waiting for the task: (returnval){ [ 1053.077817] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]52156fda-8864-1660-7254-81ae837b04aa" [ 1053.077817] env[62070]: _type = "Task" [ 1053.077817] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1053.085065] env[62070]: DEBUG nova.virt.block_device [None req-e1da95e1-e153-4483-b838-13cc8e0b9b7f tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] [instance: 5cccd79d-d243-49db-8581-718dd594f3b3] Updating existing volume attachment record: 3d77cbfd-858b-43d0-9f9a-47e90ab00428 {{(pid=62070) _volume_attach /opt/stack/nova/nova/virt/block_device.py:679}} [ 1053.091514] env[62070]: DEBUG oslo_vmware.api [None req-8668c353-fb38-4717-97b5-397aa4b133d7 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52156fda-8864-1660-7254-81ae837b04aa, 'name': SearchDatastore_Task, 'duration_secs': 0.011392} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1053.091705] env[62070]: DEBUG oslo_concurrency.lockutils [None req-8668c353-fb38-4717-97b5-397aa4b133d7 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Releasing lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1053.091957] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-8668c353-fb38-4717-97b5-397aa4b133d7 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore1] fec23dd4-e956-42dd-b9a2-c8577f77cd81/fec23dd4-e956-42dd-b9a2-c8577f77cd81.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1053.092239] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-81e73153-bd2e-4ba5-a25d-d087ec9720ce {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.099929] env[62070]: DEBUG oslo_vmware.api [None req-8668c353-fb38-4717-97b5-397aa4b133d7 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Waiting for the task: (returnval){ [ 1053.099929] env[62070]: value = "task-1122367" [ 1053.099929] env[62070]: _type = "Task" [ 1053.099929] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1053.107508] env[62070]: DEBUG oslo_vmware.api [None req-8668c353-fb38-4717-97b5-397aa4b133d7 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Task: {'id': task-1122367, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1053.152893] env[62070]: DEBUG oslo_concurrency.lockutils [None req-fe955260-aba3-482d-aeff-9c9a29ad6847 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Acquiring lock "33d04e59-da01-4ba3-ac42-ab93372a332d" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1053.153443] env[62070]: DEBUG oslo_concurrency.lockutils [None req-fe955260-aba3-482d-aeff-9c9a29ad6847 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Lock "33d04e59-da01-4ba3-ac42-ab93372a332d" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1053.153728] env[62070]: DEBUG nova.compute.manager [None req-fe955260-aba3-482d-aeff-9c9a29ad6847 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 33d04e59-da01-4ba3-ac42-ab93372a332d] Going to confirm migration 5 {{(pid=62070) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:4783}} [ 1053.165256] env[62070]: INFO nova.compute.manager [-] [instance: b9e5a798-9ce9-4e36-8c32-bc32b0dc1eae] Took 1.26 seconds to deallocate network for instance. [ 1053.285259] env[62070]: DEBUG oslo_vmware.api [None req-6eb3f18f-50a0-4a78-8a7f-eefd7473ff66 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': task-1122366, 'name': PowerOnVM_Task, 'duration_secs': 0.574212} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1053.285661] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-6eb3f18f-50a0-4a78-8a7f-eefd7473ff66 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: d8284a01-bbf6-4607-b2db-33bf2cd5457d] Powered on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1053.285942] env[62070]: INFO nova.compute.manager [None req-6eb3f18f-50a0-4a78-8a7f-eefd7473ff66 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: d8284a01-bbf6-4607-b2db-33bf2cd5457d] Took 8.60 seconds to spawn the instance on the hypervisor. 
[ 1053.286233] env[62070]: DEBUG nova.compute.manager [None req-6eb3f18f-50a0-4a78-8a7f-eefd7473ff66 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: d8284a01-bbf6-4607-b2db-33bf2cd5457d] Checking state {{(pid=62070) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1053.287073] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad448574-3051-4362-a89b-6836af49c430 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.400786] env[62070]: DEBUG oslo_concurrency.lockutils [None req-42037ebb-c440-40c3-b952-a50cbaace680 tempest-ServerDiskConfigTestJSON-1824612993 tempest-ServerDiskConfigTestJSON-1824612993-project-member] Lock "d65b4f74-7df5-4fb8-baa3-4f5b9b480cb2" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 9.967s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1053.611259] env[62070]: DEBUG oslo_vmware.api [None req-8668c353-fb38-4717-97b5-397aa4b133d7 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Task: {'id': task-1122367, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1053.673705] env[62070]: DEBUG oslo_concurrency.lockutils [None req-3057a186-31ce-49a6-8c86-ad7afb44d558 tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1053.694664] env[62070]: DEBUG oslo_concurrency.lockutils [None req-fe955260-aba3-482d-aeff-9c9a29ad6847 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Acquiring lock "refresh_cache-33d04e59-da01-4ba3-ac42-ab93372a332d" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1053.694868] env[62070]: DEBUG oslo_concurrency.lockutils [None req-fe955260-aba3-482d-aeff-9c9a29ad6847 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Acquired lock "refresh_cache-33d04e59-da01-4ba3-ac42-ab93372a332d" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1053.695064] env[62070]: DEBUG nova.network.neutron [None req-fe955260-aba3-482d-aeff-9c9a29ad6847 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 33d04e59-da01-4ba3-ac42-ab93372a332d] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1053.695290] env[62070]: DEBUG nova.objects.instance [None req-fe955260-aba3-482d-aeff-9c9a29ad6847 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Lazy-loading 'info_cache' on Instance uuid 33d04e59-da01-4ba3-ac42-ab93372a332d {{(pid=62070) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1053.754777] env[62070]: DEBUG nova.compute.manager [req-335f1ea5-1303-4b8f-a05b-98602b3e7dce req-d1e1b1b2-2a44-414d-90a8-fc4f770860cd service nova] [instance: b9e5a798-9ce9-4e36-8c32-bc32b0dc1eae] 
Received event network-vif-deleted-f30abb0e-6245-49cc-912a-4685dac5186b {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1053.804998] env[62070]: INFO nova.compute.manager [None req-6eb3f18f-50a0-4a78-8a7f-eefd7473ff66 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: d8284a01-bbf6-4607-b2db-33bf2cd5457d] Took 17.03 seconds to build instance. [ 1054.075196] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31dd7f8f-49c9-4ec1-9c66-d324e3105ab2 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.086030] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09372297-7359-41a3-a116-a52397b57d08 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.117869] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b99048b5-f048-48ce-bd05-eb6c8099ece3 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.125499] env[62070]: DEBUG oslo_vmware.api [None req-8668c353-fb38-4717-97b5-397aa4b133d7 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Task: {'id': task-1122367, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.580194} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1054.127511] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-8668c353-fb38-4717-97b5-397aa4b133d7 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore1] fec23dd4-e956-42dd-b9a2-c8577f77cd81/fec23dd4-e956-42dd-b9a2-c8577f77cd81.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 1054.127802] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-8668c353-fb38-4717-97b5-397aa4b133d7 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: fec23dd4-e956-42dd-b9a2-c8577f77cd81] Extending root virtual disk to 1048576 {{(pid=62070) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1054.128027] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8dbbe243-907d-4e6d-ac19-59a651dca82d {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.130726] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23bf57ea-be4b-48cd-a2ce-a976cd4d505c {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.144972] env[62070]: DEBUG nova.compute.provider_tree [None req-150f7b60-f8d6-4378-88a6-27be0c832d10 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1054.148463] env[62070]: DEBUG oslo_vmware.api [None 
req-8668c353-fb38-4717-97b5-397aa4b133d7 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Waiting for the task: (returnval){ [ 1054.148463] env[62070]: value = "task-1122369" [ 1054.148463] env[62070]: _type = "Task" [ 1054.148463] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1054.156648] env[62070]: DEBUG oslo_vmware.api [None req-8668c353-fb38-4717-97b5-397aa4b133d7 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Task: {'id': task-1122369, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1054.307228] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6eb3f18f-50a0-4a78-8a7f-eefd7473ff66 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Lock "d8284a01-bbf6-4607-b2db-33bf2cd5457d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 18.545s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1054.650141] env[62070]: DEBUG nova.scheduler.client.report [None req-150f7b60-f8d6-4378-88a6-27be0c832d10 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1054.662490] env[62070]: DEBUG oslo_vmware.api [None req-8668c353-fb38-4717-97b5-397aa4b133d7 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Task: {'id': task-1122369, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.170338} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1054.662751] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-8668c353-fb38-4717-97b5-397aa4b133d7 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: fec23dd4-e956-42dd-b9a2-c8577f77cd81] Extended root virtual disk {{(pid=62070) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1054.663697] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb7796b8-3b77-49e6-8b67-9e4ebbe60a12 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.685404] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-8668c353-fb38-4717-97b5-397aa4b133d7 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: fec23dd4-e956-42dd-b9a2-c8577f77cd81] Reconfiguring VM instance instance-00000067 to attach disk [datastore1] fec23dd4-e956-42dd-b9a2-c8577f77cd81/fec23dd4-e956-42dd-b9a2-c8577f77cd81.vmdk or device None with type sparse {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1054.685729] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fb54f6f7-00d5-4a2c-bc5b-c0fc9e90686d {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.709603] env[62070]: DEBUG oslo_vmware.api [None req-8668c353-fb38-4717-97b5-397aa4b133d7 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Waiting for the task: (returnval){ [ 1054.709603] env[62070]: value = "task-1122370" [ 1054.709603] env[62070]: _type = "Task" [ 1054.709603] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1054.722314] env[62070]: DEBUG oslo_vmware.api [None req-8668c353-fb38-4717-97b5-397aa4b133d7 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Task: {'id': task-1122370, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1054.915318] env[62070]: DEBUG nova.network.neutron [None req-fe955260-aba3-482d-aeff-9c9a29ad6847 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 33d04e59-da01-4ba3-ac42-ab93372a332d] Updating instance_info_cache with network_info: [{"id": "222fba36-759a-41f7-a82a-cb4047bd3725", "address": "fa:16:3e:7a:4c:a1", "network": {"id": "5ea0fffc-372c-450e-b27b-10959077d58f", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1853458988-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9191f0e6c2ee401abca64c0780e230bf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f3c995e9-7f2f-420c-880a-d60da6e708ad", "external-id": "nsx-vlan-transportzone-166", "segmentation_id": 166, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap222fba36-75", "ovs_interfaceid": "222fba36-759a-41f7-a82a-cb4047bd3725", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1055.158073] env[62070]: DEBUG oslo_concurrency.lockutils [None req-150f7b60-f8d6-4378-88a6-27be0c832d10 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.280s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1055.159523] env[62070]: DEBUG nova.compute.manager [None req-150f7b60-f8d6-4378-88a6-27be0c832d10 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: 27d9b478-7ebb-4313-a314-679ca0292086] Start building networks asynchronously for instance. {{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1055.161259] env[62070]: DEBUG oslo_concurrency.lockutils [None req-fe7daec4-1432-4f51-b956-c8e29c87e358 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.985s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1055.164984] env[62070]: INFO nova.compute.claims [None req-fe7daec4-1432-4f51-b956-c8e29c87e358 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] [instance: 20c4fabc-fc9b-49c7-ab28-fa092ad66038] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1055.222098] env[62070]: DEBUG oslo_vmware.api [None req-8668c353-fb38-4717-97b5-397aa4b133d7 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Task: {'id': task-1122370, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1055.418844] env[62070]: DEBUG oslo_concurrency.lockutils [None req-fe955260-aba3-482d-aeff-9c9a29ad6847 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Releasing lock "refresh_cache-33d04e59-da01-4ba3-ac42-ab93372a332d" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1055.419231] env[62070]: DEBUG nova.objects.instance [None req-fe955260-aba3-482d-aeff-9c9a29ad6847 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Lazy-loading 'migration_context' on Instance uuid 33d04e59-da01-4ba3-ac42-ab93372a332d {{(pid=62070) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1055.494684] env[62070]: DEBUG nova.compute.manager [req-a4fddac5-edaa-4eb1-bd36-eb61e7e6ea88 req-050bcac5-2f33-4e4b-b611-afd603557a66 service nova] [instance: d8284a01-bbf6-4607-b2db-33bf2cd5457d] Received event network-changed-629028b1-5fa6-4d6e-ba82-8c3c52f44a32 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1055.495154] env[62070]: DEBUG nova.compute.manager [req-a4fddac5-edaa-4eb1-bd36-eb61e7e6ea88 req-050bcac5-2f33-4e4b-b611-afd603557a66 service nova] [instance: d8284a01-bbf6-4607-b2db-33bf2cd5457d] Refreshing instance network info cache due to event network-changed-629028b1-5fa6-4d6e-ba82-8c3c52f44a32. {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1055.495485] env[62070]: DEBUG oslo_concurrency.lockutils [req-a4fddac5-edaa-4eb1-bd36-eb61e7e6ea88 req-050bcac5-2f33-4e4b-b611-afd603557a66 service nova] Acquiring lock "refresh_cache-d8284a01-bbf6-4607-b2db-33bf2cd5457d" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1055.495741] env[62070]: DEBUG oslo_concurrency.lockutils [req-a4fddac5-edaa-4eb1-bd36-eb61e7e6ea88 req-050bcac5-2f33-4e4b-b611-afd603557a66 service nova] Acquired lock "refresh_cache-d8284a01-bbf6-4607-b2db-33bf2cd5457d" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1055.496288] env[62070]: DEBUG nova.network.neutron [req-a4fddac5-edaa-4eb1-bd36-eb61e7e6ea88 req-050bcac5-2f33-4e4b-b611-afd603557a66 service nova] [instance: d8284a01-bbf6-4607-b2db-33bf2cd5457d] Refreshing network info cache for port 629028b1-5fa6-4d6e-ba82-8c3c52f44a32 {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1055.667399] env[62070]: DEBUG nova.compute.utils [None req-150f7b60-f8d6-4378-88a6-27be0c832d10 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Using /dev/sd instead of None {{(pid=62070) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1055.670229] env[62070]: DEBUG nova.compute.manager [None req-150f7b60-f8d6-4378-88a6-27be0c832d10 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: 27d9b478-7ebb-4313-a314-679ca0292086] Allocating IP information in the background. 
{{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1055.670424] env[62070]: DEBUG nova.network.neutron [None req-150f7b60-f8d6-4378-88a6-27be0c832d10 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: 27d9b478-7ebb-4313-a314-679ca0292086] allocate_for_instance() {{(pid=62070) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1055.722782] env[62070]: DEBUG oslo_vmware.api [None req-8668c353-fb38-4717-97b5-397aa4b133d7 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Task: {'id': task-1122370, 'name': ReconfigVM_Task, 'duration_secs': 0.890052} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1055.723084] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-8668c353-fb38-4717-97b5-397aa4b133d7 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: fec23dd4-e956-42dd-b9a2-c8577f77cd81] Reconfigured VM instance instance-00000067 to attach disk [datastore1] fec23dd4-e956-42dd-b9a2-c8577f77cd81/fec23dd4-e956-42dd-b9a2-c8577f77cd81.vmdk or device None with type sparse {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1055.723714] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-881e8804-5bd6-408a-9eae-b051b82531ef {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.727422] env[62070]: DEBUG nova.policy [None req-150f7b60-f8d6-4378-88a6-27be0c832d10 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'db9baf29d0b5489da2657286bfd695c0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '91e246e32f29422e90fae974cfee9d8f', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62070) authorize /opt/stack/nova/nova/policy.py:201}} [ 1055.730691] env[62070]: DEBUG oslo_vmware.api [None req-8668c353-fb38-4717-97b5-397aa4b133d7 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Waiting for the task: (returnval){ [ 1055.730691] env[62070]: value = "task-1122372" [ 1055.730691] env[62070]: _type = "Task" [ 1055.730691] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1055.738324] env[62070]: DEBUG oslo_vmware.api [None req-8668c353-fb38-4717-97b5-397aa4b133d7 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Task: {'id': task-1122372, 'name': Rename_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1055.922823] env[62070]: DEBUG nova.objects.base [None req-fe955260-aba3-482d-aeff-9c9a29ad6847 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Object Instance<33d04e59-da01-4ba3-ac42-ab93372a332d> lazy-loaded attributes: info_cache,migration_context {{(pid=62070) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 1055.923308] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5994963-8a5c-48d0-ba90-26a948d8fd83 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.946039] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7ce24e65-7fc3-4c63-8714-d9ed7c656a70 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.951880] env[62070]: DEBUG oslo_vmware.api [None req-fe955260-aba3-482d-aeff-9c9a29ad6847 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Waiting for the task: (returnval){ [ 1055.951880] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]5273c8ae-572b-688c-289c-08337863c32c" [ 1055.951880] env[62070]: _type = "Task" [ 1055.951880] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1055.960502] env[62070]: DEBUG oslo_vmware.api [None req-fe955260-aba3-482d-aeff-9c9a29ad6847 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]5273c8ae-572b-688c-289c-08337863c32c, 'name': SearchDatastore_Task} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1055.961096] env[62070]: DEBUG oslo_concurrency.lockutils [None req-fe955260-aba3-482d-aeff-9c9a29ad6847 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1055.981246] env[62070]: DEBUG nova.network.neutron [None req-150f7b60-f8d6-4378-88a6-27be0c832d10 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: 27d9b478-7ebb-4313-a314-679ca0292086] Successfully created port: 89776caa-2b48-4f16-8206-436d2f129585 {{(pid=62070) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1056.175093] env[62070]: DEBUG nova.compute.manager [None req-150f7b60-f8d6-4378-88a6-27be0c832d10 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: 27d9b478-7ebb-4313-a314-679ca0292086] Start building block device mappings for instance. {{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1056.248267] env[62070]: DEBUG oslo_vmware.api [None req-8668c353-fb38-4717-97b5-397aa4b133d7 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Task: {'id': task-1122372, 'name': Rename_Task, 'duration_secs': 0.145689} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1056.249014] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-8668c353-fb38-4717-97b5-397aa4b133d7 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: fec23dd4-e956-42dd-b9a2-c8577f77cd81] Powering on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1056.249341] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b003be29-72e5-4da8-bba6-5ef5d7c2993e {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.256399] env[62070]: DEBUG oslo_vmware.api [None req-8668c353-fb38-4717-97b5-397aa4b133d7 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Waiting for the task: (returnval){ [ 1056.256399] env[62070]: value = "task-1122373" [ 1056.256399] env[62070]: _type = "Task" [ 1056.256399] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1056.264617] env[62070]: DEBUG oslo_vmware.api [None req-8668c353-fb38-4717-97b5-397aa4b133d7 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Task: {'id': task-1122373, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1056.353155] env[62070]: DEBUG nova.network.neutron [req-a4fddac5-edaa-4eb1-bd36-eb61e7e6ea88 req-050bcac5-2f33-4e4b-b611-afd603557a66 service nova] [instance: d8284a01-bbf6-4607-b2db-33bf2cd5457d] Updated VIF entry in instance network info cache for port 629028b1-5fa6-4d6e-ba82-8c3c52f44a32. 
{{(pid=62070) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1056.353637] env[62070]: DEBUG nova.network.neutron [req-a4fddac5-edaa-4eb1-bd36-eb61e7e6ea88 req-050bcac5-2f33-4e4b-b611-afd603557a66 service nova] [instance: d8284a01-bbf6-4607-b2db-33bf2cd5457d] Updating instance_info_cache with network_info: [{"id": "629028b1-5fa6-4d6e-ba82-8c3c52f44a32", "address": "fa:16:3e:8d:18:bf", "network": {"id": "0d81bd04-b549-4e1f-97a2-0a0b9391dd3f", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-108214409-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c91e5eeeeb1742f499b2edaf76a93a3b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cf5bfbae-a882-4d34-be33-b31e274b3077", "external-id": "nsx-vlan-transportzone-556", "segmentation_id": 556, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap629028b1-5f", "ovs_interfaceid": "629028b1-5fa6-4d6e-ba82-8c3c52f44a32", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1056.416903] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac815e7f-2572-4cc2-b3b4-0898c3626c4b {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.428051] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b8c9333-ff94-42c1-b067-203637d095e2 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.460703] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59483ae5-7144-422c-a10a-9c786d951be9 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.469266] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d85a1f57-ca99-4203-94a3-b5effc1e0420 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.481733] env[62070]: DEBUG nova.compute.provider_tree [None req-fe7daec4-1432-4f51-b956-c8e29c87e358 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1056.766629] env[62070]: DEBUG oslo_vmware.api [None req-8668c353-fb38-4717-97b5-397aa4b133d7 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Task: {'id': task-1122373, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1056.857273] env[62070]: DEBUG oslo_concurrency.lockutils [req-a4fddac5-edaa-4eb1-bd36-eb61e7e6ea88 req-050bcac5-2f33-4e4b-b611-afd603557a66 service nova] Releasing lock "refresh_cache-d8284a01-bbf6-4607-b2db-33bf2cd5457d" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1056.986016] env[62070]: DEBUG nova.scheduler.client.report [None req-fe7daec4-1432-4f51-b956-c8e29c87e358 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1057.185873] env[62070]: DEBUG nova.compute.manager [None req-150f7b60-f8d6-4378-88a6-27be0c832d10 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: 27d9b478-7ebb-4313-a314-679ca0292086] Start spawning the instance on the hypervisor. {{(pid=62070) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1057.211998] env[62070]: DEBUG nova.virt.hardware [None req-150f7b60-f8d6-4378-88a6-27be0c832d10 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T09:21:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T09:21:17Z,direct_url=,disk_format='vmdk',id=43ea607c-7ece-4601-9b11-75c6a16aa7dd,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9d42cb2bbadf40d6b35f237f71234611',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T09:21:18Z,virtual_size=,visibility=), allow threads: False {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1057.212286] env[62070]: DEBUG nova.virt.hardware [None req-150f7b60-f8d6-4378-88a6-27be0c832d10 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Flavor limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1057.212447] env[62070]: DEBUG nova.virt.hardware [None req-150f7b60-f8d6-4378-88a6-27be0c832d10 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Image limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1057.212649] env[62070]: DEBUG nova.virt.hardware [None req-150f7b60-f8d6-4378-88a6-27be0c832d10 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Flavor pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1057.212809] env[62070]: DEBUG 
nova.virt.hardware [None req-150f7b60-f8d6-4378-88a6-27be0c832d10 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Image pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1057.212956] env[62070]: DEBUG nova.virt.hardware [None req-150f7b60-f8d6-4378-88a6-27be0c832d10 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1057.213187] env[62070]: DEBUG nova.virt.hardware [None req-150f7b60-f8d6-4378-88a6-27be0c832d10 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1057.213351] env[62070]: DEBUG nova.virt.hardware [None req-150f7b60-f8d6-4378-88a6-27be0c832d10 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1057.213576] env[62070]: DEBUG nova.virt.hardware [None req-150f7b60-f8d6-4378-88a6-27be0c832d10 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Got 1 possible topologies {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1057.213790] env[62070]: DEBUG nova.virt.hardware [None req-150f7b60-f8d6-4378-88a6-27be0c832d10 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1057.213973] env[62070]: DEBUG nova.virt.hardware [None req-150f7b60-f8d6-4378-88a6-27be0c832d10 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1057.214853] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09cce0e8-0453-4cd8-93b4-ef87229880e6 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.222459] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b4cd83d-bd6b-49c6-9bad-c429061e2034 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.266107] env[62070]: DEBUG oslo_vmware.api [None req-8668c353-fb38-4717-97b5-397aa4b133d7 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Task: {'id': task-1122373, 'name': PowerOnVM_Task, 'duration_secs': 0.64521} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1057.266383] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-8668c353-fb38-4717-97b5-397aa4b133d7 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: fec23dd4-e956-42dd-b9a2-c8577f77cd81] Powered on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1057.266605] env[62070]: INFO nova.compute.manager [None req-8668c353-fb38-4717-97b5-397aa4b133d7 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: fec23dd4-e956-42dd-b9a2-c8577f77cd81] Took 8.30 seconds to spawn the instance on the hypervisor. [ 1057.266795] env[62070]: DEBUG nova.compute.manager [None req-8668c353-fb38-4717-97b5-397aa4b133d7 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: fec23dd4-e956-42dd-b9a2-c8577f77cd81] Checking state {{(pid=62070) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1057.267638] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f54041a5-999c-4617-bdf6-ded2d2779e2a {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.412247] env[62070]: DEBUG nova.compute.manager [req-d49f7170-3400-4b4a-8d26-1070a4f4d56b req-cdc66ed4-7f92-4f30-8bfb-1f5468aaf33c service nova] [instance: 27d9b478-7ebb-4313-a314-679ca0292086] Received event network-vif-plugged-89776caa-2b48-4f16-8206-436d2f129585 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1057.412546] env[62070]: DEBUG oslo_concurrency.lockutils [req-d49f7170-3400-4b4a-8d26-1070a4f4d56b req-cdc66ed4-7f92-4f30-8bfb-1f5468aaf33c service nova] Acquiring lock "27d9b478-7ebb-4313-a314-679ca0292086-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1057.413433] env[62070]: DEBUG oslo_concurrency.lockutils [req-d49f7170-3400-4b4a-8d26-1070a4f4d56b req-cdc66ed4-7f92-4f30-8bfb-1f5468aaf33c service nova] Lock "27d9b478-7ebb-4313-a314-679ca0292086-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1057.413541] env[62070]: DEBUG oslo_concurrency.lockutils [req-d49f7170-3400-4b4a-8d26-1070a4f4d56b req-cdc66ed4-7f92-4f30-8bfb-1f5468aaf33c service nova] Lock "27d9b478-7ebb-4313-a314-679ca0292086-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1057.413761] env[62070]: DEBUG nova.compute.manager [req-d49f7170-3400-4b4a-8d26-1070a4f4d56b req-cdc66ed4-7f92-4f30-8bfb-1f5468aaf33c service nova] [instance: 27d9b478-7ebb-4313-a314-679ca0292086] No waiting events found dispatching network-vif-plugged-89776caa-2b48-4f16-8206-436d2f129585 {{(pid=62070) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1057.414048] env[62070]: WARNING nova.compute.manager [req-d49f7170-3400-4b4a-8d26-1070a4f4d56b req-cdc66ed4-7f92-4f30-8bfb-1f5468aaf33c service nova] [instance: 27d9b478-7ebb-4313-a314-679ca0292086] Received unexpected event 
network-vif-plugged-89776caa-2b48-4f16-8206-436d2f129585 for instance with vm_state building and task_state spawning. [ 1057.491512] env[62070]: DEBUG oslo_concurrency.lockutils [None req-fe7daec4-1432-4f51-b956-c8e29c87e358 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.330s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1057.492074] env[62070]: DEBUG nova.compute.manager [None req-fe7daec4-1432-4f51-b956-c8e29c87e358 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] [instance: 20c4fabc-fc9b-49c7-ab28-fa092ad66038] Start building networks asynchronously for instance. {{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1057.494874] env[62070]: DEBUG oslo_concurrency.lockutils [None req-7aa3c262-5cfe-48c5-be13-33f7281359fe tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.869s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1057.495106] env[62070]: DEBUG nova.objects.instance [None req-7aa3c262-5cfe-48c5-be13-33f7281359fe tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Lazy-loading 'pci_requests' on Instance uuid 53a1791d-38fd-4721-b82c-2f0922348300 {{(pid=62070) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1057.637282] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-e1da95e1-e153-4483-b838-13cc8e0b9b7f tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] [instance: 5cccd79d-d243-49db-8581-718dd594f3b3] Volume attach. 
Driver type: vmdk {{(pid=62070) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1057.637539] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-e1da95e1-e153-4483-b838-13cc8e0b9b7f tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] [instance: 5cccd79d-d243-49db-8581-718dd594f3b3] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-245502', 'volume_id': 'e25cbba2-7a89-41cf-8f0c-a38272da6f0a', 'name': 'volume-e25cbba2-7a89-41cf-8f0c-a38272da6f0a', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '5cccd79d-d243-49db-8581-718dd594f3b3', 'attached_at': '', 'detached_at': '', 'volume_id': 'e25cbba2-7a89-41cf-8f0c-a38272da6f0a', 'serial': 'e25cbba2-7a89-41cf-8f0c-a38272da6f0a'} {{(pid=62070) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1057.640410] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba6766fe-a92e-44d6-b40e-c8bedead89b7 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.666713] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-187bba0e-ba4b-4a4d-96ed-90168e1738a4 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.694562] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-e1da95e1-e153-4483-b838-13cc8e0b9b7f tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] [instance: 5cccd79d-d243-49db-8581-718dd594f3b3] Reconfiguring VM instance instance-0000005f to attach disk [datastore2] volume-e25cbba2-7a89-41cf-8f0c-a38272da6f0a/volume-e25cbba2-7a89-41cf-8f0c-a38272da6f0a.vmdk or device None with type thin {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1057.694699] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1949c861-d38d-46e1-a21b-72b862af0cd7 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.713981] env[62070]: DEBUG oslo_vmware.api [None req-e1da95e1-e153-4483-b838-13cc8e0b9b7f tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Waiting for the task: (returnval){ [ 1057.713981] env[62070]: value = "task-1122375" [ 1057.713981] env[62070]: _type = "Task" [ 1057.713981] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1057.722532] env[62070]: DEBUG oslo_vmware.api [None req-e1da95e1-e153-4483-b838-13cc8e0b9b7f tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Task: {'id': task-1122375, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1057.723343] env[62070]: DEBUG nova.network.neutron [None req-150f7b60-f8d6-4378-88a6-27be0c832d10 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: 27d9b478-7ebb-4313-a314-679ca0292086] Successfully updated port: 89776caa-2b48-4f16-8206-436d2f129585 {{(pid=62070) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1057.786483] env[62070]: INFO nova.compute.manager [None req-8668c353-fb38-4717-97b5-397aa4b133d7 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: fec23dd4-e956-42dd-b9a2-c8577f77cd81] Took 14.38 seconds to build instance. [ 1058.001497] env[62070]: DEBUG nova.compute.utils [None req-fe7daec4-1432-4f51-b956-c8e29c87e358 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Using /dev/sd instead of None {{(pid=62070) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1058.005363] env[62070]: DEBUG nova.objects.instance [None req-7aa3c262-5cfe-48c5-be13-33f7281359fe tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Lazy-loading 'numa_topology' on Instance uuid 53a1791d-38fd-4721-b82c-2f0922348300 {{(pid=62070) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1058.006677] env[62070]: DEBUG nova.compute.manager [None req-fe7daec4-1432-4f51-b956-c8e29c87e358 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] [instance: 20c4fabc-fc9b-49c7-ab28-fa092ad66038] Allocating IP information in the background. {{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1058.006902] env[62070]: DEBUG nova.network.neutron [None req-fe7daec4-1432-4f51-b956-c8e29c87e358 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] [instance: 20c4fabc-fc9b-49c7-ab28-fa092ad66038] allocate_for_instance() {{(pid=62070) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1058.050598] env[62070]: DEBUG nova.policy [None req-fe7daec4-1432-4f51-b956-c8e29c87e358 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '234556dc76884adb8859102c456672f3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ca25fba006b740f2a86fe10e4abe9400', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62070) authorize /opt/stack/nova/nova/policy.py:201}} [ 1058.225079] env[62070]: DEBUG oslo_vmware.api [None req-e1da95e1-e153-4483-b838-13cc8e0b9b7f tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Task: {'id': task-1122375, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1058.226696] env[62070]: DEBUG oslo_concurrency.lockutils [None req-150f7b60-f8d6-4378-88a6-27be0c832d10 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Acquiring lock "refresh_cache-27d9b478-7ebb-4313-a314-679ca0292086" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1058.226847] env[62070]: DEBUG oslo_concurrency.lockutils [None req-150f7b60-f8d6-4378-88a6-27be0c832d10 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Acquired lock "refresh_cache-27d9b478-7ebb-4313-a314-679ca0292086" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1058.226987] env[62070]: DEBUG nova.network.neutron [None req-150f7b60-f8d6-4378-88a6-27be0c832d10 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: 27d9b478-7ebb-4313-a314-679ca0292086] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1058.287918] env[62070]: DEBUG oslo_concurrency.lockutils [None req-8668c353-fb38-4717-97b5-397aa4b133d7 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Lock "fec23dd4-e956-42dd-b9a2-c8577f77cd81" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.890s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1058.353523] env[62070]: DEBUG nova.network.neutron [None req-fe7daec4-1432-4f51-b956-c8e29c87e358 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] [instance: 20c4fabc-fc9b-49c7-ab28-fa092ad66038] Successfully created port: 0eb3c7d4-224c-4d94-9450-0623a1e1b162 {{(pid=62070) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1058.445392] env[62070]: DEBUG nova.compute.manager [req-ac18bbc3-656b-4409-a945-03a3e36c6784 req-27289a47-7bdc-4ba8-89dd-485653ac330c service nova] [instance: fec23dd4-e956-42dd-b9a2-c8577f77cd81] Received event network-changed-933a577b-8b0c-4c0d-ae12-372e4b70b7c9 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1058.445648] env[62070]: DEBUG nova.compute.manager [req-ac18bbc3-656b-4409-a945-03a3e36c6784 req-27289a47-7bdc-4ba8-89dd-485653ac330c service nova] [instance: fec23dd4-e956-42dd-b9a2-c8577f77cd81] Refreshing instance network info cache due to event network-changed-933a577b-8b0c-4c0d-ae12-372e4b70b7c9. 
{{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1058.445910] env[62070]: DEBUG oslo_concurrency.lockutils [req-ac18bbc3-656b-4409-a945-03a3e36c6784 req-27289a47-7bdc-4ba8-89dd-485653ac330c service nova] Acquiring lock "refresh_cache-fec23dd4-e956-42dd-b9a2-c8577f77cd81" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1058.446102] env[62070]: DEBUG oslo_concurrency.lockutils [req-ac18bbc3-656b-4409-a945-03a3e36c6784 req-27289a47-7bdc-4ba8-89dd-485653ac330c service nova] Acquired lock "refresh_cache-fec23dd4-e956-42dd-b9a2-c8577f77cd81" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1058.446354] env[62070]: DEBUG nova.network.neutron [req-ac18bbc3-656b-4409-a945-03a3e36c6784 req-27289a47-7bdc-4ba8-89dd-485653ac330c service nova] [instance: fec23dd4-e956-42dd-b9a2-c8577f77cd81] Refreshing network info cache for port 933a577b-8b0c-4c0d-ae12-372e4b70b7c9 {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1058.508057] env[62070]: DEBUG nova.compute.manager [None req-fe7daec4-1432-4f51-b956-c8e29c87e358 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] [instance: 20c4fabc-fc9b-49c7-ab28-fa092ad66038] Start building block device mappings for instance. {{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1058.512512] env[62070]: INFO nova.compute.claims [None req-7aa3c262-5cfe-48c5-be13-33f7281359fe tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] [instance: 53a1791d-38fd-4721-b82c-2f0922348300] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1058.724703] env[62070]: DEBUG oslo_vmware.api [None req-e1da95e1-e153-4483-b838-13cc8e0b9b7f tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Task: {'id': task-1122375, 'name': ReconfigVM_Task, 'duration_secs': 0.772927} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1058.724973] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-e1da95e1-e153-4483-b838-13cc8e0b9b7f tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] [instance: 5cccd79d-d243-49db-8581-718dd594f3b3] Reconfigured VM instance instance-0000005f to attach disk [datastore2] volume-e25cbba2-7a89-41cf-8f0c-a38272da6f0a/volume-e25cbba2-7a89-41cf-8f0c-a38272da6f0a.vmdk or device None with type thin {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1058.731505] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a3453aa5-6456-412d-8030-a8cd390ffb6d {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.746621] env[62070]: DEBUG oslo_vmware.api [None req-e1da95e1-e153-4483-b838-13cc8e0b9b7f tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Waiting for the task: (returnval){ [ 1058.746621] env[62070]: value = "task-1122376" [ 1058.746621] env[62070]: _type = "Task" [ 1058.746621] env[62070]: } to complete. 
{{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1058.754281] env[62070]: DEBUG oslo_vmware.api [None req-e1da95e1-e153-4483-b838-13cc8e0b9b7f tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Task: {'id': task-1122376, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1058.783942] env[62070]: DEBUG nova.network.neutron [None req-150f7b60-f8d6-4378-88a6-27be0c832d10 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: 27d9b478-7ebb-4313-a314-679ca0292086] Instance cache missing network info. {{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1058.993016] env[62070]: DEBUG nova.network.neutron [None req-150f7b60-f8d6-4378-88a6-27be0c832d10 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: 27d9b478-7ebb-4313-a314-679ca0292086] Updating instance_info_cache with network_info: [{"id": "89776caa-2b48-4f16-8206-436d2f129585", "address": "fa:16:3e:9d:37:4b", "network": {"id": "516790be-56b8-409d-b1c0-a8683a45a9ec", "bridge": "br-int", "label": "tempest-ServersTestJSON-693737631-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "91e246e32f29422e90fae974cfee9d8f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "359850cc-b061-4c9c-a61c-eb42e0f7c359", "external-id": "nsx-vlan-transportzone-113", "segmentation_id": 113, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap89776caa-2b", "ovs_interfaceid": "89776caa-2b48-4f16-8206-436d2f129585", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1059.258721] env[62070]: DEBUG oslo_vmware.api [None req-e1da95e1-e153-4483-b838-13cc8e0b9b7f tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Task: {'id': task-1122376, 'name': ReconfigVM_Task, 'duration_secs': 0.277536} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1059.259075] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-e1da95e1-e153-4483-b838-13cc8e0b9b7f tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] [instance: 5cccd79d-d243-49db-8581-718dd594f3b3] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-245502', 'volume_id': 'e25cbba2-7a89-41cf-8f0c-a38272da6f0a', 'name': 'volume-e25cbba2-7a89-41cf-8f0c-a38272da6f0a', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '5cccd79d-d243-49db-8581-718dd594f3b3', 'attached_at': '', 'detached_at': '', 'volume_id': 'e25cbba2-7a89-41cf-8f0c-a38272da6f0a', 'serial': 'e25cbba2-7a89-41cf-8f0c-a38272da6f0a'} {{(pid=62070) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1059.271650] env[62070]: DEBUG nova.network.neutron [req-ac18bbc3-656b-4409-a945-03a3e36c6784 req-27289a47-7bdc-4ba8-89dd-485653ac330c service nova] [instance: fec23dd4-e956-42dd-b9a2-c8577f77cd81] Updated VIF entry in instance network info cache for port 933a577b-8b0c-4c0d-ae12-372e4b70b7c9. {{(pid=62070) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1059.272030] env[62070]: DEBUG nova.network.neutron [req-ac18bbc3-656b-4409-a945-03a3e36c6784 req-27289a47-7bdc-4ba8-89dd-485653ac330c service nova] [instance: fec23dd4-e956-42dd-b9a2-c8577f77cd81] Updating instance_info_cache with network_info: [{"id": "933a577b-8b0c-4c0d-ae12-372e4b70b7c9", "address": "fa:16:3e:34:74:6b", "network": {"id": "1ca75409-ae3f-4837-a2b6-ed2445253336", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1883034081-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.208", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f29ac48ab6544ec0bd1d210aec05dbc5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1559ce49-7345-443f-bf02-4bfeb88356ef", "external-id": "nsx-vlan-transportzone-670", "segmentation_id": 670, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap933a577b-8b", "ovs_interfaceid": "933a577b-8b0c-4c0d-ae12-372e4b70b7c9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1059.437777] env[62070]: DEBUG nova.compute.manager [req-5bf8ee1e-8543-4c71-9a18-392759380b66 req-754ef925-585b-4068-afbd-2cad5a77132c service nova] [instance: 27d9b478-7ebb-4313-a314-679ca0292086] Received event network-changed-89776caa-2b48-4f16-8206-436d2f129585 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1059.437897] env[62070]: DEBUG nova.compute.manager [req-5bf8ee1e-8543-4c71-9a18-392759380b66 req-754ef925-585b-4068-afbd-2cad5a77132c service nova] [instance: 27d9b478-7ebb-4313-a314-679ca0292086] Refreshing instance network info cache due to event 
network-changed-89776caa-2b48-4f16-8206-436d2f129585. {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1059.438104] env[62070]: DEBUG oslo_concurrency.lockutils [req-5bf8ee1e-8543-4c71-9a18-392759380b66 req-754ef925-585b-4068-afbd-2cad5a77132c service nova] Acquiring lock "refresh_cache-27d9b478-7ebb-4313-a314-679ca0292086" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1059.495903] env[62070]: DEBUG oslo_concurrency.lockutils [None req-150f7b60-f8d6-4378-88a6-27be0c832d10 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Releasing lock "refresh_cache-27d9b478-7ebb-4313-a314-679ca0292086" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1059.496262] env[62070]: DEBUG nova.compute.manager [None req-150f7b60-f8d6-4378-88a6-27be0c832d10 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: 27d9b478-7ebb-4313-a314-679ca0292086] Instance network_info: |[{"id": "89776caa-2b48-4f16-8206-436d2f129585", "address": "fa:16:3e:9d:37:4b", "network": {"id": "516790be-56b8-409d-b1c0-a8683a45a9ec", "bridge": "br-int", "label": "tempest-ServersTestJSON-693737631-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "91e246e32f29422e90fae974cfee9d8f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "359850cc-b061-4c9c-a61c-eb42e0f7c359", "external-id": "nsx-vlan-transportzone-113", "segmentation_id": 113, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap89776caa-2b", "ovs_interfaceid": "89776caa-2b48-4f16-8206-436d2f129585", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1059.496603] env[62070]: DEBUG oslo_concurrency.lockutils [req-5bf8ee1e-8543-4c71-9a18-392759380b66 req-754ef925-585b-4068-afbd-2cad5a77132c service nova] Acquired lock "refresh_cache-27d9b478-7ebb-4313-a314-679ca0292086" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1059.496788] env[62070]: DEBUG nova.network.neutron [req-5bf8ee1e-8543-4c71-9a18-392759380b66 req-754ef925-585b-4068-afbd-2cad5a77132c service nova] [instance: 27d9b478-7ebb-4313-a314-679ca0292086] Refreshing network info cache for port 89776caa-2b48-4f16-8206-436d2f129585 {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1059.498137] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-150f7b60-f8d6-4378-88a6-27be0c832d10 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: 27d9b478-7ebb-4313-a314-679ca0292086] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:9d:37:4b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '359850cc-b061-4c9c-a61c-eb42e0f7c359', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 
'89776caa-2b48-4f16-8206-436d2f129585', 'vif_model': 'vmxnet3'}] {{(pid=62070) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1059.507581] env[62070]: DEBUG oslo.service.loopingcall [None req-150f7b60-f8d6-4378-88a6-27be0c832d10 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1059.508795] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 27d9b478-7ebb-4313-a314-679ca0292086] Creating VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1059.509043] env[62070]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ff265078-a8e7-434d-83da-d298926028c6 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.524557] env[62070]: DEBUG nova.compute.manager [None req-fe7daec4-1432-4f51-b956-c8e29c87e358 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] [instance: 20c4fabc-fc9b-49c7-ab28-fa092ad66038] Start spawning the instance on the hypervisor. {{(pid=62070) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1059.535189] env[62070]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1059.535189] env[62070]: value = "task-1122377" [ 1059.535189] env[62070]: _type = "Task" [ 1059.535189] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1059.545016] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1122377, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1059.548412] env[62070]: DEBUG nova.virt.hardware [None req-fe7daec4-1432-4f51-b956-c8e29c87e358 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T09:21:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T09:21:17Z,direct_url=,disk_format='vmdk',id=43ea607c-7ece-4601-9b11-75c6a16aa7dd,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9d42cb2bbadf40d6b35f237f71234611',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T09:21:18Z,virtual_size=,visibility=), allow threads: False {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1059.548672] env[62070]: DEBUG nova.virt.hardware [None req-fe7daec4-1432-4f51-b956-c8e29c87e358 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Flavor limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1059.548830] env[62070]: DEBUG nova.virt.hardware [None req-fe7daec4-1432-4f51-b956-c8e29c87e358 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Image limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1059.549037] env[62070]: DEBUG nova.virt.hardware [None req-fe7daec4-1432-4f51-b956-c8e29c87e358 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Flavor pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1059.549194] env[62070]: DEBUG nova.virt.hardware [None req-fe7daec4-1432-4f51-b956-c8e29c87e358 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Image pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1059.549372] env[62070]: DEBUG nova.virt.hardware [None req-fe7daec4-1432-4f51-b956-c8e29c87e358 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1059.549820] env[62070]: DEBUG nova.virt.hardware [None req-fe7daec4-1432-4f51-b956-c8e29c87e358 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1059.549820] env[62070]: DEBUG nova.virt.hardware [None req-fe7daec4-1432-4f51-b956-c8e29c87e358 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62070) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 1059.550105] env[62070]: DEBUG nova.virt.hardware [None req-fe7daec4-1432-4f51-b956-c8e29c87e358 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Got 1 possible topologies {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1059.550105] env[62070]: DEBUG nova.virt.hardware [None req-fe7daec4-1432-4f51-b956-c8e29c87e358 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1059.551405] env[62070]: DEBUG nova.virt.hardware [None req-fe7daec4-1432-4f51-b956-c8e29c87e358 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1059.551405] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b23e0e86-dab1-4298-9fa2-f3cb68948874 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.560935] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d45b1ce-b6c8-497a-9474-c4b2f3f982f7 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.743022] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fcedb26f-c82d-4463-992e-147e2a63cdfb {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.748995] env[62070]: DEBUG nova.compute.manager [req-1f6c7e25-ebc2-4383-9e70-b0871db396d3 req-b0827d68-a5ac-4dbe-8ba9-c85cb508ce92 service nova] [instance: 20c4fabc-fc9b-49c7-ab28-fa092ad66038] Received event network-vif-plugged-0eb3c7d4-224c-4d94-9450-0623a1e1b162 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1059.749124] env[62070]: DEBUG oslo_concurrency.lockutils [req-1f6c7e25-ebc2-4383-9e70-b0871db396d3 req-b0827d68-a5ac-4dbe-8ba9-c85cb508ce92 service nova] Acquiring lock "20c4fabc-fc9b-49c7-ab28-fa092ad66038-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1059.749294] env[62070]: DEBUG oslo_concurrency.lockutils [req-1f6c7e25-ebc2-4383-9e70-b0871db396d3 req-b0827d68-a5ac-4dbe-8ba9-c85cb508ce92 service nova] Lock "20c4fabc-fc9b-49c7-ab28-fa092ad66038-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1059.749470] env[62070]: DEBUG oslo_concurrency.lockutils [req-1f6c7e25-ebc2-4383-9e70-b0871db396d3 req-b0827d68-a5ac-4dbe-8ba9-c85cb508ce92 service nova] Lock "20c4fabc-fc9b-49c7-ab28-fa092ad66038-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1059.749652] env[62070]: DEBUG nova.compute.manager 
[req-1f6c7e25-ebc2-4383-9e70-b0871db396d3 req-b0827d68-a5ac-4dbe-8ba9-c85cb508ce92 service nova] [instance: 20c4fabc-fc9b-49c7-ab28-fa092ad66038] No waiting events found dispatching network-vif-plugged-0eb3c7d4-224c-4d94-9450-0623a1e1b162 {{(pid=62070) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1059.749812] env[62070]: WARNING nova.compute.manager [req-1f6c7e25-ebc2-4383-9e70-b0871db396d3 req-b0827d68-a5ac-4dbe-8ba9-c85cb508ce92 service nova] [instance: 20c4fabc-fc9b-49c7-ab28-fa092ad66038] Received unexpected event network-vif-plugged-0eb3c7d4-224c-4d94-9450-0623a1e1b162 for instance with vm_state building and task_state spawning. [ 1059.753095] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb9550a6-15d7-488a-955f-79383836be86 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.786487] env[62070]: DEBUG oslo_concurrency.lockutils [req-ac18bbc3-656b-4409-a945-03a3e36c6784 req-27289a47-7bdc-4ba8-89dd-485653ac330c service nova] Releasing lock "refresh_cache-fec23dd4-e956-42dd-b9a2-c8577f77cd81" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1059.787919] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c7c031c-d9d0-406e-98c8-73bbbf1ce753 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.798302] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80622583-c936-47aa-91b2-852f45e68111 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.819384] env[62070]: DEBUG nova.compute.provider_tree [None req-7aa3c262-5cfe-48c5-be13-33f7281359fe tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1060.047806] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1122377, 'name': CreateVM_Task, 'duration_secs': 0.485252} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1060.048083] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 27d9b478-7ebb-4313-a314-679ca0292086] Created VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1060.048712] env[62070]: DEBUG oslo_concurrency.lockutils [None req-150f7b60-f8d6-4378-88a6-27be0c832d10 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1060.048934] env[62070]: DEBUG oslo_concurrency.lockutils [None req-150f7b60-f8d6-4378-88a6-27be0c832d10 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Acquired lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1060.049250] env[62070]: DEBUG oslo_concurrency.lockutils [None req-150f7b60-f8d6-4378-88a6-27be0c832d10 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1060.049538] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-44c2ab93-5ec3-4d89-9e06-cba1c7ca6d26 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.054322] env[62070]: DEBUG oslo_vmware.api [None req-150f7b60-f8d6-4378-88a6-27be0c832d10 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Waiting for the task: (returnval){ [ 1060.054322] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]52f05b0c-d5f7-6dbb-9f05-86f7774dfe3f" [ 1060.054322] env[62070]: _type = "Task" [ 1060.054322] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1060.062069] env[62070]: DEBUG oslo_vmware.api [None req-150f7b60-f8d6-4378-88a6-27be0c832d10 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52f05b0c-d5f7-6dbb-9f05-86f7774dfe3f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1060.273665] env[62070]: DEBUG nova.network.neutron [req-5bf8ee1e-8543-4c71-9a18-392759380b66 req-754ef925-585b-4068-afbd-2cad5a77132c service nova] [instance: 27d9b478-7ebb-4313-a314-679ca0292086] Updated VIF entry in instance network info cache for port 89776caa-2b48-4f16-8206-436d2f129585. 
{{(pid=62070) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1060.274101] env[62070]: DEBUG nova.network.neutron [req-5bf8ee1e-8543-4c71-9a18-392759380b66 req-754ef925-585b-4068-afbd-2cad5a77132c service nova] [instance: 27d9b478-7ebb-4313-a314-679ca0292086] Updating instance_info_cache with network_info: [{"id": "89776caa-2b48-4f16-8206-436d2f129585", "address": "fa:16:3e:9d:37:4b", "network": {"id": "516790be-56b8-409d-b1c0-a8683a45a9ec", "bridge": "br-int", "label": "tempest-ServersTestJSON-693737631-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "91e246e32f29422e90fae974cfee9d8f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "359850cc-b061-4c9c-a61c-eb42e0f7c359", "external-id": "nsx-vlan-transportzone-113", "segmentation_id": 113, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap89776caa-2b", "ovs_interfaceid": "89776caa-2b48-4f16-8206-436d2f129585", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1060.309352] env[62070]: DEBUG nova.network.neutron [None req-fe7daec4-1432-4f51-b956-c8e29c87e358 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] [instance: 20c4fabc-fc9b-49c7-ab28-fa092ad66038] Successfully updated port: 0eb3c7d4-224c-4d94-9450-0623a1e1b162 {{(pid=62070) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1060.322945] env[62070]: DEBUG nova.scheduler.client.report [None req-7aa3c262-5cfe-48c5-be13-33f7281359fe tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1060.333876] env[62070]: DEBUG nova.objects.instance [None req-e1da95e1-e153-4483-b838-13cc8e0b9b7f tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Lazy-loading 'flavor' on Instance uuid 5cccd79d-d243-49db-8581-718dd594f3b3 {{(pid=62070) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1060.567444] env[62070]: DEBUG oslo_vmware.api [None req-150f7b60-f8d6-4378-88a6-27be0c832d10 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52f05b0c-d5f7-6dbb-9f05-86f7774dfe3f, 'name': SearchDatastore_Task, 'duration_secs': 0.011388} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1060.567956] env[62070]: DEBUG oslo_concurrency.lockutils [None req-150f7b60-f8d6-4378-88a6-27be0c832d10 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Releasing lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1060.568352] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-150f7b60-f8d6-4378-88a6-27be0c832d10 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: 27d9b478-7ebb-4313-a314-679ca0292086] Processing image 43ea607c-7ece-4601-9b11-75c6a16aa7dd {{(pid=62070) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1060.568842] env[62070]: DEBUG oslo_concurrency.lockutils [None req-150f7b60-f8d6-4378-88a6-27be0c832d10 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1060.569046] env[62070]: DEBUG oslo_concurrency.lockutils [None req-150f7b60-f8d6-4378-88a6-27be0c832d10 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Acquired lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1060.569378] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-150f7b60-f8d6-4378-88a6-27be0c832d10 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1060.569821] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b8c81bc8-2644-45e9-838d-ae9f1644ac15 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.586877] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-150f7b60-f8d6-4378-88a6-27be0c832d10 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1060.587224] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-150f7b60-f8d6-4378-88a6-27be0c832d10 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62070) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1060.588433] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-904ec2f6-84ac-4c23-b10f-ccf9dd3143fe {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.594872] env[62070]: DEBUG oslo_vmware.api [None req-150f7b60-f8d6-4378-88a6-27be0c832d10 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Waiting for the task: (returnval){ [ 1060.594872] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]52fa50e2-a885-675f-2420-55436421f348" [ 1060.594872] env[62070]: _type = "Task" [ 1060.594872] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1060.602690] env[62070]: DEBUG oslo_vmware.api [None req-150f7b60-f8d6-4378-88a6-27be0c832d10 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52fa50e2-a885-675f-2420-55436421f348, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1060.777760] env[62070]: DEBUG oslo_concurrency.lockutils [req-5bf8ee1e-8543-4c71-9a18-392759380b66 req-754ef925-585b-4068-afbd-2cad5a77132c service nova] Releasing lock "refresh_cache-27d9b478-7ebb-4313-a314-679ca0292086" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1060.813131] env[62070]: DEBUG oslo_concurrency.lockutils [None req-fe7daec4-1432-4f51-b956-c8e29c87e358 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Acquiring lock "refresh_cache-20c4fabc-fc9b-49c7-ab28-fa092ad66038" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1060.813334] env[62070]: DEBUG oslo_concurrency.lockutils [None req-fe7daec4-1432-4f51-b956-c8e29c87e358 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Acquired lock "refresh_cache-20c4fabc-fc9b-49c7-ab28-fa092ad66038" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1060.813448] env[62070]: DEBUG nova.network.neutron [None req-fe7daec4-1432-4f51-b956-c8e29c87e358 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] [instance: 20c4fabc-fc9b-49c7-ab28-fa092ad66038] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1060.828082] env[62070]: DEBUG oslo_concurrency.lockutils [None req-7aa3c262-5cfe-48c5-be13-33f7281359fe tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.333s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1060.830361] env[62070]: DEBUG oslo_concurrency.lockutils [None req-3057a186-31ce-49a6-8c86-ad7afb44d558 tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 
7.157s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1060.830593] env[62070]: DEBUG nova.objects.instance [None req-3057a186-31ce-49a6-8c86-ad7afb44d558 tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] Lazy-loading 'resources' on Instance uuid b9e5a798-9ce9-4e36-8c32-bc32b0dc1eae {{(pid=62070) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1060.838679] env[62070]: DEBUG oslo_concurrency.lockutils [None req-e1da95e1-e153-4483-b838-13cc8e0b9b7f tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Lock "5cccd79d-d243-49db-8581-718dd594f3b3" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.822s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1060.859433] env[62070]: INFO nova.network.neutron [None req-7aa3c262-5cfe-48c5-be13-33f7281359fe tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] [instance: 53a1791d-38fd-4721-b82c-2f0922348300] Updating port 2c6759e4-b6e7-4b67-b06d-d38d6043d3b2 with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 1061.106178] env[62070]: DEBUG oslo_vmware.api [None req-150f7b60-f8d6-4378-88a6-27be0c832d10 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52fa50e2-a885-675f-2420-55436421f348, 'name': SearchDatastore_Task, 'duration_secs': 0.026508} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1061.107972] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2244749c-3c74-4f4a-ab4b-16d4829c776a {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.112419] env[62070]: DEBUG oslo_vmware.api [None req-150f7b60-f8d6-4378-88a6-27be0c832d10 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Waiting for the task: (returnval){ [ 1061.112419] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]5245e714-a1a8-cd88-f7c9-8ac5571bf74f" [ 1061.112419] env[62070]: _type = "Task" [ 1061.112419] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1061.120977] env[62070]: DEBUG oslo_vmware.api [None req-150f7b60-f8d6-4378-88a6-27be0c832d10 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]5245e714-a1a8-cd88-f7c9-8ac5571bf74f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1061.354710] env[62070]: DEBUG nova.network.neutron [None req-fe7daec4-1432-4f51-b956-c8e29c87e358 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] [instance: 20c4fabc-fc9b-49c7-ab28-fa092ad66038] Instance cache missing network info. 
{{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1061.410932] env[62070]: DEBUG oslo_concurrency.lockutils [None req-143965fc-7e83-4cd2-ba21-818f43f3748e tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Acquiring lock "5cccd79d-d243-49db-8581-718dd594f3b3" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1061.411282] env[62070]: DEBUG oslo_concurrency.lockutils [None req-143965fc-7e83-4cd2-ba21-818f43f3748e tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Lock "5cccd79d-d243-49db-8581-718dd594f3b3" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1061.492390] env[62070]: DEBUG nova.network.neutron [None req-fe7daec4-1432-4f51-b956-c8e29c87e358 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] [instance: 20c4fabc-fc9b-49c7-ab28-fa092ad66038] Updating instance_info_cache with network_info: [{"id": "0eb3c7d4-224c-4d94-9450-0623a1e1b162", "address": "fa:16:3e:d9:a2:6e", "network": {"id": "b9ef8f6c-bbd6-409d-a591-ad584e5e028f", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-599171324-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ca25fba006b740f2a86fe10e4abe9400", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa8c2f93-f287-41b3-adb6-4942a7ea2a0b", "external-id": "nsx-vlan-transportzone-363", "segmentation_id": 363, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0eb3c7d4-22", "ovs_interfaceid": "0eb3c7d4-224c-4d94-9450-0623a1e1b162", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1061.513939] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9dbe2824-53ca-4e56-a9a2-c52e6256e881 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.522584] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-deca14c8-5a1c-445d-b973-c0531f0339b5 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.553539] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d101c24a-cccc-4326-9967-a40d1e958bf7 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.562124] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-87765b4a-5715-43d4-8ee0-d1cdbda6574e {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.576357] env[62070]: DEBUG nova.compute.provider_tree [None req-3057a186-31ce-49a6-8c86-ad7afb44d558 tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1061.622577] env[62070]: DEBUG oslo_vmware.api [None req-150f7b60-f8d6-4378-88a6-27be0c832d10 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]5245e714-a1a8-cd88-f7c9-8ac5571bf74f, 'name': SearchDatastore_Task, 'duration_secs': 0.021667} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1061.622877] env[62070]: DEBUG oslo_concurrency.lockutils [None req-150f7b60-f8d6-4378-88a6-27be0c832d10 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Releasing lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1061.623152] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-150f7b60-f8d6-4378-88a6-27be0c832d10 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore2] 27d9b478-7ebb-4313-a314-679ca0292086/27d9b478-7ebb-4313-a314-679ca0292086.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1061.623485] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6905f27b-e0c8-438a-bf54-03ccc31cc6aa {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.630780] env[62070]: DEBUG oslo_vmware.api [None req-150f7b60-f8d6-4378-88a6-27be0c832d10 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Waiting for the task: (returnval){ [ 1061.630780] env[62070]: value = "task-1122379" [ 1061.630780] env[62070]: _type = "Task" [ 1061.630780] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1061.642309] env[62070]: DEBUG oslo_vmware.api [None req-150f7b60-f8d6-4378-88a6-27be0c832d10 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Task: {'id': task-1122379, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1061.915584] env[62070]: DEBUG nova.compute.utils [None req-143965fc-7e83-4cd2-ba21-818f43f3748e tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Using /dev/sd instead of None {{(pid=62070) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1061.995273] env[62070]: DEBUG oslo_concurrency.lockutils [None req-fe7daec4-1432-4f51-b956-c8e29c87e358 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Releasing lock "refresh_cache-20c4fabc-fc9b-49c7-ab28-fa092ad66038" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1061.995614] env[62070]: DEBUG nova.compute.manager [None req-fe7daec4-1432-4f51-b956-c8e29c87e358 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] [instance: 20c4fabc-fc9b-49c7-ab28-fa092ad66038] Instance network_info: |[{"id": "0eb3c7d4-224c-4d94-9450-0623a1e1b162", "address": "fa:16:3e:d9:a2:6e", "network": {"id": "b9ef8f6c-bbd6-409d-a591-ad584e5e028f", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-599171324-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ca25fba006b740f2a86fe10e4abe9400", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa8c2f93-f287-41b3-adb6-4942a7ea2a0b", "external-id": "nsx-vlan-transportzone-363", "segmentation_id": 363, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0eb3c7d4-22", "ovs_interfaceid": "0eb3c7d4-224c-4d94-9450-0623a1e1b162", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1061.996077] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-fe7daec4-1432-4f51-b956-c8e29c87e358 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] [instance: 20c4fabc-fc9b-49c7-ab28-fa092ad66038] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d9:a2:6e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'aa8c2f93-f287-41b3-adb6-4942a7ea2a0b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0eb3c7d4-224c-4d94-9450-0623a1e1b162', 'vif_model': 'vmxnet3'}] {{(pid=62070) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1062.004427] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-fe7daec4-1432-4f51-b956-c8e29c87e358 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Creating folder: Project (ca25fba006b740f2a86fe10e4abe9400). Parent ref: group-v245319. 
{{(pid=62070) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1062.004765] env[62070]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3b6ee24c-3c56-46f5-9b71-3a79a9687b20 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.021339] env[62070]: INFO nova.virt.vmwareapi.vm_util [None req-fe7daec4-1432-4f51-b956-c8e29c87e358 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Created folder: Project (ca25fba006b740f2a86fe10e4abe9400) in parent group-v245319. [ 1062.021612] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-fe7daec4-1432-4f51-b956-c8e29c87e358 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Creating folder: Instances. Parent ref: group-v245505. {{(pid=62070) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1062.021838] env[62070]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-53086572-cb58-4e45-97c6-b842e0311eb3 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.036322] env[62070]: INFO nova.virt.vmwareapi.vm_util [None req-fe7daec4-1432-4f51-b956-c8e29c87e358 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Created folder: Instances in parent group-v245505. [ 1062.036642] env[62070]: DEBUG oslo.service.loopingcall [None req-fe7daec4-1432-4f51-b956-c8e29c87e358 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1062.036898] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 20c4fabc-fc9b-49c7-ab28-fa092ad66038] Creating VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1062.037145] env[62070]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-fafcefbb-2043-4875-83e1-2eef092feb5f {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.060780] env[62070]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1062.060780] env[62070]: value = "task-1122383" [ 1062.060780] env[62070]: _type = "Task" [ 1062.060780] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1062.068738] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1122383, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1062.079956] env[62070]: DEBUG nova.scheduler.client.report [None req-3057a186-31ce-49a6-8c86-ad7afb44d558 tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1062.141394] env[62070]: DEBUG oslo_vmware.api [None req-150f7b60-f8d6-4378-88a6-27be0c832d10 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Task: {'id': task-1122379, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.444574} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1062.141759] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-150f7b60-f8d6-4378-88a6-27be0c832d10 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore2] 27d9b478-7ebb-4313-a314-679ca0292086/27d9b478-7ebb-4313-a314-679ca0292086.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 1062.141892] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-150f7b60-f8d6-4378-88a6-27be0c832d10 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: 27d9b478-7ebb-4313-a314-679ca0292086] Extending root virtual disk to 1048576 {{(pid=62070) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1062.142115] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ad41756a-55af-49ce-a8a2-218c77e36341 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.149550] env[62070]: DEBUG oslo_vmware.api [None req-150f7b60-f8d6-4378-88a6-27be0c832d10 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Waiting for the task: (returnval){ [ 1062.149550] env[62070]: value = "task-1122384" [ 1062.149550] env[62070]: _type = "Task" [ 1062.149550] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1062.157680] env[62070]: DEBUG oslo_vmware.api [None req-150f7b60-f8d6-4378-88a6-27be0c832d10 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Task: {'id': task-1122384, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1062.357704] env[62070]: DEBUG oslo_concurrency.lockutils [None req-7aa3c262-5cfe-48c5-be13-33f7281359fe tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Acquiring lock "refresh_cache-53a1791d-38fd-4721-b82c-2f0922348300" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1062.357854] env[62070]: DEBUG oslo_concurrency.lockutils [None req-7aa3c262-5cfe-48c5-be13-33f7281359fe tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Acquired lock "refresh_cache-53a1791d-38fd-4721-b82c-2f0922348300" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1062.358058] env[62070]: DEBUG nova.network.neutron [None req-7aa3c262-5cfe-48c5-be13-33f7281359fe tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] [instance: 53a1791d-38fd-4721-b82c-2f0922348300] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1062.418741] env[62070]: DEBUG oslo_concurrency.lockutils [None req-143965fc-7e83-4cd2-ba21-818f43f3748e tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Lock "5cccd79d-d243-49db-8581-718dd594f3b3" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.007s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1062.571257] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1122383, 'name': CreateVM_Task} progress is 25%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1062.585332] env[62070]: DEBUG oslo_concurrency.lockutils [None req-3057a186-31ce-49a6-8c86-ad7afb44d558 tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.755s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1062.587623] env[62070]: DEBUG oslo_concurrency.lockutils [None req-fe955260-aba3-482d-aeff-9c9a29ad6847 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 6.627s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1062.604212] env[62070]: INFO nova.scheduler.client.report [None req-3057a186-31ce-49a6-8c86-ad7afb44d558 tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] Deleted allocations for instance b9e5a798-9ce9-4e36-8c32-bc32b0dc1eae [ 1062.661158] env[62070]: DEBUG oslo_vmware.api [None req-150f7b60-f8d6-4378-88a6-27be0c832d10 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Task: {'id': task-1122384, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.314306} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1062.661474] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-150f7b60-f8d6-4378-88a6-27be0c832d10 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: 27d9b478-7ebb-4313-a314-679ca0292086] Extended root virtual disk {{(pid=62070) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1062.662241] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b972836-b436-40c3-9d3a-cf64173ec3f4 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.686028] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-150f7b60-f8d6-4378-88a6-27be0c832d10 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: 27d9b478-7ebb-4313-a314-679ca0292086] Reconfiguring VM instance instance-00000068 to attach disk [datastore2] 27d9b478-7ebb-4313-a314-679ca0292086/27d9b478-7ebb-4313-a314-679ca0292086.vmdk or device None with type sparse {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1062.686358] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ea2f2377-1c63-40e4-be17-2d5eff958726 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.707191] env[62070]: DEBUG oslo_vmware.api [None req-150f7b60-f8d6-4378-88a6-27be0c832d10 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Waiting for the task: (returnval){ [ 1062.707191] env[62070]: value = "task-1122385" [ 1062.707191] env[62070]: _type = "Task" [ 1062.707191] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1062.715467] env[62070]: DEBUG oslo_vmware.api [None req-150f7b60-f8d6-4378-88a6-27be0c832d10 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Task: {'id': task-1122385, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1063.073067] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1122383, 'name': CreateVM_Task, 'duration_secs': 0.919378} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1063.073356] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 20c4fabc-fc9b-49c7-ab28-fa092ad66038] Created VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1063.074054] env[62070]: DEBUG oslo_concurrency.lockutils [None req-fe7daec4-1432-4f51-b956-c8e29c87e358 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1063.074422] env[62070]: DEBUG oslo_concurrency.lockutils [None req-fe7daec4-1432-4f51-b956-c8e29c87e358 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Acquired lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1063.074623] env[62070]: DEBUG oslo_concurrency.lockutils [None req-fe7daec4-1432-4f51-b956-c8e29c87e358 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1063.074893] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c7c0339c-5c1f-48ea-8f0a-3b65790a7665 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.080705] env[62070]: DEBUG oslo_vmware.api [None req-fe7daec4-1432-4f51-b956-c8e29c87e358 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Waiting for the task: (returnval){ [ 1063.080705] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]52d95135-4f98-c10d-d9a1-38207f6e98fd" [ 1063.080705] env[62070]: _type = "Task" [ 1063.080705] env[62070]: } to complete. 
{{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1063.081658] env[62070]: DEBUG nova.network.neutron [None req-7aa3c262-5cfe-48c5-be13-33f7281359fe tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] [instance: 53a1791d-38fd-4721-b82c-2f0922348300] Updating instance_info_cache with network_info: [{"id": "2c6759e4-b6e7-4b67-b06d-d38d6043d3b2", "address": "fa:16:3e:11:96:b8", "network": {"id": "6a62b79f-a98b-4518-86cb-facc7b77da1d", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-2107556336-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "772f7fcee5f44b899b6df797e1ed5ddd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78e1ebb0-0130-446b-bf73-a0e59bbb95cc", "external-id": "nsx-vlan-transportzone-414", "segmentation_id": 414, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2c6759e4-b6", "ovs_interfaceid": "2c6759e4-b6e7-4b67-b06d-d38d6043d3b2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1063.096181] env[62070]: DEBUG oslo_vmware.api [None req-fe7daec4-1432-4f51-b956-c8e29c87e358 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52d95135-4f98-c10d-d9a1-38207f6e98fd, 'name': SearchDatastore_Task, 'duration_secs': 0.010997} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1063.097037] env[62070]: DEBUG oslo_concurrency.lockutils [None req-fe7daec4-1432-4f51-b956-c8e29c87e358 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Releasing lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1063.097291] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-fe7daec4-1432-4f51-b956-c8e29c87e358 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] [instance: 20c4fabc-fc9b-49c7-ab28-fa092ad66038] Processing image 43ea607c-7ece-4601-9b11-75c6a16aa7dd {{(pid=62070) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1063.097528] env[62070]: DEBUG oslo_concurrency.lockutils [None req-fe7daec4-1432-4f51-b956-c8e29c87e358 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1063.097681] env[62070]: DEBUG oslo_concurrency.lockutils [None req-fe7daec4-1432-4f51-b956-c8e29c87e358 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Acquired lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1063.097863] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-fe7daec4-1432-4f51-b956-c8e29c87e358 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1063.098446] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-bb34b692-54fb-4c0f-9880-bd5b68998ed0 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.107137] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-fe7daec4-1432-4f51-b956-c8e29c87e358 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1063.107468] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-fe7daec4-1432-4f51-b956-c8e29c87e358 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62070) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1063.112427] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c7f76950-7c5a-4401-a1c6-4c0dc1044577 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.114928] env[62070]: DEBUG oslo_concurrency.lockutils [None req-3057a186-31ce-49a6-8c86-ad7afb44d558 tempest-SecurityGroupsTestJSON-2059552961 tempest-SecurityGroupsTestJSON-2059552961-project-member] Lock "b9e5a798-9ce9-4e36-8c32-bc32b0dc1eae" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 12.331s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1063.121133] env[62070]: DEBUG oslo_vmware.api [None req-fe7daec4-1432-4f51-b956-c8e29c87e358 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Waiting for the task: (returnval){ [ 1063.121133] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]52cc31ac-8872-8ffc-ee3c-fb12f7d53405" [ 1063.121133] env[62070]: _type = "Task" [ 1063.121133] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1063.129073] env[62070]: DEBUG oslo_vmware.api [None req-fe7daec4-1432-4f51-b956-c8e29c87e358 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52cc31ac-8872-8ffc-ee3c-fb12f7d53405, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1063.147918] env[62070]: DEBUG nova.compute.manager [req-d5ee948d-fa9f-4a99-9104-4af49cc320d9 req-d897ee6f-ac2c-4a94-b7de-c1118b4ba599 service nova] [instance: 20c4fabc-fc9b-49c7-ab28-fa092ad66038] Received event network-changed-0eb3c7d4-224c-4d94-9450-0623a1e1b162 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1063.148161] env[62070]: DEBUG nova.compute.manager [req-d5ee948d-fa9f-4a99-9104-4af49cc320d9 req-d897ee6f-ac2c-4a94-b7de-c1118b4ba599 service nova] [instance: 20c4fabc-fc9b-49c7-ab28-fa092ad66038] Refreshing instance network info cache due to event network-changed-0eb3c7d4-224c-4d94-9450-0623a1e1b162. 
{{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1063.148410] env[62070]: DEBUG oslo_concurrency.lockutils [req-d5ee948d-fa9f-4a99-9104-4af49cc320d9 req-d897ee6f-ac2c-4a94-b7de-c1118b4ba599 service nova] Acquiring lock "refresh_cache-20c4fabc-fc9b-49c7-ab28-fa092ad66038" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1063.148495] env[62070]: DEBUG oslo_concurrency.lockutils [req-d5ee948d-fa9f-4a99-9104-4af49cc320d9 req-d897ee6f-ac2c-4a94-b7de-c1118b4ba599 service nova] Acquired lock "refresh_cache-20c4fabc-fc9b-49c7-ab28-fa092ad66038" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1063.148750] env[62070]: DEBUG nova.network.neutron [req-d5ee948d-fa9f-4a99-9104-4af49cc320d9 req-d897ee6f-ac2c-4a94-b7de-c1118b4ba599 service nova] [instance: 20c4fabc-fc9b-49c7-ab28-fa092ad66038] Refreshing network info cache for port 0eb3c7d4-224c-4d94-9450-0623a1e1b162 {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1063.219988] env[62070]: DEBUG oslo_vmware.api [None req-150f7b60-f8d6-4378-88a6-27be0c832d10 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Task: {'id': task-1122385, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1063.277906] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c14a4c75-7cb3-4e54-ad32-39de38cd7778 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.287178] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47e57a03-8c91-41b3-9cae-04b611922b4e {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.318726] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be391b27-6493-49f0-aa22-094f960c0f38 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.326868] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89845088-9517-4d32-ba99-c2f4d234c8ef {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.341998] env[62070]: DEBUG nova.compute.provider_tree [None req-fe955260-aba3-482d-aeff-9c9a29ad6847 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1063.488236] env[62070]: DEBUG oslo_concurrency.lockutils [None req-143965fc-7e83-4cd2-ba21-818f43f3748e tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Acquiring lock "5cccd79d-d243-49db-8581-718dd594f3b3" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1063.488600] env[62070]: DEBUG oslo_concurrency.lockutils [None req-143965fc-7e83-4cd2-ba21-818f43f3748e tempest-AttachVolumeTestJSON-1145072651 
tempest-AttachVolumeTestJSON-1145072651-project-member] Lock "5cccd79d-d243-49db-8581-718dd594f3b3" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1063.488860] env[62070]: INFO nova.compute.manager [None req-143965fc-7e83-4cd2-ba21-818f43f3748e tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] [instance: 5cccd79d-d243-49db-8581-718dd594f3b3] Attaching volume 73eaeb2a-8061-49f0-ba0e-4c1fc41e031f to /dev/sdc [ 1063.523947] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2125c9d-0d3a-403a-bd0e-6ea50a4cdaf1 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.532053] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebf0e882-5b4b-4565-b536-aa807cb24454 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.547061] env[62070]: DEBUG nova.virt.block_device [None req-143965fc-7e83-4cd2-ba21-818f43f3748e tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] [instance: 5cccd79d-d243-49db-8581-718dd594f3b3] Updating existing volume attachment record: f5b2097b-49a8-466b-8030-e56343d996ea {{(pid=62070) _volume_attach /opt/stack/nova/nova/virt/block_device.py:679}} [ 1063.589261] env[62070]: DEBUG oslo_concurrency.lockutils [None req-7aa3c262-5cfe-48c5-be13-33f7281359fe tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Releasing lock "refresh_cache-53a1791d-38fd-4721-b82c-2f0922348300" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1063.616587] env[62070]: DEBUG nova.virt.hardware [None req-7aa3c262-5cfe-48c5-be13-33f7281359fe tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T09:21:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='1484f5f085aacb9db3da0b75940c0786',container_format='bare',created_at=2024-10-03T09:31:09Z,direct_url=,disk_format='vmdk',id=24ed4c28-b352-4867-857b-17f9624cc455,min_disk=1,min_ram=0,name='tempest-ServersNegativeTestJSON-server-1838606362-shelved',owner='772f7fcee5f44b899b6df797e1ed5ddd',properties=ImageMetaProps,protected=,size=31668736,status='active',tags=,updated_at=2024-10-03T09:31:26Z,virtual_size=,visibility=), allow threads: False {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1063.616839] env[62070]: DEBUG nova.virt.hardware [None req-7aa3c262-5cfe-48c5-be13-33f7281359fe tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Flavor limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1063.617014] env[62070]: DEBUG nova.virt.hardware [None req-7aa3c262-5cfe-48c5-be13-33f7281359fe tempest-ServersNegativeTestJSON-2028058905 
tempest-ServersNegativeTestJSON-2028058905-project-member] Image limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1063.617203] env[62070]: DEBUG nova.virt.hardware [None req-7aa3c262-5cfe-48c5-be13-33f7281359fe tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Flavor pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1063.617353] env[62070]: DEBUG nova.virt.hardware [None req-7aa3c262-5cfe-48c5-be13-33f7281359fe tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Image pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1063.617503] env[62070]: DEBUG nova.virt.hardware [None req-7aa3c262-5cfe-48c5-be13-33f7281359fe tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1063.617711] env[62070]: DEBUG nova.virt.hardware [None req-7aa3c262-5cfe-48c5-be13-33f7281359fe tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1063.617871] env[62070]: DEBUG nova.virt.hardware [None req-7aa3c262-5cfe-48c5-be13-33f7281359fe tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1063.618053] env[62070]: DEBUG nova.virt.hardware [None req-7aa3c262-5cfe-48c5-be13-33f7281359fe tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Got 1 possible topologies {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1063.618221] env[62070]: DEBUG nova.virt.hardware [None req-7aa3c262-5cfe-48c5-be13-33f7281359fe tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1063.618483] env[62070]: DEBUG nova.virt.hardware [None req-7aa3c262-5cfe-48c5-be13-33f7281359fe tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1063.619588] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f123013-d7ce-4cad-962b-6438443b1a73 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.630732] env[62070]: DEBUG oslo_vmware.api [None req-fe7daec4-1432-4f51-b956-c8e29c87e358 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52cc31ac-8872-8ffc-ee3c-fb12f7d53405, 'name': SearchDatastore_Task, 'duration_secs': 
0.009666} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1063.633255] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1d185d95-6a4f-4cad-b996-fecdcfaedc04 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.639037] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-288439fa-f50d-4960-a212-054077c589c5 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.647418] env[62070]: DEBUG oslo_vmware.api [None req-fe7daec4-1432-4f51-b956-c8e29c87e358 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Waiting for the task: (returnval){ [ 1063.647418] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]52072e97-5b23-83ec-03ef-c6847bb9dd00" [ 1063.647418] env[62070]: _type = "Task" [ 1063.647418] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1063.659069] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-7aa3c262-5cfe-48c5-be13-33f7281359fe tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] [instance: 53a1791d-38fd-4721-b82c-2f0922348300] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:11:96:b8', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '78e1ebb0-0130-446b-bf73-a0e59bbb95cc', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2c6759e4-b6e7-4b67-b06d-d38d6043d3b2', 'vif_model': 'vmxnet3'}] {{(pid=62070) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1063.666641] env[62070]: DEBUG oslo.service.loopingcall [None req-7aa3c262-5cfe-48c5-be13-33f7281359fe tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1063.670158] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 53a1791d-38fd-4721-b82c-2f0922348300] Creating VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1063.670732] env[62070]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7c0e0761-a99e-4ea2-98b0-4ce9d7d0d4d9 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.693575] env[62070]: DEBUG oslo_vmware.api [None req-fe7daec4-1432-4f51-b956-c8e29c87e358 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52072e97-5b23-83ec-03ef-c6847bb9dd00, 'name': SearchDatastore_Task, 'duration_secs': 0.010666} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1063.697944] env[62070]: DEBUG oslo_concurrency.lockutils [None req-fe7daec4-1432-4f51-b956-c8e29c87e358 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Releasing lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1063.697944] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-fe7daec4-1432-4f51-b956-c8e29c87e358 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore2] 20c4fabc-fc9b-49c7-ab28-fa092ad66038/20c4fabc-fc9b-49c7-ab28-fa092ad66038.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1063.697944] env[62070]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1063.697944] env[62070]: value = "task-1122386" [ 1063.697944] env[62070]: _type = "Task" [ 1063.697944] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1063.697944] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8ea5dd02-7814-4824-952b-6bbbe9460d36 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.706567] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1122386, 'name': CreateVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1063.707984] env[62070]: DEBUG oslo_vmware.api [None req-fe7daec4-1432-4f51-b956-c8e29c87e358 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Waiting for the task: (returnval){ [ 1063.707984] env[62070]: value = "task-1122387" [ 1063.707984] env[62070]: _type = "Task" [ 1063.707984] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1063.721157] env[62070]: DEBUG oslo_vmware.api [None req-150f7b60-f8d6-4378-88a6-27be0c832d10 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Task: {'id': task-1122385, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1063.724079] env[62070]: DEBUG oslo_vmware.api [None req-fe7daec4-1432-4f51-b956-c8e29c87e358 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Task: {'id': task-1122387, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1063.845394] env[62070]: DEBUG nova.scheduler.client.report [None req-fe955260-aba3-482d-aeff-9c9a29ad6847 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1063.913207] env[62070]: DEBUG nova.network.neutron [req-d5ee948d-fa9f-4a99-9104-4af49cc320d9 req-d897ee6f-ac2c-4a94-b7de-c1118b4ba599 service nova] [instance: 20c4fabc-fc9b-49c7-ab28-fa092ad66038] Updated VIF entry in instance network info cache for port 0eb3c7d4-224c-4d94-9450-0623a1e1b162. {{(pid=62070) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1063.913207] env[62070]: DEBUG nova.network.neutron [req-d5ee948d-fa9f-4a99-9104-4af49cc320d9 req-d897ee6f-ac2c-4a94-b7de-c1118b4ba599 service nova] [instance: 20c4fabc-fc9b-49c7-ab28-fa092ad66038] Updating instance_info_cache with network_info: [{"id": "0eb3c7d4-224c-4d94-9450-0623a1e1b162", "address": "fa:16:3e:d9:a2:6e", "network": {"id": "b9ef8f6c-bbd6-409d-a591-ad584e5e028f", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-599171324-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ca25fba006b740f2a86fe10e4abe9400", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa8c2f93-f287-41b3-adb6-4942a7ea2a0b", "external-id": "nsx-vlan-transportzone-363", "segmentation_id": 363, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0eb3c7d4-22", "ovs_interfaceid": "0eb3c7d4-224c-4d94-9450-0623a1e1b162", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1064.209491] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1122386, 'name': CreateVM_Task, 'duration_secs': 0.47962} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1064.215642] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 53a1791d-38fd-4721-b82c-2f0922348300] Created VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1064.216428] env[62070]: DEBUG oslo_concurrency.lockutils [None req-7aa3c262-5cfe-48c5-be13-33f7281359fe tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/24ed4c28-b352-4867-857b-17f9624cc455" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1064.216658] env[62070]: DEBUG oslo_concurrency.lockutils [None req-7aa3c262-5cfe-48c5-be13-33f7281359fe tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Acquired lock "[datastore2] devstack-image-cache_base/24ed4c28-b352-4867-857b-17f9624cc455" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1064.217023] env[62070]: DEBUG oslo_concurrency.lockutils [None req-7aa3c262-5cfe-48c5-be13-33f7281359fe tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/24ed4c28-b352-4867-857b-17f9624cc455" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1064.218103] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-de4baeb5-61a9-4696-a472-e288bfd709b9 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.225547] env[62070]: DEBUG oslo_vmware.api [None req-150f7b60-f8d6-4378-88a6-27be0c832d10 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Task: {'id': task-1122385, 'name': ReconfigVM_Task, 'duration_secs': 1.127992} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1064.225795] env[62070]: DEBUG oslo_vmware.api [None req-fe7daec4-1432-4f51-b956-c8e29c87e358 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Task: {'id': task-1122387, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.497549} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1064.226459] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-150f7b60-f8d6-4378-88a6-27be0c832d10 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: 27d9b478-7ebb-4313-a314-679ca0292086] Reconfigured VM instance instance-00000068 to attach disk [datastore2] 27d9b478-7ebb-4313-a314-679ca0292086/27d9b478-7ebb-4313-a314-679ca0292086.vmdk or device None with type sparse {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1064.227134] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-fe7daec4-1432-4f51-b956-c8e29c87e358 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore2] 20c4fabc-fc9b-49c7-ab28-fa092ad66038/20c4fabc-fc9b-49c7-ab28-fa092ad66038.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 1064.227364] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-fe7daec4-1432-4f51-b956-c8e29c87e358 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] [instance: 20c4fabc-fc9b-49c7-ab28-fa092ad66038] Extending root virtual disk to 1048576 {{(pid=62070) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1064.227605] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-73a6b092-bd30-4d40-97d3-f85f17c15479 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.229257] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b36bfe07-02a0-4ceb-9d19-209cc15cccf3 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.232696] env[62070]: DEBUG oslo_vmware.api [None req-7aa3c262-5cfe-48c5-be13-33f7281359fe tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Waiting for the task: (returnval){ [ 1064.232696] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]52ee18da-6ffb-1f9e-16fe-b229f2273676" [ 1064.232696] env[62070]: _type = "Task" [ 1064.232696] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1064.240974] env[62070]: DEBUG oslo_vmware.api [None req-150f7b60-f8d6-4378-88a6-27be0c832d10 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Waiting for the task: (returnval){ [ 1064.240974] env[62070]: value = "task-1122390" [ 1064.240974] env[62070]: _type = "Task" [ 1064.240974] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1064.241272] env[62070]: DEBUG oslo_vmware.api [None req-fe7daec4-1432-4f51-b956-c8e29c87e358 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Waiting for the task: (returnval){ [ 1064.241272] env[62070]: value = "task-1122391" [ 1064.241272] env[62070]: _type = "Task" [ 1064.241272] env[62070]: } to complete. 
{{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1064.255925] env[62070]: DEBUG oslo_concurrency.lockutils [None req-7aa3c262-5cfe-48c5-be13-33f7281359fe tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Releasing lock "[datastore2] devstack-image-cache_base/24ed4c28-b352-4867-857b-17f9624cc455" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1064.256305] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-7aa3c262-5cfe-48c5-be13-33f7281359fe tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] [instance: 53a1791d-38fd-4721-b82c-2f0922348300] Processing image 24ed4c28-b352-4867-857b-17f9624cc455 {{(pid=62070) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1064.256588] env[62070]: DEBUG oslo_concurrency.lockutils [None req-7aa3c262-5cfe-48c5-be13-33f7281359fe tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/24ed4c28-b352-4867-857b-17f9624cc455/24ed4c28-b352-4867-857b-17f9624cc455.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1064.256780] env[62070]: DEBUG oslo_concurrency.lockutils [None req-7aa3c262-5cfe-48c5-be13-33f7281359fe tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Acquired lock "[datastore2] devstack-image-cache_base/24ed4c28-b352-4867-857b-17f9624cc455/24ed4c28-b352-4867-857b-17f9624cc455.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1064.257072] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-7aa3c262-5cfe-48c5-be13-33f7281359fe tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1064.257314] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e83018b2-3f27-451e-a242-f446ccdbe669 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.267378] env[62070]: DEBUG oslo_vmware.api [None req-150f7b60-f8d6-4378-88a6-27be0c832d10 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Task: {'id': task-1122390, 'name': Rename_Task} progress is 10%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1064.267727] env[62070]: DEBUG oslo_vmware.api [None req-fe7daec4-1432-4f51-b956-c8e29c87e358 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Task: {'id': task-1122391, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1064.275984] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-7aa3c262-5cfe-48c5-be13-33f7281359fe tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1064.276233] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-7aa3c262-5cfe-48c5-be13-33f7281359fe tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62070) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1064.277076] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8334e852-a470-47b7-b216-184d991aaa77 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.283644] env[62070]: DEBUG oslo_vmware.api [None req-7aa3c262-5cfe-48c5-be13-33f7281359fe tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Waiting for the task: (returnval){ [ 1064.283644] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]52bf5e3e-f5ec-e2b1-a905-d2b70c172896" [ 1064.283644] env[62070]: _type = "Task" [ 1064.283644] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1064.293550] env[62070]: DEBUG oslo_vmware.api [None req-7aa3c262-5cfe-48c5-be13-33f7281359fe tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52bf5e3e-f5ec-e2b1-a905-d2b70c172896, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1064.415556] env[62070]: DEBUG oslo_concurrency.lockutils [req-d5ee948d-fa9f-4a99-9104-4af49cc320d9 req-d897ee6f-ac2c-4a94-b7de-c1118b4ba599 service nova] Releasing lock "refresh_cache-20c4fabc-fc9b-49c7-ab28-fa092ad66038" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1064.756151] env[62070]: DEBUG oslo_vmware.api [None req-fe7daec4-1432-4f51-b956-c8e29c87e358 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Task: {'id': task-1122391, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071911} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1064.759536] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-fe7daec4-1432-4f51-b956-c8e29c87e358 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] [instance: 20c4fabc-fc9b-49c7-ab28-fa092ad66038] Extended root virtual disk {{(pid=62070) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1064.760049] env[62070]: DEBUG oslo_vmware.api [None req-150f7b60-f8d6-4378-88a6-27be0c832d10 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Task: {'id': task-1122390, 'name': Rename_Task, 'duration_secs': 0.166069} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1064.760649] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-275f910a-a1b6-4939-9409-055475c3c1f3 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.763483] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-150f7b60-f8d6-4378-88a6-27be0c832d10 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: 27d9b478-7ebb-4313-a314-679ca0292086] Powering on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1064.763751] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2170131e-5400-4966-aa14-018af4a56eb3 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.786684] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-fe7daec4-1432-4f51-b956-c8e29c87e358 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] [instance: 20c4fabc-fc9b-49c7-ab28-fa092ad66038] Reconfiguring VM instance instance-00000069 to attach disk [datastore2] 20c4fabc-fc9b-49c7-ab28-fa092ad66038/20c4fabc-fc9b-49c7-ab28-fa092ad66038.vmdk or device None with type sparse {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1064.788401] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7e5001ea-73a9-4472-ba02-33bc4f82db8e {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.803453] env[62070]: DEBUG oslo_vmware.api [None req-150f7b60-f8d6-4378-88a6-27be0c832d10 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Waiting for the task: (returnval){ [ 1064.803453] env[62070]: value = "task-1122392" [ 1064.803453] env[62070]: _type = "Task" [ 1064.803453] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1064.813332] env[62070]: DEBUG oslo_vmware.api [None req-fe7daec4-1432-4f51-b956-c8e29c87e358 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Waiting for the task: (returnval){ [ 1064.813332] env[62070]: value = "task-1122393" [ 1064.813332] env[62070]: _type = "Task" [ 1064.813332] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1064.819903] env[62070]: DEBUG oslo_vmware.api [None req-150f7b60-f8d6-4378-88a6-27be0c832d10 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Task: {'id': task-1122392, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1064.822977] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-7aa3c262-5cfe-48c5-be13-33f7281359fe tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] [instance: 53a1791d-38fd-4721-b82c-2f0922348300] Preparing fetch location {{(pid=62070) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1064.823227] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-7aa3c262-5cfe-48c5-be13-33f7281359fe tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] [instance: 53a1791d-38fd-4721-b82c-2f0922348300] Fetch image to [datastore2] OSTACK_IMG_5f92d90f-3ab8-4300-b189-6acb0d204b76/OSTACK_IMG_5f92d90f-3ab8-4300-b189-6acb0d204b76.vmdk {{(pid=62070) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1064.823412] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-7aa3c262-5cfe-48c5-be13-33f7281359fe tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] [instance: 53a1791d-38fd-4721-b82c-2f0922348300] Downloading stream optimized image 24ed4c28-b352-4867-857b-17f9624cc455 to [datastore2] OSTACK_IMG_5f92d90f-3ab8-4300-b189-6acb0d204b76/OSTACK_IMG_5f92d90f-3ab8-4300-b189-6acb0d204b76.vmdk on the data store datastore2 as vApp {{(pid=62070) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 1064.823586] env[62070]: DEBUG nova.virt.vmwareapi.images [None req-7aa3c262-5cfe-48c5-be13-33f7281359fe tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] [instance: 53a1791d-38fd-4721-b82c-2f0922348300] Downloading image file data 24ed4c28-b352-4867-857b-17f9624cc455 to the ESX as VM named 'OSTACK_IMG_5f92d90f-3ab8-4300-b189-6acb0d204b76' {{(pid=62070) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 1064.831440] env[62070]: DEBUG oslo_vmware.api [None req-fe7daec4-1432-4f51-b956-c8e29c87e358 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Task: {'id': task-1122393, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1064.873026] env[62070]: DEBUG oslo_concurrency.lockutils [None req-fe955260-aba3-482d-aeff-9c9a29ad6847 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.282s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1064.925063] env[62070]: DEBUG oslo_vmware.rw_handles [None req-7aa3c262-5cfe-48c5-be13-33f7281359fe tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 1064.925063] env[62070]: value = "resgroup-9" [ 1064.925063] env[62070]: _type = "ResourcePool" [ 1064.925063] env[62070]: }. 
{{(pid=62070) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 1064.925063] env[62070]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-af20135f-8e21-44fc-afca-d298ed551048 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.952761] env[62070]: DEBUG oslo_vmware.rw_handles [None req-7aa3c262-5cfe-48c5-be13-33f7281359fe tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Lease: (returnval){ [ 1064.952761] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]52ae6634-e291-6e4f-665f-6a03a85bb4de" [ 1064.952761] env[62070]: _type = "HttpNfcLease" [ 1064.952761] env[62070]: } obtained for vApp import into resource pool (val){ [ 1064.952761] env[62070]: value = "resgroup-9" [ 1064.952761] env[62070]: _type = "ResourcePool" [ 1064.952761] env[62070]: }. {{(pid=62070) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 1064.954635] env[62070]: DEBUG oslo_vmware.api [None req-7aa3c262-5cfe-48c5-be13-33f7281359fe tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Waiting for the lease: (returnval){ [ 1064.954635] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]52ae6634-e291-6e4f-665f-6a03a85bb4de" [ 1064.954635] env[62070]: _type = "HttpNfcLease" [ 1064.954635] env[62070]: } to be ready. {{(pid=62070) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1064.962307] env[62070]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1064.962307] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]52ae6634-e291-6e4f-665f-6a03a85bb4de" [ 1064.962307] env[62070]: _type = "HttpNfcLease" [ 1064.962307] env[62070]: } is initializing. 
{{(pid=62070) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1065.186444] env[62070]: DEBUG nova.compute.manager [req-9bc02749-1af7-41dc-a51d-586735d019cb req-55b9ee77-6608-4657-928b-aab1275c4d19 service nova] [instance: 53a1791d-38fd-4721-b82c-2f0922348300] Received event network-vif-plugged-2c6759e4-b6e7-4b67-b06d-d38d6043d3b2 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1065.186444] env[62070]: DEBUG oslo_concurrency.lockutils [req-9bc02749-1af7-41dc-a51d-586735d019cb req-55b9ee77-6608-4657-928b-aab1275c4d19 service nova] Acquiring lock "53a1791d-38fd-4721-b82c-2f0922348300-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1065.186444] env[62070]: DEBUG oslo_concurrency.lockutils [req-9bc02749-1af7-41dc-a51d-586735d019cb req-55b9ee77-6608-4657-928b-aab1275c4d19 service nova] Lock "53a1791d-38fd-4721-b82c-2f0922348300-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1065.186921] env[62070]: DEBUG oslo_concurrency.lockutils [req-9bc02749-1af7-41dc-a51d-586735d019cb req-55b9ee77-6608-4657-928b-aab1275c4d19 service nova] Lock "53a1791d-38fd-4721-b82c-2f0922348300-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1065.186921] env[62070]: DEBUG nova.compute.manager [req-9bc02749-1af7-41dc-a51d-586735d019cb req-55b9ee77-6608-4657-928b-aab1275c4d19 service nova] [instance: 53a1791d-38fd-4721-b82c-2f0922348300] No waiting events found dispatching network-vif-plugged-2c6759e4-b6e7-4b67-b06d-d38d6043d3b2 {{(pid=62070) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1065.187112] env[62070]: WARNING nova.compute.manager [req-9bc02749-1af7-41dc-a51d-586735d019cb req-55b9ee77-6608-4657-928b-aab1275c4d19 service nova] [instance: 53a1791d-38fd-4721-b82c-2f0922348300] Received unexpected event network-vif-plugged-2c6759e4-b6e7-4b67-b06d-d38d6043d3b2 for instance with vm_state shelved_offloaded and task_state spawning. [ 1065.187247] env[62070]: DEBUG nova.compute.manager [req-9bc02749-1af7-41dc-a51d-586735d019cb req-55b9ee77-6608-4657-928b-aab1275c4d19 service nova] [instance: 53a1791d-38fd-4721-b82c-2f0922348300] Received event network-changed-2c6759e4-b6e7-4b67-b06d-d38d6043d3b2 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1065.187407] env[62070]: DEBUG nova.compute.manager [req-9bc02749-1af7-41dc-a51d-586735d019cb req-55b9ee77-6608-4657-928b-aab1275c4d19 service nova] [instance: 53a1791d-38fd-4721-b82c-2f0922348300] Refreshing instance network info cache due to event network-changed-2c6759e4-b6e7-4b67-b06d-d38d6043d3b2. 
{{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1065.187674] env[62070]: DEBUG oslo_concurrency.lockutils [req-9bc02749-1af7-41dc-a51d-586735d019cb req-55b9ee77-6608-4657-928b-aab1275c4d19 service nova] Acquiring lock "refresh_cache-53a1791d-38fd-4721-b82c-2f0922348300" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1065.187828] env[62070]: DEBUG oslo_concurrency.lockutils [req-9bc02749-1af7-41dc-a51d-586735d019cb req-55b9ee77-6608-4657-928b-aab1275c4d19 service nova] Acquired lock "refresh_cache-53a1791d-38fd-4721-b82c-2f0922348300" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1065.187989] env[62070]: DEBUG nova.network.neutron [req-9bc02749-1af7-41dc-a51d-586735d019cb req-55b9ee77-6608-4657-928b-aab1275c4d19 service nova] [instance: 53a1791d-38fd-4721-b82c-2f0922348300] Refreshing network info cache for port 2c6759e4-b6e7-4b67-b06d-d38d6043d3b2 {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1065.317581] env[62070]: DEBUG oslo_vmware.api [None req-150f7b60-f8d6-4378-88a6-27be0c832d10 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Task: {'id': task-1122392, 'name': PowerOnVM_Task} progress is 70%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1065.329038] env[62070]: DEBUG oslo_vmware.api [None req-fe7daec4-1432-4f51-b956-c8e29c87e358 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Task: {'id': task-1122393, 'name': ReconfigVM_Task, 'duration_secs': 0.334847} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1065.329591] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-fe7daec4-1432-4f51-b956-c8e29c87e358 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] [instance: 20c4fabc-fc9b-49c7-ab28-fa092ad66038] Reconfigured VM instance instance-00000069 to attach disk [datastore2] 20c4fabc-fc9b-49c7-ab28-fa092ad66038/20c4fabc-fc9b-49c7-ab28-fa092ad66038.vmdk or device None with type sparse {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1065.330045] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d2396aec-30ac-440f-bcab-308c88d47a9f {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.337305] env[62070]: DEBUG oslo_vmware.api [None req-fe7daec4-1432-4f51-b956-c8e29c87e358 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Waiting for the task: (returnval){ [ 1065.337305] env[62070]: value = "task-1122395" [ 1065.337305] env[62070]: _type = "Task" [ 1065.337305] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1065.345639] env[62070]: DEBUG oslo_vmware.api [None req-fe7daec4-1432-4f51-b956-c8e29c87e358 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Task: {'id': task-1122395, 'name': Rename_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1065.433693] env[62070]: INFO nova.scheduler.client.report [None req-fe955260-aba3-482d-aeff-9c9a29ad6847 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Deleted allocation for migration 947ffd09-2ce3-4070-89c6-6ce3369d0dd2 [ 1065.463406] env[62070]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1065.463406] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]52ae6634-e291-6e4f-665f-6a03a85bb4de" [ 1065.463406] env[62070]: _type = "HttpNfcLease" [ 1065.463406] env[62070]: } is initializing. {{(pid=62070) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1065.822019] env[62070]: DEBUG oslo_vmware.api [None req-150f7b60-f8d6-4378-88a6-27be0c832d10 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Task: {'id': task-1122392, 'name': PowerOnVM_Task, 'duration_secs': 0.853254} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1065.822019] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-150f7b60-f8d6-4378-88a6-27be0c832d10 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: 27d9b478-7ebb-4313-a314-679ca0292086] Powered on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1065.822019] env[62070]: INFO nova.compute.manager [None req-150f7b60-f8d6-4378-88a6-27be0c832d10 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: 27d9b478-7ebb-4313-a314-679ca0292086] Took 8.63 seconds to spawn the instance on the hypervisor. [ 1065.822019] env[62070]: DEBUG nova.compute.manager [None req-150f7b60-f8d6-4378-88a6-27be0c832d10 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: 27d9b478-7ebb-4313-a314-679ca0292086] Checking state {{(pid=62070) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1065.822019] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35e6825b-b7b5-4154-8476-742da274eb1c {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.850071] env[62070]: DEBUG oslo_vmware.api [None req-fe7daec4-1432-4f51-b956-c8e29c87e358 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Task: {'id': task-1122395, 'name': Rename_Task, 'duration_secs': 0.172435} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1065.850357] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-fe7daec4-1432-4f51-b956-c8e29c87e358 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] [instance: 20c4fabc-fc9b-49c7-ab28-fa092ad66038] Powering on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1065.850663] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-417bf847-f1f7-493e-82fe-94c14c60b1fa {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.858220] env[62070]: DEBUG oslo_vmware.api [None req-fe7daec4-1432-4f51-b956-c8e29c87e358 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Waiting for the task: (returnval){ [ 1065.858220] env[62070]: value = "task-1122397" [ 1065.858220] env[62070]: _type = "Task" [ 1065.858220] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1065.868262] env[62070]: DEBUG oslo_vmware.api [None req-fe7daec4-1432-4f51-b956-c8e29c87e358 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Task: {'id': task-1122397, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1065.925147] env[62070]: DEBUG nova.network.neutron [req-9bc02749-1af7-41dc-a51d-586735d019cb req-55b9ee77-6608-4657-928b-aab1275c4d19 service nova] [instance: 53a1791d-38fd-4721-b82c-2f0922348300] Updated VIF entry in instance network info cache for port 2c6759e4-b6e7-4b67-b06d-d38d6043d3b2. 
{{(pid=62070) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1065.925569] env[62070]: DEBUG nova.network.neutron [req-9bc02749-1af7-41dc-a51d-586735d019cb req-55b9ee77-6608-4657-928b-aab1275c4d19 service nova] [instance: 53a1791d-38fd-4721-b82c-2f0922348300] Updating instance_info_cache with network_info: [{"id": "2c6759e4-b6e7-4b67-b06d-d38d6043d3b2", "address": "fa:16:3e:11:96:b8", "network": {"id": "6a62b79f-a98b-4518-86cb-facc7b77da1d", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-2107556336-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "772f7fcee5f44b899b6df797e1ed5ddd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78e1ebb0-0130-446b-bf73-a0e59bbb95cc", "external-id": "nsx-vlan-transportzone-414", "segmentation_id": 414, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2c6759e4-b6", "ovs_interfaceid": "2c6759e4-b6e7-4b67-b06d-d38d6043d3b2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1065.940416] env[62070]: DEBUG oslo_concurrency.lockutils [None req-fe955260-aba3-482d-aeff-9c9a29ad6847 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Lock "33d04e59-da01-4ba3-ac42-ab93372a332d" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 12.787s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1065.964025] env[62070]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1065.964025] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]52ae6634-e291-6e4f-665f-6a03a85bb4de" [ 1065.964025] env[62070]: _type = "HttpNfcLease" [ 1065.964025] env[62070]: } is ready. {{(pid=62070) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1065.964348] env[62070]: DEBUG oslo_vmware.rw_handles [None req-7aa3c262-5cfe-48c5-be13-33f7281359fe tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1065.964348] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]52ae6634-e291-6e4f-665f-6a03a85bb4de" [ 1065.964348] env[62070]: _type = "HttpNfcLease" [ 1065.964348] env[62070]: }. 
{{(pid=62070) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 1065.965099] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac9a49f2-9584-4dd6-a238-af5c75a64e36 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.973772] env[62070]: DEBUG oslo_vmware.rw_handles [None req-7aa3c262-5cfe-48c5-be13-33f7281359fe tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52862e79-e56e-d5e6-268b-3bf0127cf4d2/disk-0.vmdk from lease info. {{(pid=62070) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1065.973965] env[62070]: DEBUG oslo_vmware.rw_handles [None req-7aa3c262-5cfe-48c5-be13-33f7281359fe tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Creating HTTP connection to write to file with size = 31668736 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52862e79-e56e-d5e6-268b-3bf0127cf4d2/disk-0.vmdk. {{(pid=62070) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1066.039325] env[62070]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-7a297b63-b6c9-4b3a-88f1-e3e72c5d8fa9 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.344556] env[62070]: INFO nova.compute.manager [None req-150f7b60-f8d6-4378-88a6-27be0c832d10 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: 27d9b478-7ebb-4313-a314-679ca0292086] Took 18.82 seconds to build instance. [ 1066.372699] env[62070]: DEBUG oslo_vmware.api [None req-fe7daec4-1432-4f51-b956-c8e29c87e358 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Task: {'id': task-1122397, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1066.429953] env[62070]: DEBUG oslo_concurrency.lockutils [req-9bc02749-1af7-41dc-a51d-586735d019cb req-55b9ee77-6608-4657-928b-aab1275c4d19 service nova] Releasing lock "refresh_cache-53a1791d-38fd-4721-b82c-2f0922348300" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1066.846652] env[62070]: DEBUG oslo_concurrency.lockutils [None req-150f7b60-f8d6-4378-88a6-27be0c832d10 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Lock "27d9b478-7ebb-4313-a314-679ca0292086" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 20.346s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1066.871640] env[62070]: DEBUG oslo_vmware.api [None req-fe7daec4-1432-4f51-b956-c8e29c87e358 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Task: {'id': task-1122397, 'name': PowerOnVM_Task, 'duration_secs': 0.548581} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1066.871793] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-fe7daec4-1432-4f51-b956-c8e29c87e358 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] [instance: 20c4fabc-fc9b-49c7-ab28-fa092ad66038] Powered on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1066.872016] env[62070]: INFO nova.compute.manager [None req-fe7daec4-1432-4f51-b956-c8e29c87e358 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] [instance: 20c4fabc-fc9b-49c7-ab28-fa092ad66038] Took 7.35 seconds to spawn the instance on the hypervisor. [ 1066.872229] env[62070]: DEBUG nova.compute.manager [None req-fe7daec4-1432-4f51-b956-c8e29c87e358 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] [instance: 20c4fabc-fc9b-49c7-ab28-fa092ad66038] Checking state {{(pid=62070) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1066.873058] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92fd768a-8097-41b9-8221-0f73f2dc70d6 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.889218] env[62070]: DEBUG oslo_concurrency.lockutils [None req-0e627254-5e66-410e-83f6-90b7abe8c714 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] Acquiring lock "b1137be1-b66b-4eb2-bdbd-1db6173a1f93" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1066.889218] env[62070]: DEBUG oslo_concurrency.lockutils [None req-0e627254-5e66-410e-83f6-90b7abe8c714 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] Lock "b1137be1-b66b-4eb2-bdbd-1db6173a1f93" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1067.196821] env[62070]: DEBUG oslo_concurrency.lockutils [None req-fe955260-aba3-482d-aeff-9c9a29ad6847 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Acquiring lock "33d04e59-da01-4ba3-ac42-ab93372a332d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1067.197150] env[62070]: DEBUG oslo_concurrency.lockutils [None req-fe955260-aba3-482d-aeff-9c9a29ad6847 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Lock "33d04e59-da01-4ba3-ac42-ab93372a332d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1067.197386] env[62070]: DEBUG oslo_concurrency.lockutils [None req-fe955260-aba3-482d-aeff-9c9a29ad6847 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Acquiring lock "33d04e59-da01-4ba3-ac42-ab93372a332d-events" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1067.197584] env[62070]: DEBUG oslo_concurrency.lockutils [None req-fe955260-aba3-482d-aeff-9c9a29ad6847 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Lock "33d04e59-da01-4ba3-ac42-ab93372a332d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1067.197756] env[62070]: DEBUG oslo_concurrency.lockutils [None req-fe955260-aba3-482d-aeff-9c9a29ad6847 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Lock "33d04e59-da01-4ba3-ac42-ab93372a332d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1067.201711] env[62070]: INFO nova.compute.manager [None req-fe955260-aba3-482d-aeff-9c9a29ad6847 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 33d04e59-da01-4ba3-ac42-ab93372a332d] Terminating instance [ 1067.203950] env[62070]: DEBUG nova.compute.manager [None req-fe955260-aba3-482d-aeff-9c9a29ad6847 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 33d04e59-da01-4ba3-ac42-ab93372a332d] Start destroying the instance on the hypervisor. {{(pid=62070) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1067.204133] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-fe955260-aba3-482d-aeff-9c9a29ad6847 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 33d04e59-da01-4ba3-ac42-ab93372a332d] Destroying instance {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1067.205288] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-536b1c2a-f3cc-4030-859e-a4eb8e52059c {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.214927] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-fe955260-aba3-482d-aeff-9c9a29ad6847 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 33d04e59-da01-4ba3-ac42-ab93372a332d] Powering off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1067.215288] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-491739ac-7639-4721-b22b-6429687e670d {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.223695] env[62070]: DEBUG oslo_vmware.api [None req-fe955260-aba3-482d-aeff-9c9a29ad6847 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Waiting for the task: (returnval){ [ 1067.223695] env[62070]: value = "task-1122399" [ 1067.223695] env[62070]: _type = "Task" [ 1067.223695] env[62070]: } to complete. 
{{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1067.236127] env[62070]: DEBUG oslo_vmware.api [None req-fe955260-aba3-482d-aeff-9c9a29ad6847 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Task: {'id': task-1122399, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1067.396033] env[62070]: DEBUG nova.compute.manager [None req-0e627254-5e66-410e-83f6-90b7abe8c714 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] [instance: b1137be1-b66b-4eb2-bdbd-1db6173a1f93] Starting instance... {{(pid=62070) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1067.407720] env[62070]: INFO nova.compute.manager [None req-fe7daec4-1432-4f51-b956-c8e29c87e358 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] [instance: 20c4fabc-fc9b-49c7-ab28-fa092ad66038] Took 17.25 seconds to build instance. [ 1067.473288] env[62070]: DEBUG oslo_vmware.rw_handles [None req-7aa3c262-5cfe-48c5-be13-33f7281359fe tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Completed reading data from the image iterator. {{(pid=62070) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1067.473793] env[62070]: DEBUG oslo_vmware.rw_handles [None req-7aa3c262-5cfe-48c5-be13-33f7281359fe tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52862e79-e56e-d5e6-268b-3bf0127cf4d2/disk-0.vmdk. {{(pid=62070) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1067.475102] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb2395a3-3a92-4f3c-a6ca-c4921977bc59 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.484353] env[62070]: DEBUG oslo_vmware.rw_handles [None req-7aa3c262-5cfe-48c5-be13-33f7281359fe tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52862e79-e56e-d5e6-268b-3bf0127cf4d2/disk-0.vmdk is in state: ready. {{(pid=62070) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1067.484572] env[62070]: DEBUG oslo_vmware.rw_handles [None req-7aa3c262-5cfe-48c5-be13-33f7281359fe tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Releasing lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52862e79-e56e-d5e6-268b-3bf0127cf4d2/disk-0.vmdk. 
{{(pid=62070) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 1067.484867] env[62070]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-7b528f70-601a-4a97-9b86-b3e1f801f65b {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.561947] env[62070]: DEBUG oslo_concurrency.lockutils [None req-8b0af9b1-ec76-41e4-ae6d-716f7bf2fb62 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Acquiring lock "27d9b478-7ebb-4313-a314-679ca0292086" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1067.562288] env[62070]: DEBUG oslo_concurrency.lockutils [None req-8b0af9b1-ec76-41e4-ae6d-716f7bf2fb62 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Lock "27d9b478-7ebb-4313-a314-679ca0292086" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.001s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1067.562484] env[62070]: DEBUG nova.compute.manager [None req-8b0af9b1-ec76-41e4-ae6d-716f7bf2fb62 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: 27d9b478-7ebb-4313-a314-679ca0292086] Checking state {{(pid=62070) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1067.563419] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3428e1a-e648-4e91-9971-23243f6cad01 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.571432] env[62070]: DEBUG nova.compute.manager [None req-8b0af9b1-ec76-41e4-ae6d-716f7bf2fb62 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: 27d9b478-7ebb-4313-a314-679ca0292086] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62070) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3368}} [ 1067.572015] env[62070]: DEBUG nova.objects.instance [None req-8b0af9b1-ec76-41e4-ae6d-716f7bf2fb62 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Lazy-loading 'flavor' on Instance uuid 27d9b478-7ebb-4313-a314-679ca0292086 {{(pid=62070) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1067.605372] env[62070]: DEBUG nova.compute.manager [req-5d9ad580-ab38-4780-ba6f-e7d8e3ceb468 req-5e0fde5b-4683-4b6f-ace9-9c0d5fdc72fe service nova] [instance: 20c4fabc-fc9b-49c7-ab28-fa092ad66038] Received event network-changed-0eb3c7d4-224c-4d94-9450-0623a1e1b162 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1067.605372] env[62070]: DEBUG nova.compute.manager [req-5d9ad580-ab38-4780-ba6f-e7d8e3ceb468 req-5e0fde5b-4683-4b6f-ace9-9c0d5fdc72fe service nova] [instance: 20c4fabc-fc9b-49c7-ab28-fa092ad66038] Refreshing instance network info cache due to event network-changed-0eb3c7d4-224c-4d94-9450-0623a1e1b162. 
{{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1067.605756] env[62070]: DEBUG oslo_concurrency.lockutils [req-5d9ad580-ab38-4780-ba6f-e7d8e3ceb468 req-5e0fde5b-4683-4b6f-ace9-9c0d5fdc72fe service nova] Acquiring lock "refresh_cache-20c4fabc-fc9b-49c7-ab28-fa092ad66038" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1067.606077] env[62070]: DEBUG oslo_concurrency.lockutils [req-5d9ad580-ab38-4780-ba6f-e7d8e3ceb468 req-5e0fde5b-4683-4b6f-ace9-9c0d5fdc72fe service nova] Acquired lock "refresh_cache-20c4fabc-fc9b-49c7-ab28-fa092ad66038" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1067.606395] env[62070]: DEBUG nova.network.neutron [req-5d9ad580-ab38-4780-ba6f-e7d8e3ceb468 req-5e0fde5b-4683-4b6f-ace9-9c0d5fdc72fe service nova] [instance: 20c4fabc-fc9b-49c7-ab28-fa092ad66038] Refreshing network info cache for port 0eb3c7d4-224c-4d94-9450-0623a1e1b162 {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1067.679398] env[62070]: DEBUG nova.objects.instance [None req-bc9a36f7-cb9c-494e-bbf1-e85b37f351b3 tempest-AttachInterfacesUnderV243Test-1418654099 tempest-AttachInterfacesUnderV243Test-1418654099-project-member] Lazy-loading 'flavor' on Instance uuid 7bfda953-ac95-4dce-b7a7-c570eae35582 {{(pid=62070) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1067.734281] env[62070]: DEBUG oslo_vmware.api [None req-fe955260-aba3-482d-aeff-9c9a29ad6847 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Task: {'id': task-1122399, 'name': PowerOffVM_Task, 'duration_secs': 0.237911} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1067.734381] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-fe955260-aba3-482d-aeff-9c9a29ad6847 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 33d04e59-da01-4ba3-ac42-ab93372a332d] Powered off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1067.734558] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-fe955260-aba3-482d-aeff-9c9a29ad6847 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 33d04e59-da01-4ba3-ac42-ab93372a332d] Unregistering the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1067.734843] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9e09dc59-d53b-411e-8436-6d306e882018 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.911980] env[62070]: DEBUG oslo_concurrency.lockutils [None req-fe7daec4-1432-4f51-b956-c8e29c87e358 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Lock "20c4fabc-fc9b-49c7-ab28-fa092ad66038" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 18.766s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1067.928977] env[62070]: DEBUG oslo_concurrency.lockutils [None req-0e627254-5e66-410e-83f6-90b7abe8c714 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] Acquiring lock 
"compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1067.928977] env[62070]: DEBUG oslo_concurrency.lockutils [None req-0e627254-5e66-410e-83f6-90b7abe8c714 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1067.930816] env[62070]: INFO nova.compute.claims [None req-0e627254-5e66-410e-83f6-90b7abe8c714 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] [instance: b1137be1-b66b-4eb2-bdbd-1db6173a1f93] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1068.043525] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-fe955260-aba3-482d-aeff-9c9a29ad6847 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 33d04e59-da01-4ba3-ac42-ab93372a332d] Unregistered the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1068.043900] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-fe955260-aba3-482d-aeff-9c9a29ad6847 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 33d04e59-da01-4ba3-ac42-ab93372a332d] Deleting contents of the VM from datastore datastore1 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1068.044147] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-fe955260-aba3-482d-aeff-9c9a29ad6847 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Deleting the datastore file [datastore1] 33d04e59-da01-4ba3-ac42-ab93372a332d {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1068.044463] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7db82455-eb3f-4c51-be79-61e694a12bc2 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.052580] env[62070]: DEBUG oslo_vmware.api [None req-fe955260-aba3-482d-aeff-9c9a29ad6847 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Waiting for the task: (returnval){ [ 1068.052580] env[62070]: value = "task-1122401" [ 1068.052580] env[62070]: _type = "Task" [ 1068.052580] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1068.061960] env[62070]: DEBUG oslo_vmware.api [None req-fe955260-aba3-482d-aeff-9c9a29ad6847 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Task: {'id': task-1122401, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1068.078299] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-8b0af9b1-ec76-41e4-ae6d-716f7bf2fb62 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: 27d9b478-7ebb-4313-a314-679ca0292086] Powering off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1068.078608] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-684e24c6-b999-41ca-911b-31f055ec8828 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.086198] env[62070]: DEBUG oslo_vmware.api [None req-8b0af9b1-ec76-41e4-ae6d-716f7bf2fb62 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Waiting for the task: (returnval){ [ 1068.086198] env[62070]: value = "task-1122402" [ 1068.086198] env[62070]: _type = "Task" [ 1068.086198] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1068.094993] env[62070]: DEBUG oslo_vmware.api [None req-8b0af9b1-ec76-41e4-ae6d-716f7bf2fb62 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Task: {'id': task-1122402, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1068.096836] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-143965fc-7e83-4cd2-ba21-818f43f3748e tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] [instance: 5cccd79d-d243-49db-8581-718dd594f3b3] Volume attach. 
Driver type: vmdk {{(pid=62070) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1068.097061] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-143965fc-7e83-4cd2-ba21-818f43f3748e tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] [instance: 5cccd79d-d243-49db-8581-718dd594f3b3] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-245509', 'volume_id': '73eaeb2a-8061-49f0-ba0e-4c1fc41e031f', 'name': 'volume-73eaeb2a-8061-49f0-ba0e-4c1fc41e031f', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '5cccd79d-d243-49db-8581-718dd594f3b3', 'attached_at': '', 'detached_at': '', 'volume_id': '73eaeb2a-8061-49f0-ba0e-4c1fc41e031f', 'serial': '73eaeb2a-8061-49f0-ba0e-4c1fc41e031f'} {{(pid=62070) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1068.098147] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3de7932-a989-4090-9e51-befe1f60e439 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.117025] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-403dc936-7d5e-48cc-951a-5fcbb9b9165d {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.147871] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-143965fc-7e83-4cd2-ba21-818f43f3748e tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] [instance: 5cccd79d-d243-49db-8581-718dd594f3b3] Reconfiguring VM instance instance-0000005f to attach disk [datastore2] volume-73eaeb2a-8061-49f0-ba0e-4c1fc41e031f/volume-73eaeb2a-8061-49f0-ba0e-4c1fc41e031f.vmdk or device None with type thin {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1068.148260] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b424e0e5-db0b-44b6-9907-c43ad50d94d2 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.170798] env[62070]: DEBUG oslo_vmware.api [None req-143965fc-7e83-4cd2-ba21-818f43f3748e tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Waiting for the task: (returnval){ [ 1068.170798] env[62070]: value = "task-1122403" [ 1068.170798] env[62070]: _type = "Task" [ 1068.170798] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1068.181441] env[62070]: DEBUG oslo_vmware.api [None req-143965fc-7e83-4cd2-ba21-818f43f3748e tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Task: {'id': task-1122403, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1068.187032] env[62070]: DEBUG oslo_concurrency.lockutils [None req-bc9a36f7-cb9c-494e-bbf1-e85b37f351b3 tempest-AttachInterfacesUnderV243Test-1418654099 tempest-AttachInterfacesUnderV243Test-1418654099-project-member] Acquiring lock "refresh_cache-7bfda953-ac95-4dce-b7a7-c570eae35582" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1068.187032] env[62070]: DEBUG oslo_concurrency.lockutils [None req-bc9a36f7-cb9c-494e-bbf1-e85b37f351b3 tempest-AttachInterfacesUnderV243Test-1418654099 tempest-AttachInterfacesUnderV243Test-1418654099-project-member] Acquired lock "refresh_cache-7bfda953-ac95-4dce-b7a7-c570eae35582" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1068.228234] env[62070]: DEBUG oslo_vmware.rw_handles [None req-7aa3c262-5cfe-48c5-be13-33f7281359fe tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Closed VMDK write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52862e79-e56e-d5e6-268b-3bf0127cf4d2/disk-0.vmdk. {{(pid=62070) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 1068.228234] env[62070]: INFO nova.virt.vmwareapi.images [None req-7aa3c262-5cfe-48c5-be13-33f7281359fe tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] [instance: 53a1791d-38fd-4721-b82c-2f0922348300] Downloaded image file data 24ed4c28-b352-4867-857b-17f9624cc455 [ 1068.228234] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0c1de4e-5665-4b52-b3d3-9192152ff803 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.251096] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7f954355-6698-429c-870c-b2d36af976a7 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.282998] env[62070]: INFO nova.virt.vmwareapi.images [None req-7aa3c262-5cfe-48c5-be13-33f7281359fe tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] [instance: 53a1791d-38fd-4721-b82c-2f0922348300] The imported VM was unregistered [ 1068.285750] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-7aa3c262-5cfe-48c5-be13-33f7281359fe tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] [instance: 53a1791d-38fd-4721-b82c-2f0922348300] Caching image {{(pid=62070) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1068.286046] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-7aa3c262-5cfe-48c5-be13-33f7281359fe tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Creating directory with path [datastore2] devstack-image-cache_base/24ed4c28-b352-4867-857b-17f9624cc455 {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1068.286359] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6d70bc5b-1ab2-4908-ab4c-340190fb281e {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.300358] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-7aa3c262-5cfe-48c5-be13-33f7281359fe tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Created directory with path [datastore2] devstack-image-cache_base/24ed4c28-b352-4867-857b-17f9624cc455 {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1068.300658] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-7aa3c262-5cfe-48c5-be13-33f7281359fe tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Moving virtual disk from [datastore2] OSTACK_IMG_5f92d90f-3ab8-4300-b189-6acb0d204b76/OSTACK_IMG_5f92d90f-3ab8-4300-b189-6acb0d204b76.vmdk to [datastore2] devstack-image-cache_base/24ed4c28-b352-4867-857b-17f9624cc455/24ed4c28-b352-4867-857b-17f9624cc455.vmdk. {{(pid=62070) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 1068.300954] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-53cdf115-5ffd-4f7b-87ad-ba6dd601a834 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.309737] env[62070]: DEBUG oslo_vmware.api [None req-7aa3c262-5cfe-48c5-be13-33f7281359fe tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Waiting for the task: (returnval){ [ 1068.309737] env[62070]: value = "task-1122406" [ 1068.309737] env[62070]: _type = "Task" [ 1068.309737] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1068.319409] env[62070]: DEBUG oslo_vmware.api [None req-7aa3c262-5cfe-48c5-be13-33f7281359fe tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Task: {'id': task-1122406, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1068.405985] env[62070]: DEBUG nova.network.neutron [req-5d9ad580-ab38-4780-ba6f-e7d8e3ceb468 req-5e0fde5b-4683-4b6f-ace9-9c0d5fdc72fe service nova] [instance: 20c4fabc-fc9b-49c7-ab28-fa092ad66038] Updated VIF entry in instance network info cache for port 0eb3c7d4-224c-4d94-9450-0623a1e1b162. 
{{(pid=62070) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1068.406902] env[62070]: DEBUG nova.network.neutron [req-5d9ad580-ab38-4780-ba6f-e7d8e3ceb468 req-5e0fde5b-4683-4b6f-ace9-9c0d5fdc72fe service nova] [instance: 20c4fabc-fc9b-49c7-ab28-fa092ad66038] Updating instance_info_cache with network_info: [{"id": "0eb3c7d4-224c-4d94-9450-0623a1e1b162", "address": "fa:16:3e:d9:a2:6e", "network": {"id": "b9ef8f6c-bbd6-409d-a591-ad584e5e028f", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-599171324-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.143", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ca25fba006b740f2a86fe10e4abe9400", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa8c2f93-f287-41b3-adb6-4942a7ea2a0b", "external-id": "nsx-vlan-transportzone-363", "segmentation_id": 363, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0eb3c7d4-22", "ovs_interfaceid": "0eb3c7d4-224c-4d94-9450-0623a1e1b162", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1068.563053] env[62070]: DEBUG oslo_vmware.api [None req-fe955260-aba3-482d-aeff-9c9a29ad6847 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Task: {'id': task-1122401, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.321226} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1068.563339] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-fe955260-aba3-482d-aeff-9c9a29ad6847 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Deleted the datastore file {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1068.563534] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-fe955260-aba3-482d-aeff-9c9a29ad6847 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 33d04e59-da01-4ba3-ac42-ab93372a332d] Deleted contents of the VM from datastore datastore1 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1068.563719] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-fe955260-aba3-482d-aeff-9c9a29ad6847 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 33d04e59-da01-4ba3-ac42-ab93372a332d] Instance destroyed {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1068.563895] env[62070]: INFO nova.compute.manager [None req-fe955260-aba3-482d-aeff-9c9a29ad6847 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] [instance: 33d04e59-da01-4ba3-ac42-ab93372a332d] Took 1.36 seconds to destroy the instance on the hypervisor. 
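Editor's note: the PowerOffVM_Task / UnregisterVM / DeleteDatastoreFile_Task entries above follow the usual oslo.vmware pattern — the driver invokes a vSphere task method through the API session and then blocks in wait_for_task(), which is what produces the "Waiting for the task", "progress is N%" and "completed successfully" lines. A minimal sketch of that pattern using oslo.vmware's public session API is shown below; the connection settings and the VM moref value are placeholders, not values taken from this log, and Nova itself wraps these calls in its own vm_util helpers rather than calling the session directly like this.

    from oslo_vmware import api, vim_util

    # Hypothetical vCenter connection settings; in Nova these come from the
    # [vmware] section of nova.conf, not from this log.
    session = api.VMwareAPISession(
        host='vc.example.test',
        server_username='user',
        server_password='secret',
        api_retry_count=10,
        task_poll_interval=0.5)

    # Build a managed-object reference for a VM (placeholder moref value).
    vm_ref = vim_util.get_moref('vm-12345', 'VirtualMachine')

    # Kick off the vSphere task, then poll it to completion. The polling loop
    # inside wait_for_task() is what emits the "progress is N%" DEBUG lines
    # and raises if the task ends in an error state.
    task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
    session.wait_for_task(task)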
[ 1068.564162] env[62070]: DEBUG oslo.service.loopingcall [None req-fe955260-aba3-482d-aeff-9c9a29ad6847 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1068.564363] env[62070]: DEBUG nova.compute.manager [-] [instance: 33d04e59-da01-4ba3-ac42-ab93372a332d] Deallocating network for instance {{(pid=62070) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1068.564459] env[62070]: DEBUG nova.network.neutron [-] [instance: 33d04e59-da01-4ba3-ac42-ab93372a332d] deallocate_for_instance() {{(pid=62070) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1068.596357] env[62070]: DEBUG oslo_vmware.api [None req-8b0af9b1-ec76-41e4-ae6d-716f7bf2fb62 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Task: {'id': task-1122402, 'name': PowerOffVM_Task, 'duration_secs': 0.300081} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1068.598583] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-8b0af9b1-ec76-41e4-ae6d-716f7bf2fb62 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: 27d9b478-7ebb-4313-a314-679ca0292086] Powered off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1068.598793] env[62070]: DEBUG nova.compute.manager [None req-8b0af9b1-ec76-41e4-ae6d-716f7bf2fb62 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: 27d9b478-7ebb-4313-a314-679ca0292086] Checking state {{(pid=62070) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1068.599606] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-356db895-70a5-49fe-995a-f697f050ebcb {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.622149] env[62070]: DEBUG nova.network.neutron [None req-bc9a36f7-cb9c-494e-bbf1-e85b37f351b3 tempest-AttachInterfacesUnderV243Test-1418654099 tempest-AttachInterfacesUnderV243Test-1418654099-project-member] [instance: 7bfda953-ac95-4dce-b7a7-c570eae35582] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1068.681768] env[62070]: DEBUG oslo_vmware.api [None req-143965fc-7e83-4cd2-ba21-818f43f3748e tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Task: {'id': task-1122403, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1068.819677] env[62070]: DEBUG nova.compute.manager [req-a9d66a5d-2a0d-43a1-b4f2-25752ead46ce req-8245a1f6-51e0-4a01-99e8-4074afd49b6f service nova] [instance: 33d04e59-da01-4ba3-ac42-ab93372a332d] Received event network-vif-deleted-222fba36-759a-41f7-a82a-cb4047bd3725 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1068.819883] env[62070]: INFO nova.compute.manager [req-a9d66a5d-2a0d-43a1-b4f2-25752ead46ce req-8245a1f6-51e0-4a01-99e8-4074afd49b6f service nova] [instance: 33d04e59-da01-4ba3-ac42-ab93372a332d] Neutron deleted interface 222fba36-759a-41f7-a82a-cb4047bd3725; detaching it from the instance and deleting it from the info cache [ 1068.820076] env[62070]: DEBUG nova.network.neutron [req-a9d66a5d-2a0d-43a1-b4f2-25752ead46ce req-8245a1f6-51e0-4a01-99e8-4074afd49b6f service nova] [instance: 33d04e59-da01-4ba3-ac42-ab93372a332d] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1068.825815] env[62070]: DEBUG oslo_vmware.api [None req-7aa3c262-5cfe-48c5-be13-33f7281359fe tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Task: {'id': task-1122406, 'name': MoveVirtualDisk_Task} progress is 12%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1068.909556] env[62070]: DEBUG oslo_concurrency.lockutils [req-5d9ad580-ab38-4780-ba6f-e7d8e3ceb468 req-5e0fde5b-4683-4b6f-ace9-9c0d5fdc72fe service nova] Releasing lock "refresh_cache-20c4fabc-fc9b-49c7-ab28-fa092ad66038" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1069.114472] env[62070]: DEBUG oslo_concurrency.lockutils [None req-8b0af9b1-ec76-41e4-ae6d-716f7bf2fb62 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Lock "27d9b478-7ebb-4313-a314-679ca0292086" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.552s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1069.140917] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5422135e-7dba-4b9a-8756-23fe35e18fd0 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.150424] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfbc3bec-5092-4d2b-b771-6d36212c3475 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.190197] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58184567-d3ad-45fc-a17c-392511f6227f {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.202582] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f581027-71f5-40af-8b0a-d08fcc27cd00 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.206353] env[62070]: DEBUG oslo_vmware.api [None req-143965fc-7e83-4cd2-ba21-818f43f3748e tempest-AttachVolumeTestJSON-1145072651 
tempest-AttachVolumeTestJSON-1145072651-project-member] Task: {'id': task-1122403, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1069.218277] env[62070]: DEBUG nova.compute.provider_tree [None req-0e627254-5e66-410e-83f6-90b7abe8c714 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1069.298671] env[62070]: DEBUG nova.network.neutron [-] [instance: 33d04e59-da01-4ba3-ac42-ab93372a332d] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1069.321718] env[62070]: DEBUG oslo_vmware.api [None req-7aa3c262-5cfe-48c5-be13-33f7281359fe tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Task: {'id': task-1122406, 'name': MoveVirtualDisk_Task} progress is 32%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1069.328279] env[62070]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-97a2c2e6-d739-48cc-83f4-4f03492fa4e0 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.344031] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b81db63e-b5ab-45e4-91b0-b1cb101b342a {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.378877] env[62070]: DEBUG nova.compute.manager [req-a9d66a5d-2a0d-43a1-b4f2-25752ead46ce req-8245a1f6-51e0-4a01-99e8-4074afd49b6f service nova] [instance: 33d04e59-da01-4ba3-ac42-ab93372a332d] Detach interface failed, port_id=222fba36-759a-41f7-a82a-cb4047bd3725, reason: Instance 33d04e59-da01-4ba3-ac42-ab93372a332d could not be found. 
{{(pid=62070) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1069.444826] env[62070]: DEBUG nova.network.neutron [None req-bc9a36f7-cb9c-494e-bbf1-e85b37f351b3 tempest-AttachInterfacesUnderV243Test-1418654099 tempest-AttachInterfacesUnderV243Test-1418654099-project-member] [instance: 7bfda953-ac95-4dce-b7a7-c570eae35582] Updating instance_info_cache with network_info: [{"id": "8372f59a-de9e-4062-be0b-39996b8f6c9e", "address": "fa:16:3e:5b:b2:cf", "network": {"id": "b8a611af-59c0-4ce4-9889-55902a16c816", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-841643028-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.187", "type": "floating", "version": 4, "meta": {}}]}, {"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3587df64b22e4cfc8220532cdda18c28", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f4a795c-8718-4a7c-aafe-9da231df10f8", "external-id": "nsx-vlan-transportzone-162", "segmentation_id": 162, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8372f59a-de", "ovs_interfaceid": "8372f59a-de9e-4062-be0b-39996b8f6c9e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1069.635677] env[62070]: DEBUG nova.compute.manager [req-5362e20b-f0d2-4a4c-870f-e3e26a67d1ba req-67762cba-edd2-40bf-beae-59c3a9bd29dc service nova] [instance: 7bfda953-ac95-4dce-b7a7-c570eae35582] Received event network-changed-8372f59a-de9e-4062-be0b-39996b8f6c9e {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1069.635873] env[62070]: DEBUG nova.compute.manager [req-5362e20b-f0d2-4a4c-870f-e3e26a67d1ba req-67762cba-edd2-40bf-beae-59c3a9bd29dc service nova] [instance: 7bfda953-ac95-4dce-b7a7-c570eae35582] Refreshing instance network info cache due to event network-changed-8372f59a-de9e-4062-be0b-39996b8f6c9e. {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1069.636126] env[62070]: DEBUG oslo_concurrency.lockutils [req-5362e20b-f0d2-4a4c-870f-e3e26a67d1ba req-67762cba-edd2-40bf-beae-59c3a9bd29dc service nova] Acquiring lock "refresh_cache-7bfda953-ac95-4dce-b7a7-c570eae35582" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1069.694719] env[62070]: DEBUG oslo_vmware.api [None req-143965fc-7e83-4cd2-ba21-818f43f3748e tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Task: {'id': task-1122403, 'name': ReconfigVM_Task, 'duration_secs': 1.395935} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1069.695068] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-143965fc-7e83-4cd2-ba21-818f43f3748e tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] [instance: 5cccd79d-d243-49db-8581-718dd594f3b3] Reconfigured VM instance instance-0000005f to attach disk [datastore2] volume-73eaeb2a-8061-49f0-ba0e-4c1fc41e031f/volume-73eaeb2a-8061-49f0-ba0e-4c1fc41e031f.vmdk or device None with type thin {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1069.700180] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-629007f4-9883-47d7-93e5-7e42952b975e {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.719924] env[62070]: DEBUG oslo_vmware.api [None req-143965fc-7e83-4cd2-ba21-818f43f3748e tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Waiting for the task: (returnval){ [ 1069.719924] env[62070]: value = "task-1122407" [ 1069.719924] env[62070]: _type = "Task" [ 1069.719924] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1069.720991] env[62070]: DEBUG nova.scheduler.client.report [None req-0e627254-5e66-410e-83f6-90b7abe8c714 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1069.734220] env[62070]: DEBUG oslo_vmware.api [None req-143965fc-7e83-4cd2-ba21-818f43f3748e tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Task: {'id': task-1122407, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1069.800636] env[62070]: INFO nova.compute.manager [-] [instance: 33d04e59-da01-4ba3-ac42-ab93372a332d] Took 1.24 seconds to deallocate network for instance. [ 1069.823644] env[62070]: DEBUG oslo_vmware.api [None req-7aa3c262-5cfe-48c5-be13-33f7281359fe tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Task: {'id': task-1122406, 'name': MoveVirtualDisk_Task} progress is 52%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1069.914721] env[62070]: DEBUG nova.objects.instance [None req-c6161150-bb59-4411-9190-3af04d1ac149 tempest-AttachInterfacesUnderV243Test-1418654099 tempest-AttachInterfacesUnderV243Test-1418654099-project-member] Lazy-loading 'flavor' on Instance uuid 7bfda953-ac95-4dce-b7a7-c570eae35582 {{(pid=62070) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1069.950133] env[62070]: DEBUG oslo_concurrency.lockutils [None req-bc9a36f7-cb9c-494e-bbf1-e85b37f351b3 tempest-AttachInterfacesUnderV243Test-1418654099 tempest-AttachInterfacesUnderV243Test-1418654099-project-member] Releasing lock "refresh_cache-7bfda953-ac95-4dce-b7a7-c570eae35582" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1069.950482] env[62070]: DEBUG nova.compute.manager [None req-bc9a36f7-cb9c-494e-bbf1-e85b37f351b3 tempest-AttachInterfacesUnderV243Test-1418654099 tempest-AttachInterfacesUnderV243Test-1418654099-project-member] [instance: 7bfda953-ac95-4dce-b7a7-c570eae35582] Inject network info {{(pid=62070) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7219}} [ 1069.950788] env[62070]: DEBUG nova.compute.manager [None req-bc9a36f7-cb9c-494e-bbf1-e85b37f351b3 tempest-AttachInterfacesUnderV243Test-1418654099 tempest-AttachInterfacesUnderV243Test-1418654099-project-member] [instance: 7bfda953-ac95-4dce-b7a7-c570eae35582] network_info to inject: |[{"id": "8372f59a-de9e-4062-be0b-39996b8f6c9e", "address": "fa:16:3e:5b:b2:cf", "network": {"id": "b8a611af-59c0-4ce4-9889-55902a16c816", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-841643028-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.187", "type": "floating", "version": 4, "meta": {}}]}, {"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3587df64b22e4cfc8220532cdda18c28", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f4a795c-8718-4a7c-aafe-9da231df10f8", "external-id": "nsx-vlan-transportzone-162", "segmentation_id": 162, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8372f59a-de", "ovs_interfaceid": "8372f59a-de9e-4062-be0b-39996b8f6c9e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62070) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7220}} [ 1069.956243] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-bc9a36f7-cb9c-494e-bbf1-e85b37f351b3 tempest-AttachInterfacesUnderV243Test-1418654099 tempest-AttachInterfacesUnderV243Test-1418654099-project-member] [instance: 7bfda953-ac95-4dce-b7a7-c570eae35582] Reconfiguring VM instance to set the machine id {{(pid=62070) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1802}} [ 1069.957099] env[62070]: DEBUG oslo_concurrency.lockutils [req-5362e20b-f0d2-4a4c-870f-e3e26a67d1ba req-67762cba-edd2-40bf-beae-59c3a9bd29dc service nova] Acquired lock 
"refresh_cache-7bfda953-ac95-4dce-b7a7-c570eae35582" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1069.957330] env[62070]: DEBUG nova.network.neutron [req-5362e20b-f0d2-4a4c-870f-e3e26a67d1ba req-67762cba-edd2-40bf-beae-59c3a9bd29dc service nova] [instance: 7bfda953-ac95-4dce-b7a7-c570eae35582] Refreshing network info cache for port 8372f59a-de9e-4062-be0b-39996b8f6c9e {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1069.959471] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-dde93545-6fa0-4823-8022-64e8ecbe8ce4 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.980047] env[62070]: DEBUG oslo_vmware.api [None req-bc9a36f7-cb9c-494e-bbf1-e85b37f351b3 tempest-AttachInterfacesUnderV243Test-1418654099 tempest-AttachInterfacesUnderV243Test-1418654099-project-member] Waiting for the task: (returnval){ [ 1069.980047] env[62070]: value = "task-1122408" [ 1069.980047] env[62070]: _type = "Task" [ 1069.980047] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1069.996831] env[62070]: DEBUG oslo_vmware.api [None req-bc9a36f7-cb9c-494e-bbf1-e85b37f351b3 tempest-AttachInterfacesUnderV243Test-1418654099 tempest-AttachInterfacesUnderV243Test-1418654099-project-member] Task: {'id': task-1122408, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1070.232524] env[62070]: DEBUG oslo_concurrency.lockutils [None req-0e627254-5e66-410e-83f6-90b7abe8c714 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.303s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1070.233053] env[62070]: DEBUG nova.compute.manager [None req-0e627254-5e66-410e-83f6-90b7abe8c714 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] [instance: b1137be1-b66b-4eb2-bdbd-1db6173a1f93] Start building networks asynchronously for instance. {{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1070.237912] env[62070]: DEBUG oslo_vmware.api [None req-143965fc-7e83-4cd2-ba21-818f43f3748e tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Task: {'id': task-1122407, 'name': ReconfigVM_Task, 'duration_secs': 0.442661} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1070.238153] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-143965fc-7e83-4cd2-ba21-818f43f3748e tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] [instance: 5cccd79d-d243-49db-8581-718dd594f3b3] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-245509', 'volume_id': '73eaeb2a-8061-49f0-ba0e-4c1fc41e031f', 'name': 'volume-73eaeb2a-8061-49f0-ba0e-4c1fc41e031f', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '5cccd79d-d243-49db-8581-718dd594f3b3', 'attached_at': '', 'detached_at': '', 'volume_id': '73eaeb2a-8061-49f0-ba0e-4c1fc41e031f', 'serial': '73eaeb2a-8061-49f0-ba0e-4c1fc41e031f'} {{(pid=62070) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1070.313187] env[62070]: DEBUG oslo_concurrency.lockutils [None req-fe955260-aba3-482d-aeff-9c9a29ad6847 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1070.317517] env[62070]: DEBUG oslo_concurrency.lockutils [None req-fe955260-aba3-482d-aeff-9c9a29ad6847 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.005s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1070.317791] env[62070]: DEBUG oslo_concurrency.lockutils [None req-fe955260-aba3-482d-aeff-9c9a29ad6847 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1070.341984] env[62070]: DEBUG oslo_vmware.api [None req-7aa3c262-5cfe-48c5-be13-33f7281359fe tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Task: {'id': task-1122406, 'name': MoveVirtualDisk_Task} progress is 74%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1070.356643] env[62070]: INFO nova.scheduler.client.report [None req-fe955260-aba3-482d-aeff-9c9a29ad6847 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Deleted allocations for instance 33d04e59-da01-4ba3-ac42-ab93372a332d [ 1070.419600] env[62070]: DEBUG oslo_concurrency.lockutils [None req-c6161150-bb59-4411-9190-3af04d1ac149 tempest-AttachInterfacesUnderV243Test-1418654099 tempest-AttachInterfacesUnderV243Test-1418654099-project-member] Acquiring lock "refresh_cache-7bfda953-ac95-4dce-b7a7-c570eae35582" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1070.450232] env[62070]: DEBUG oslo_concurrency.lockutils [None req-29973fcc-09d9-4fbd-8b6b-60fb75981888 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Acquiring lock "27d9b478-7ebb-4313-a314-679ca0292086" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1070.450630] env[62070]: DEBUG oslo_concurrency.lockutils [None req-29973fcc-09d9-4fbd-8b6b-60fb75981888 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Lock "27d9b478-7ebb-4313-a314-679ca0292086" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1070.450775] env[62070]: DEBUG oslo_concurrency.lockutils [None req-29973fcc-09d9-4fbd-8b6b-60fb75981888 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Acquiring lock "27d9b478-7ebb-4313-a314-679ca0292086-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1070.450957] env[62070]: DEBUG oslo_concurrency.lockutils [None req-29973fcc-09d9-4fbd-8b6b-60fb75981888 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Lock "27d9b478-7ebb-4313-a314-679ca0292086-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1070.451256] env[62070]: DEBUG oslo_concurrency.lockutils [None req-29973fcc-09d9-4fbd-8b6b-60fb75981888 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Lock "27d9b478-7ebb-4313-a314-679ca0292086-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1070.453629] env[62070]: INFO nova.compute.manager [None req-29973fcc-09d9-4fbd-8b6b-60fb75981888 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: 27d9b478-7ebb-4313-a314-679ca0292086] Terminating instance [ 1070.455737] env[62070]: DEBUG nova.compute.manager [None req-29973fcc-09d9-4fbd-8b6b-60fb75981888 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: 27d9b478-7ebb-4313-a314-679ca0292086] Start destroying the instance on the 
hypervisor. {{(pid=62070) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1070.455950] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-29973fcc-09d9-4fbd-8b6b-60fb75981888 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: 27d9b478-7ebb-4313-a314-679ca0292086] Destroying instance {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1070.457043] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07d8fea9-2b7e-4c91-9730-78a931d71f20 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.468396] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-29973fcc-09d9-4fbd-8b6b-60fb75981888 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: 27d9b478-7ebb-4313-a314-679ca0292086] Unregistering the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1070.468712] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5f76ae9b-e09e-4e97-a507-739ab988b24b {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.490773] env[62070]: DEBUG oslo_vmware.api [None req-bc9a36f7-cb9c-494e-bbf1-e85b37f351b3 tempest-AttachInterfacesUnderV243Test-1418654099 tempest-AttachInterfacesUnderV243Test-1418654099-project-member] Task: {'id': task-1122408, 'name': ReconfigVM_Task, 'duration_secs': 0.194987} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1070.491123] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-bc9a36f7-cb9c-494e-bbf1-e85b37f351b3 tempest-AttachInterfacesUnderV243Test-1418654099 tempest-AttachInterfacesUnderV243Test-1418654099-project-member] [instance: 7bfda953-ac95-4dce-b7a7-c570eae35582] Reconfigured VM instance to set the machine id {{(pid=62070) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1805}} [ 1070.567666] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-29973fcc-09d9-4fbd-8b6b-60fb75981888 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: 27d9b478-7ebb-4313-a314-679ca0292086] Unregistered the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1070.567666] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-29973fcc-09d9-4fbd-8b6b-60fb75981888 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: 27d9b478-7ebb-4313-a314-679ca0292086] Deleting contents of the VM from datastore datastore2 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1070.567666] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-29973fcc-09d9-4fbd-8b6b-60fb75981888 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Deleting the datastore file [datastore2] 27d9b478-7ebb-4313-a314-679ca0292086 {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1070.567666] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-30fbe382-c8fe-4a70-8072-38d8fafe594c {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.576598] env[62070]: DEBUG oslo_vmware.api [None 
req-29973fcc-09d9-4fbd-8b6b-60fb75981888 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Waiting for the task: (returnval){ [ 1070.576598] env[62070]: value = "task-1122411" [ 1070.576598] env[62070]: _type = "Task" [ 1070.576598] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1070.586586] env[62070]: DEBUG oslo_vmware.api [None req-29973fcc-09d9-4fbd-8b6b-60fb75981888 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Task: {'id': task-1122411, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1070.745245] env[62070]: DEBUG nova.compute.utils [None req-0e627254-5e66-410e-83f6-90b7abe8c714 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] Using /dev/sd instead of None {{(pid=62070) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1070.748746] env[62070]: DEBUG nova.compute.manager [None req-0e627254-5e66-410e-83f6-90b7abe8c714 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] [instance: b1137be1-b66b-4eb2-bdbd-1db6173a1f93] Not allocating networking since 'none' was specified. {{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1977}} [ 1070.758623] env[62070]: DEBUG nova.network.neutron [req-5362e20b-f0d2-4a4c-870f-e3e26a67d1ba req-67762cba-edd2-40bf-beae-59c3a9bd29dc service nova] [instance: 7bfda953-ac95-4dce-b7a7-c570eae35582] Updated VIF entry in instance network info cache for port 8372f59a-de9e-4062-be0b-39996b8f6c9e. {{(pid=62070) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1070.759329] env[62070]: DEBUG nova.network.neutron [req-5362e20b-f0d2-4a4c-870f-e3e26a67d1ba req-67762cba-edd2-40bf-beae-59c3a9bd29dc service nova] [instance: 7bfda953-ac95-4dce-b7a7-c570eae35582] Updating instance_info_cache with network_info: [{"id": "8372f59a-de9e-4062-be0b-39996b8f6c9e", "address": "fa:16:3e:5b:b2:cf", "network": {"id": "b8a611af-59c0-4ce4-9889-55902a16c816", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-841643028-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.187", "type": "floating", "version": 4, "meta": {}}]}, {"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3587df64b22e4cfc8220532cdda18c28", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f4a795c-8718-4a7c-aafe-9da231df10f8", "external-id": "nsx-vlan-transportzone-162", "segmentation_id": 162, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8372f59a-de", "ovs_interfaceid": "8372f59a-de9e-4062-be0b-39996b8f6c9e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1070.835696] env[62070]: DEBUG 
oslo_vmware.api [None req-7aa3c262-5cfe-48c5-be13-33f7281359fe tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Task: {'id': task-1122406, 'name': MoveVirtualDisk_Task} progress is 94%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1070.865553] env[62070]: DEBUG oslo_concurrency.lockutils [None req-fe955260-aba3-482d-aeff-9c9a29ad6847 tempest-DeleteServersTestJSON-2112665073 tempest-DeleteServersTestJSON-2112665073-project-member] Lock "33d04e59-da01-4ba3-ac42-ab93372a332d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 3.668s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1071.087410] env[62070]: DEBUG oslo_vmware.api [None req-29973fcc-09d9-4fbd-8b6b-60fb75981888 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Task: {'id': task-1122411, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1071.248928] env[62070]: DEBUG nova.compute.manager [None req-0e627254-5e66-410e-83f6-90b7abe8c714 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] [instance: b1137be1-b66b-4eb2-bdbd-1db6173a1f93] Start building block device mappings for instance. {{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1071.261530] env[62070]: DEBUG oslo_concurrency.lockutils [req-5362e20b-f0d2-4a4c-870f-e3e26a67d1ba req-67762cba-edd2-40bf-beae-59c3a9bd29dc service nova] Releasing lock "refresh_cache-7bfda953-ac95-4dce-b7a7-c570eae35582" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1071.263043] env[62070]: DEBUG oslo_concurrency.lockutils [None req-c6161150-bb59-4411-9190-3af04d1ac149 tempest-AttachInterfacesUnderV243Test-1418654099 tempest-AttachInterfacesUnderV243Test-1418654099-project-member] Acquired lock "refresh_cache-7bfda953-ac95-4dce-b7a7-c570eae35582" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1071.285735] env[62070]: DEBUG nova.objects.instance [None req-143965fc-7e83-4cd2-ba21-818f43f3748e tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Lazy-loading 'flavor' on Instance uuid 5cccd79d-d243-49db-8581-718dd594f3b3 {{(pid=62070) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1071.336493] env[62070]: DEBUG oslo_vmware.api [None req-7aa3c262-5cfe-48c5-be13-33f7281359fe tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Task: {'id': task-1122406, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.71643} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1071.337206] env[62070]: INFO nova.virt.vmwareapi.ds_util [None req-7aa3c262-5cfe-48c5-be13-33f7281359fe tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Moved virtual disk from [datastore2] OSTACK_IMG_5f92d90f-3ab8-4300-b189-6acb0d204b76/OSTACK_IMG_5f92d90f-3ab8-4300-b189-6acb0d204b76.vmdk to [datastore2] devstack-image-cache_base/24ed4c28-b352-4867-857b-17f9624cc455/24ed4c28-b352-4867-857b-17f9624cc455.vmdk. 
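Editor's note: the recurring "Acquiring lock ..." / "Lock ... acquired ... waited Ns" / 'Lock ... "released" ... held Ns' triples in this trace (for example around "compute_resources" and the per-instance "refresh_cache-<uuid>" locks above) are emitted by oslo.concurrency's lockutils wrappers. The sketch below shows the two usual forms with lock names mirroring those in the log; the critical-section bodies are placeholders, not Nova code.

    from oslo_concurrency import lockutils

    # Decorator form: serializes callers on the named lock and logs the
    # acquire/release lines seen at lockutils.py:402/407/421 in this trace.
    @lockutils.synchronized('compute_resources')
    def claim_resources():
        pass  # e.g. resource-tracker bookkeeping would run here

    # Context-manager form, as used for the per-instance network cache locks
    # ("refresh_cache-<uuid>"); logs at lockutils.py:310/313/331.
    def refresh_cache(instance_uuid):
        with lockutils.lock('refresh_cache-%s' % instance_uuid):
            pass  # rebuild the instance network info cache under the lock

    claim_resources()
    refresh_cache('7bfda953-ac95-4dce-b7a7-c570eae35582')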
[ 1071.337364] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-7aa3c262-5cfe-48c5-be13-33f7281359fe tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] [instance: 53a1791d-38fd-4721-b82c-2f0922348300] Cleaning up location [datastore2] OSTACK_IMG_5f92d90f-3ab8-4300-b189-6acb0d204b76 {{(pid=62070) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1071.337546] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-7aa3c262-5cfe-48c5-be13-33f7281359fe tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Deleting the datastore file [datastore2] OSTACK_IMG_5f92d90f-3ab8-4300-b189-6acb0d204b76 {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1071.337789] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5f1d55a1-c5c9-48f0-b553-09d86932f617 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.346269] env[62070]: DEBUG oslo_vmware.api [None req-7aa3c262-5cfe-48c5-be13-33f7281359fe tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Waiting for the task: (returnval){ [ 1071.346269] env[62070]: value = "task-1122412" [ 1071.346269] env[62070]: _type = "Task" [ 1071.346269] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1071.355786] env[62070]: DEBUG oslo_vmware.api [None req-7aa3c262-5cfe-48c5-be13-33f7281359fe tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Task: {'id': task-1122412, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1071.590334] env[62070]: DEBUG oslo_vmware.api [None req-29973fcc-09d9-4fbd-8b6b-60fb75981888 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Task: {'id': task-1122411, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.725525} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1071.590334] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-29973fcc-09d9-4fbd-8b6b-60fb75981888 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Deleted the datastore file {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1071.590334] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-29973fcc-09d9-4fbd-8b6b-60fb75981888 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: 27d9b478-7ebb-4313-a314-679ca0292086] Deleted contents of the VM from datastore datastore2 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1071.590660] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-29973fcc-09d9-4fbd-8b6b-60fb75981888 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: 27d9b478-7ebb-4313-a314-679ca0292086] Instance destroyed {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1071.590660] env[62070]: INFO nova.compute.manager [None req-29973fcc-09d9-4fbd-8b6b-60fb75981888 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: 27d9b478-7ebb-4313-a314-679ca0292086] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1071.591788] env[62070]: DEBUG oslo.service.loopingcall [None req-29973fcc-09d9-4fbd-8b6b-60fb75981888 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1071.591788] env[62070]: DEBUG nova.compute.manager [-] [instance: 27d9b478-7ebb-4313-a314-679ca0292086] Deallocating network for instance {{(pid=62070) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1071.591788] env[62070]: DEBUG nova.network.neutron [-] [instance: 27d9b478-7ebb-4313-a314-679ca0292086] deallocate_for_instance() {{(pid=62070) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1071.602383] env[62070]: DEBUG nova.network.neutron [None req-c6161150-bb59-4411-9190-3af04d1ac149 tempest-AttachInterfacesUnderV243Test-1418654099 tempest-AttachInterfacesUnderV243Test-1418654099-project-member] [instance: 7bfda953-ac95-4dce-b7a7-c570eae35582] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1071.628385] env[62070]: DEBUG nova.compute.manager [req-72f8a120-fa61-4282-a95c-f31c5973bfba req-c6a4bce2-877e-41c5-88ac-3a166ce0a2c0 service nova] [instance: 7bfda953-ac95-4dce-b7a7-c570eae35582] Received event network-changed-8372f59a-de9e-4062-be0b-39996b8f6c9e {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1071.628606] env[62070]: DEBUG nova.compute.manager [req-72f8a120-fa61-4282-a95c-f31c5973bfba req-c6a4bce2-877e-41c5-88ac-3a166ce0a2c0 service nova] [instance: 7bfda953-ac95-4dce-b7a7-c570eae35582] Refreshing instance network info cache due to event network-changed-8372f59a-de9e-4062-be0b-39996b8f6c9e. 
{{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1071.628806] env[62070]: DEBUG oslo_concurrency.lockutils [req-72f8a120-fa61-4282-a95c-f31c5973bfba req-c6a4bce2-877e-41c5-88ac-3a166ce0a2c0 service nova] Acquiring lock "refresh_cache-7bfda953-ac95-4dce-b7a7-c570eae35582" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1071.792133] env[62070]: DEBUG oslo_concurrency.lockutils [None req-143965fc-7e83-4cd2-ba21-818f43f3748e tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Lock "5cccd79d-d243-49db-8581-718dd594f3b3" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 8.303s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1071.857409] env[62070]: DEBUG oslo_vmware.api [None req-7aa3c262-5cfe-48c5-be13-33f7281359fe tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Task: {'id': task-1122412, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.181917} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1071.858580] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-7aa3c262-5cfe-48c5-be13-33f7281359fe tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Deleted the datastore file {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1071.858770] env[62070]: DEBUG oslo_concurrency.lockutils [None req-7aa3c262-5cfe-48c5-be13-33f7281359fe tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Releasing lock "[datastore2] devstack-image-cache_base/24ed4c28-b352-4867-857b-17f9624cc455/24ed4c28-b352-4867-857b-17f9624cc455.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1071.859311] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-7aa3c262-5cfe-48c5-be13-33f7281359fe tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/24ed4c28-b352-4867-857b-17f9624cc455/24ed4c28-b352-4867-857b-17f9624cc455.vmdk to [datastore2] 53a1791d-38fd-4721-b82c-2f0922348300/53a1791d-38fd-4721-b82c-2f0922348300.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1071.860233] env[62070]: DEBUG nova.compute.manager [req-4f03aabd-fd59-4cbd-b21c-b3ffbd74e7e0 req-a255a01e-e664-4148-9168-88fc1f4cd633 service nova] [instance: 27d9b478-7ebb-4313-a314-679ca0292086] Received event network-vif-deleted-89776caa-2b48-4f16-8206-436d2f129585 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1071.860426] env[62070]: INFO nova.compute.manager [req-4f03aabd-fd59-4cbd-b21c-b3ffbd74e7e0 req-a255a01e-e664-4148-9168-88fc1f4cd633 service nova] [instance: 27d9b478-7ebb-4313-a314-679ca0292086] Neutron deleted interface 89776caa-2b48-4f16-8206-436d2f129585; detaching it from the instance and deleting it from the info cache [ 1071.860619] env[62070]: DEBUG nova.network.neutron [req-4f03aabd-fd59-4cbd-b21c-b3ffbd74e7e0 req-a255a01e-e664-4148-9168-88fc1f4cd633 service nova] [instance: 27d9b478-7ebb-4313-a314-679ca0292086] Updating instance_info_cache with network_info: [] {{(pid=62070) 
update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1071.861872] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7a55c2af-3d9d-4e5b-97c5-02c697d41037 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.871378] env[62070]: DEBUG oslo_vmware.api [None req-7aa3c262-5cfe-48c5-be13-33f7281359fe tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Waiting for the task: (returnval){ [ 1071.871378] env[62070]: value = "task-1122414" [ 1071.871378] env[62070]: _type = "Task" [ 1071.871378] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1071.882256] env[62070]: DEBUG oslo_vmware.api [None req-7aa3c262-5cfe-48c5-be13-33f7281359fe tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Task: {'id': task-1122414, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1072.247620] env[62070]: DEBUG oslo_concurrency.lockutils [None req-fc4145e3-bd31-4881-b5f2-e34b24eeef20 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Acquiring lock "5cccd79d-d243-49db-8581-718dd594f3b3" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1072.247841] env[62070]: DEBUG oslo_concurrency.lockutils [None req-fc4145e3-bd31-4881-b5f2-e34b24eeef20 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Lock "5cccd79d-d243-49db-8581-718dd594f3b3" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1072.257070] env[62070]: DEBUG nova.compute.manager [None req-0e627254-5e66-410e-83f6-90b7abe8c714 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] [instance: b1137be1-b66b-4eb2-bdbd-1db6173a1f93] Start spawning the instance on the hypervisor. 
{{(pid=62070) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1072.290440] env[62070]: DEBUG nova.virt.hardware [None req-0e627254-5e66-410e-83f6-90b7abe8c714 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T09:21:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T09:21:17Z,direct_url=,disk_format='vmdk',id=43ea607c-7ece-4601-9b11-75c6a16aa7dd,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9d42cb2bbadf40d6b35f237f71234611',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T09:21:18Z,virtual_size=,visibility=), allow threads: False {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1072.290726] env[62070]: DEBUG nova.virt.hardware [None req-0e627254-5e66-410e-83f6-90b7abe8c714 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] Flavor limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1072.290866] env[62070]: DEBUG nova.virt.hardware [None req-0e627254-5e66-410e-83f6-90b7abe8c714 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] Image limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1072.291065] env[62070]: DEBUG nova.virt.hardware [None req-0e627254-5e66-410e-83f6-90b7abe8c714 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] Flavor pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1072.291223] env[62070]: DEBUG nova.virt.hardware [None req-0e627254-5e66-410e-83f6-90b7abe8c714 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] Image pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1072.291375] env[62070]: DEBUG nova.virt.hardware [None req-0e627254-5e66-410e-83f6-90b7abe8c714 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1072.291592] env[62070]: DEBUG nova.virt.hardware [None req-0e627254-5e66-410e-83f6-90b7abe8c714 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1072.291758] env[62070]: DEBUG nova.virt.hardware [None req-0e627254-5e66-410e-83f6-90b7abe8c714 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1072.291931] env[62070]: DEBUG nova.virt.hardware [None req-0e627254-5e66-410e-83f6-90b7abe8c714 
tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] Got 1 possible topologies {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1072.292121] env[62070]: DEBUG nova.virt.hardware [None req-0e627254-5e66-410e-83f6-90b7abe8c714 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1072.292314] env[62070]: DEBUG nova.virt.hardware [None req-0e627254-5e66-410e-83f6-90b7abe8c714 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1072.293263] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f7db0cd-3afc-408d-bd18-54c13f41acbf {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.306064] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd4c9279-9453-4628-bddd-24c34f385105 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.321452] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-0e627254-5e66-410e-83f6-90b7abe8c714 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] [instance: b1137be1-b66b-4eb2-bdbd-1db6173a1f93] Instance VIF info [] {{(pid=62070) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1072.328230] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-0e627254-5e66-410e-83f6-90b7abe8c714 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] Creating folder: Project (3ba039e28e0146bfbc403b23f73d6b87). Parent ref: group-v245319. {{(pid=62070) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1072.328731] env[62070]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-90c09cab-6634-4005-8e4d-0fa7471e329d {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.338993] env[62070]: DEBUG nova.network.neutron [-] [instance: 27d9b478-7ebb-4313-a314-679ca0292086] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1072.344583] env[62070]: INFO nova.virt.vmwareapi.vm_util [None req-0e627254-5e66-410e-83f6-90b7abe8c714 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] Created folder: Project (3ba039e28e0146bfbc403b23f73d6b87) in parent group-v245319. [ 1072.344583] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-0e627254-5e66-410e-83f6-90b7abe8c714 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] Creating folder: Instances. Parent ref: group-v245512. 
{{(pid=62070) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1072.344583] env[62070]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-50e55fd9-7fce-485b-9e3e-2b440243cd44 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.357101] env[62070]: INFO nova.virt.vmwareapi.vm_util [None req-0e627254-5e66-410e-83f6-90b7abe8c714 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] Created folder: Instances in parent group-v245512. [ 1072.357101] env[62070]: DEBUG oslo.service.loopingcall [None req-0e627254-5e66-410e-83f6-90b7abe8c714 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1072.357937] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b1137be1-b66b-4eb2-bdbd-1db6173a1f93] Creating VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1072.358207] env[62070]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1e8865ba-18ed-45d7-a9db-6b38e9050c93 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.377118] env[62070]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0acc4056-1171-4151-810f-3e829b86b679 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.384427] env[62070]: DEBUG nova.network.neutron [None req-c6161150-bb59-4411-9190-3af04d1ac149 tempest-AttachInterfacesUnderV243Test-1418654099 tempest-AttachInterfacesUnderV243Test-1418654099-project-member] [instance: 7bfda953-ac95-4dce-b7a7-c570eae35582] Updating instance_info_cache with network_info: [{"id": "8372f59a-de9e-4062-be0b-39996b8f6c9e", "address": "fa:16:3e:5b:b2:cf", "network": {"id": "b8a611af-59c0-4ce4-9889-55902a16c816", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-841643028-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.187", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3587df64b22e4cfc8220532cdda18c28", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f4a795c-8718-4a7c-aafe-9da231df10f8", "external-id": "nsx-vlan-transportzone-162", "segmentation_id": 162, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8372f59a-de", "ovs_interfaceid": "8372f59a-de9e-4062-be0b-39996b8f6c9e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1072.390507] env[62070]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1072.390507] env[62070]: value = "task-1122417" [ 1072.390507] env[62070]: _type = "Task" [ 
1072.390507] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1072.395362] env[62070]: DEBUG oslo_vmware.api [None req-7aa3c262-5cfe-48c5-be13-33f7281359fe tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Task: {'id': task-1122414, 'name': CopyVirtualDisk_Task} progress is 12%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1072.401459] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-473be3b8-bc64-4b0f-8885-fc713cc2a52b {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.421698] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1122417, 'name': CreateVM_Task} progress is 6%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1072.442202] env[62070]: DEBUG nova.compute.manager [req-4f03aabd-fd59-4cbd-b21c-b3ffbd74e7e0 req-a255a01e-e664-4148-9168-88fc1f4cd633 service nova] [instance: 27d9b478-7ebb-4313-a314-679ca0292086] Detach interface failed, port_id=89776caa-2b48-4f16-8206-436d2f129585, reason: Instance 27d9b478-7ebb-4313-a314-679ca0292086 could not be found. {{(pid=62070) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1072.753189] env[62070]: INFO nova.compute.manager [None req-fc4145e3-bd31-4881-b5f2-e34b24eeef20 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] [instance: 5cccd79d-d243-49db-8581-718dd594f3b3] Detaching volume e25cbba2-7a89-41cf-8f0c-a38272da6f0a [ 1072.790897] env[62070]: INFO nova.virt.block_device [None req-fc4145e3-bd31-4881-b5f2-e34b24eeef20 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] [instance: 5cccd79d-d243-49db-8581-718dd594f3b3] Attempting to driver detach volume e25cbba2-7a89-41cf-8f0c-a38272da6f0a from mountpoint /dev/sdb [ 1072.791295] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-fc4145e3-bd31-4881-b5f2-e34b24eeef20 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] [instance: 5cccd79d-d243-49db-8581-718dd594f3b3] Volume detach. 
Driver type: vmdk {{(pid=62070) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1072.791563] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-fc4145e3-bd31-4881-b5f2-e34b24eeef20 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] [instance: 5cccd79d-d243-49db-8581-718dd594f3b3] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-245502', 'volume_id': 'e25cbba2-7a89-41cf-8f0c-a38272da6f0a', 'name': 'volume-e25cbba2-7a89-41cf-8f0c-a38272da6f0a', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '5cccd79d-d243-49db-8581-718dd594f3b3', 'attached_at': '', 'detached_at': '', 'volume_id': 'e25cbba2-7a89-41cf-8f0c-a38272da6f0a', 'serial': 'e25cbba2-7a89-41cf-8f0c-a38272da6f0a'} {{(pid=62070) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1072.792556] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e46d82d8-4fcd-4873-af16-cb0deca95368 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.824617] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b533a206-8f38-4747-ab93-146c6e827e49 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.835317] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9abc46c2-2ffd-4868-88f8-266722d5924f {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.843118] env[62070]: INFO nova.compute.manager [-] [instance: 27d9b478-7ebb-4313-a314-679ca0292086] Took 1.25 seconds to deallocate network for instance. [ 1072.866854] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4af851a-9bc1-4b53-ab7e-2832703cf927 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.887375] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-fc4145e3-bd31-4881-b5f2-e34b24eeef20 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] The volume has not been displaced from its original location: [datastore2] volume-e25cbba2-7a89-41cf-8f0c-a38272da6f0a/volume-e25cbba2-7a89-41cf-8f0c-a38272da6f0a.vmdk. No consolidation needed. 
{{(pid=62070) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1072.892866] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-fc4145e3-bd31-4881-b5f2-e34b24eeef20 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] [instance: 5cccd79d-d243-49db-8581-718dd594f3b3] Reconfiguring VM instance instance-0000005f to detach disk 2001 {{(pid=62070) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1072.897221] env[62070]: DEBUG oslo_concurrency.lockutils [None req-c6161150-bb59-4411-9190-3af04d1ac149 tempest-AttachInterfacesUnderV243Test-1418654099 tempest-AttachInterfacesUnderV243Test-1418654099-project-member] Releasing lock "refresh_cache-7bfda953-ac95-4dce-b7a7-c570eae35582" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1072.897450] env[62070]: DEBUG nova.compute.manager [None req-c6161150-bb59-4411-9190-3af04d1ac149 tempest-AttachInterfacesUnderV243Test-1418654099 tempest-AttachInterfacesUnderV243Test-1418654099-project-member] [instance: 7bfda953-ac95-4dce-b7a7-c570eae35582] Inject network info {{(pid=62070) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7219}} [ 1072.897719] env[62070]: DEBUG nova.compute.manager [None req-c6161150-bb59-4411-9190-3af04d1ac149 tempest-AttachInterfacesUnderV243Test-1418654099 tempest-AttachInterfacesUnderV243Test-1418654099-project-member] [instance: 7bfda953-ac95-4dce-b7a7-c570eae35582] network_info to inject: |[{"id": "8372f59a-de9e-4062-be0b-39996b8f6c9e", "address": "fa:16:3e:5b:b2:cf", "network": {"id": "b8a611af-59c0-4ce4-9889-55902a16c816", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-841643028-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.187", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3587df64b22e4cfc8220532cdda18c28", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f4a795c-8718-4a7c-aafe-9da231df10f8", "external-id": "nsx-vlan-transportzone-162", "segmentation_id": 162, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8372f59a-de", "ovs_interfaceid": "8372f59a-de9e-4062-be0b-39996b8f6c9e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62070) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7220}} [ 1072.902439] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-c6161150-bb59-4411-9190-3af04d1ac149 tempest-AttachInterfacesUnderV243Test-1418654099 tempest-AttachInterfacesUnderV243Test-1418654099-project-member] [instance: 7bfda953-ac95-4dce-b7a7-c570eae35582] Reconfiguring VM instance to set the machine id {{(pid=62070) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1802}} [ 1072.902745] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-54f2851e-c1a2-4ba1-8f0a-74e1f65d63e4 {{(pid=62070) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.717766] env[62070]: DEBUG oslo_concurrency.lockutils [req-72f8a120-fa61-4282-a95c-f31c5973bfba req-c6a4bce2-877e-41c5-88ac-3a166ce0a2c0 service nova] Acquired lock "refresh_cache-7bfda953-ac95-4dce-b7a7-c570eae35582" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1073.717992] env[62070]: DEBUG nova.network.neutron [req-72f8a120-fa61-4282-a95c-f31c5973bfba req-c6a4bce2-877e-41c5-88ac-3a166ce0a2c0 service nova] [instance: 7bfda953-ac95-4dce-b7a7-c570eae35582] Refreshing network info cache for port 8372f59a-de9e-4062-be0b-39996b8f6c9e {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1073.719361] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-012c66ad-a772-4179-a701-dcf2284b9939 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.731033] env[62070]: DEBUG oslo_concurrency.lockutils [None req-29973fcc-09d9-4fbd-8b6b-60fb75981888 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1073.731610] env[62070]: DEBUG oslo_concurrency.lockutils [None req-29973fcc-09d9-4fbd-8b6b-60fb75981888 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1073.731610] env[62070]: DEBUG nova.objects.instance [None req-29973fcc-09d9-4fbd-8b6b-60fb75981888 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Lazy-loading 'resources' on Instance uuid 27d9b478-7ebb-4313-a314-679ca0292086 {{(pid=62070) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1073.749243] env[62070]: DEBUG oslo_vmware.api [None req-7aa3c262-5cfe-48c5-be13-33f7281359fe tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Task: {'id': task-1122414, 'name': CopyVirtualDisk_Task} progress is 69%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1073.749437] env[62070]: WARNING oslo_vmware.common.loopingcall [None req-7aa3c262-5cfe-48c5-be13-33f7281359fe tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] task run outlasted interval by 0.36236900000000005 sec [ 1073.755667] env[62070]: DEBUG oslo_vmware.api [None req-fc4145e3-bd31-4881-b5f2-e34b24eeef20 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Waiting for the task: (returnval){ [ 1073.755667] env[62070]: value = "task-1122419" [ 1073.755667] env[62070]: _type = "Task" [ 1073.755667] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1073.756135] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1122417, 'name': CreateVM_Task, 'duration_secs': 0.333244} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1073.756185] env[62070]: DEBUG oslo_vmware.api [None req-c6161150-bb59-4411-9190-3af04d1ac149 tempest-AttachInterfacesUnderV243Test-1418654099 tempest-AttachInterfacesUnderV243Test-1418654099-project-member] Waiting for the task: (returnval){ [ 1073.756185] env[62070]: value = "task-1122420" [ 1073.756185] env[62070]: _type = "Task" [ 1073.756185] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1073.759324] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b1137be1-b66b-4eb2-bdbd-1db6173a1f93] Created VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1073.763355] env[62070]: DEBUG oslo_concurrency.lockutils [None req-0e627254-5e66-410e-83f6-90b7abe8c714 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1073.763579] env[62070]: DEBUG oslo_concurrency.lockutils [None req-0e627254-5e66-410e-83f6-90b7abe8c714 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] Acquired lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1073.763913] env[62070]: DEBUG oslo_concurrency.lockutils [None req-0e627254-5e66-410e-83f6-90b7abe8c714 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1073.768209] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9fa69cdd-14be-4a4d-b047-8b0feec59863 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.774628] env[62070]: DEBUG oslo_vmware.api [None req-7aa3c262-5cfe-48c5-be13-33f7281359fe tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Task: {'id': task-1122414, 'name': CopyVirtualDisk_Task} progress is 69%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1073.782877] env[62070]: DEBUG oslo_vmware.api [None req-fc4145e3-bd31-4881-b5f2-e34b24eeef20 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Task: {'id': task-1122419, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1073.783097] env[62070]: DEBUG oslo_vmware.api [None req-c6161150-bb59-4411-9190-3af04d1ac149 tempest-AttachInterfacesUnderV243Test-1418654099 tempest-AttachInterfacesUnderV243Test-1418654099-project-member] Task: {'id': task-1122420, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1073.784517] env[62070]: DEBUG oslo_vmware.api [None req-0e627254-5e66-410e-83f6-90b7abe8c714 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] Waiting for the task: (returnval){ [ 1073.784517] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]5295a989-6250-d08a-d4e9-79ec47557b3e" [ 1073.784517] env[62070]: _type = "Task" [ 1073.784517] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1073.795975] env[62070]: DEBUG oslo_vmware.api [None req-0e627254-5e66-410e-83f6-90b7abe8c714 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]5295a989-6250-d08a-d4e9-79ec47557b3e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1074.242488] env[62070]: DEBUG oslo_concurrency.lockutils [None req-4c1d7cd7-ce0f-4faf-ad4d-d5d0f04dde13 tempest-AttachInterfacesUnderV243Test-1418654099 tempest-AttachInterfacesUnderV243Test-1418654099-project-member] Acquiring lock "7bfda953-ac95-4dce-b7a7-c570eae35582" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1074.242748] env[62070]: DEBUG oslo_concurrency.lockutils [None req-4c1d7cd7-ce0f-4faf-ad4d-d5d0f04dde13 tempest-AttachInterfacesUnderV243Test-1418654099 tempest-AttachInterfacesUnderV243Test-1418654099-project-member] Lock "7bfda953-ac95-4dce-b7a7-c570eae35582" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1074.242959] env[62070]: DEBUG oslo_concurrency.lockutils [None req-4c1d7cd7-ce0f-4faf-ad4d-d5d0f04dde13 tempest-AttachInterfacesUnderV243Test-1418654099 tempest-AttachInterfacesUnderV243Test-1418654099-project-member] Acquiring lock "7bfda953-ac95-4dce-b7a7-c570eae35582-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1074.243168] env[62070]: DEBUG oslo_concurrency.lockutils [None req-4c1d7cd7-ce0f-4faf-ad4d-d5d0f04dde13 tempest-AttachInterfacesUnderV243Test-1418654099 tempest-AttachInterfacesUnderV243Test-1418654099-project-member] Lock "7bfda953-ac95-4dce-b7a7-c570eae35582-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1074.243346] env[62070]: DEBUG oslo_concurrency.lockutils [None req-4c1d7cd7-ce0f-4faf-ad4d-d5d0f04dde13 tempest-AttachInterfacesUnderV243Test-1418654099 tempest-AttachInterfacesUnderV243Test-1418654099-project-member] Lock "7bfda953-ac95-4dce-b7a7-c570eae35582-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1074.245545] env[62070]: INFO nova.compute.manager [None req-4c1d7cd7-ce0f-4faf-ad4d-d5d0f04dde13 
tempest-AttachInterfacesUnderV243Test-1418654099 tempest-AttachInterfacesUnderV243Test-1418654099-project-member] [instance: 7bfda953-ac95-4dce-b7a7-c570eae35582] Terminating instance [ 1074.247692] env[62070]: DEBUG nova.compute.manager [None req-4c1d7cd7-ce0f-4faf-ad4d-d5d0f04dde13 tempest-AttachInterfacesUnderV243Test-1418654099 tempest-AttachInterfacesUnderV243Test-1418654099-project-member] [instance: 7bfda953-ac95-4dce-b7a7-c570eae35582] Start destroying the instance on the hypervisor. {{(pid=62070) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1074.247983] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-4c1d7cd7-ce0f-4faf-ad4d-d5d0f04dde13 tempest-AttachInterfacesUnderV243Test-1418654099 tempest-AttachInterfacesUnderV243Test-1418654099-project-member] [instance: 7bfda953-ac95-4dce-b7a7-c570eae35582] Destroying instance {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1074.248907] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbea4dee-000b-4ccc-91da-aff0c8dc55e8 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.261529] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-4c1d7cd7-ce0f-4faf-ad4d-d5d0f04dde13 tempest-AttachInterfacesUnderV243Test-1418654099 tempest-AttachInterfacesUnderV243Test-1418654099-project-member] [instance: 7bfda953-ac95-4dce-b7a7-c570eae35582] Powering off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1074.270977] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0b9583a1-acd5-46ca-b77e-679339b4bfb0 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.279158] env[62070]: DEBUG oslo_vmware.api [None req-7aa3c262-5cfe-48c5-be13-33f7281359fe tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Task: {'id': task-1122414, 'name': CopyVirtualDisk_Task} progress is 88%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1074.287504] env[62070]: DEBUG oslo_vmware.api [None req-c6161150-bb59-4411-9190-3af04d1ac149 tempest-AttachInterfacesUnderV243Test-1418654099 tempest-AttachInterfacesUnderV243Test-1418654099-project-member] Task: {'id': task-1122420, 'name': ReconfigVM_Task, 'duration_secs': 0.166685} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1074.298895] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-c6161150-bb59-4411-9190-3af04d1ac149 tempest-AttachInterfacesUnderV243Test-1418654099 tempest-AttachInterfacesUnderV243Test-1418654099-project-member] [instance: 7bfda953-ac95-4dce-b7a7-c570eae35582] Reconfigured VM instance to set the machine id {{(pid=62070) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1805}} [ 1074.301916] env[62070]: DEBUG oslo_vmware.api [None req-fc4145e3-bd31-4881-b5f2-e34b24eeef20 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Task: {'id': task-1122419, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1074.302094] env[62070]: DEBUG oslo_vmware.api [None req-4c1d7cd7-ce0f-4faf-ad4d-d5d0f04dde13 tempest-AttachInterfacesUnderV243Test-1418654099 tempest-AttachInterfacesUnderV243Test-1418654099-project-member] Waiting for the task: (returnval){ [ 1074.302094] env[62070]: value = "task-1122421" [ 1074.302094] env[62070]: _type = "Task" [ 1074.302094] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1074.311020] env[62070]: DEBUG oslo_vmware.api [None req-0e627254-5e66-410e-83f6-90b7abe8c714 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]5295a989-6250-d08a-d4e9-79ec47557b3e, 'name': SearchDatastore_Task, 'duration_secs': 0.025079} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1074.311948] env[62070]: DEBUG oslo_concurrency.lockutils [None req-0e627254-5e66-410e-83f6-90b7abe8c714 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] Releasing lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1074.312279] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-0e627254-5e66-410e-83f6-90b7abe8c714 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] [instance: b1137be1-b66b-4eb2-bdbd-1db6173a1f93] Processing image 43ea607c-7ece-4601-9b11-75c6a16aa7dd {{(pid=62070) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1074.312543] env[62070]: DEBUG oslo_concurrency.lockutils [None req-0e627254-5e66-410e-83f6-90b7abe8c714 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1074.312730] env[62070]: DEBUG oslo_concurrency.lockutils [None req-0e627254-5e66-410e-83f6-90b7abe8c714 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] Acquired lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1074.313070] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-0e627254-5e66-410e-83f6-90b7abe8c714 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1074.316100] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f81dd42b-4a03-4db6-a15c-c8d0f2685c6b {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.321750] env[62070]: DEBUG oslo_vmware.api [None req-4c1d7cd7-ce0f-4faf-ad4d-d5d0f04dde13 tempest-AttachInterfacesUnderV243Test-1418654099 tempest-AttachInterfacesUnderV243Test-1418654099-project-member] Task: {'id': task-1122421, 
'name': PowerOffVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1074.332277] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-0e627254-5e66-410e-83f6-90b7abe8c714 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1074.332578] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-0e627254-5e66-410e-83f6-90b7abe8c714 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62070) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1074.335941] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1ccf4733-22b1-4e9b-9702-e036267e2640 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.348923] env[62070]: DEBUG oslo_vmware.api [None req-0e627254-5e66-410e-83f6-90b7abe8c714 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] Waiting for the task: (returnval){ [ 1074.348923] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]525ade69-6153-9031-5162-3acf5c295d1c" [ 1074.348923] env[62070]: _type = "Task" [ 1074.348923] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1074.361109] env[62070]: DEBUG oslo_vmware.api [None req-0e627254-5e66-410e-83f6-90b7abe8c714 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]525ade69-6153-9031-5162-3acf5c295d1c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1074.465702] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4ed1b20-8119-42f7-8cb1-a83d8f50fe24 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.475178] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eeaf0ce8-5e33-4ae6-8332-698f5b15cc50 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.513373] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f660ca75-9a52-46cb-8e34-f0afd3b6bf87 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.523042] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64ea2686-854f-4aa8-a4de-6d1ff4bb941f {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.537592] env[62070]: DEBUG nova.compute.provider_tree [None req-29973fcc-09d9-4fbd-8b6b-60fb75981888 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1074.552496] env[62070]: DEBUG nova.network.neutron [req-72f8a120-fa61-4282-a95c-f31c5973bfba req-c6a4bce2-877e-41c5-88ac-3a166ce0a2c0 service nova] [instance: 7bfda953-ac95-4dce-b7a7-c570eae35582] Updated VIF entry in instance network info cache for port 8372f59a-de9e-4062-be0b-39996b8f6c9e. 
{{(pid=62070) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1074.552913] env[62070]: DEBUG nova.network.neutron [req-72f8a120-fa61-4282-a95c-f31c5973bfba req-c6a4bce2-877e-41c5-88ac-3a166ce0a2c0 service nova] [instance: 7bfda953-ac95-4dce-b7a7-c570eae35582] Updating instance_info_cache with network_info: [{"id": "8372f59a-de9e-4062-be0b-39996b8f6c9e", "address": "fa:16:3e:5b:b2:cf", "network": {"id": "b8a611af-59c0-4ce4-9889-55902a16c816", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-841643028-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.187", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3587df64b22e4cfc8220532cdda18c28", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f4a795c-8718-4a7c-aafe-9da231df10f8", "external-id": "nsx-vlan-transportzone-162", "segmentation_id": 162, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8372f59a-de", "ovs_interfaceid": "8372f59a-de9e-4062-be0b-39996b8f6c9e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1074.769671] env[62070]: DEBUG oslo_vmware.api [None req-7aa3c262-5cfe-48c5-be13-33f7281359fe tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Task: {'id': task-1122414, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.719325} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1074.769671] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-7aa3c262-5cfe-48c5-be13-33f7281359fe tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/24ed4c28-b352-4867-857b-17f9624cc455/24ed4c28-b352-4867-857b-17f9624cc455.vmdk to [datastore2] 53a1791d-38fd-4721-b82c-2f0922348300/53a1791d-38fd-4721-b82c-2f0922348300.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 1074.770101] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44782122-f57a-415b-9230-35bd12689375 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.775676] env[62070]: DEBUG oslo_vmware.api [None req-fc4145e3-bd31-4881-b5f2-e34b24eeef20 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Task: {'id': task-1122419, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1074.797101] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-7aa3c262-5cfe-48c5-be13-33f7281359fe tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] [instance: 53a1791d-38fd-4721-b82c-2f0922348300] Reconfiguring VM instance instance-00000057 to attach disk [datastore2] 53a1791d-38fd-4721-b82c-2f0922348300/53a1791d-38fd-4721-b82c-2f0922348300.vmdk or device None with type streamOptimized {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1074.797799] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e2308e13-cb18-4f36-b036-50a3ad6ed118 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.823930] env[62070]: DEBUG oslo_vmware.api [None req-4c1d7cd7-ce0f-4faf-ad4d-d5d0f04dde13 tempest-AttachInterfacesUnderV243Test-1418654099 tempest-AttachInterfacesUnderV243Test-1418654099-project-member] Task: {'id': task-1122421, 'name': PowerOffVM_Task, 'duration_secs': 0.238568} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1074.825285] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-4c1d7cd7-ce0f-4faf-ad4d-d5d0f04dde13 tempest-AttachInterfacesUnderV243Test-1418654099 tempest-AttachInterfacesUnderV243Test-1418654099-project-member] [instance: 7bfda953-ac95-4dce-b7a7-c570eae35582] Powered off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1074.825541] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-4c1d7cd7-ce0f-4faf-ad4d-d5d0f04dde13 tempest-AttachInterfacesUnderV243Test-1418654099 tempest-AttachInterfacesUnderV243Test-1418654099-project-member] [instance: 7bfda953-ac95-4dce-b7a7-c570eae35582] Unregistering the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1074.826215] env[62070]: DEBUG oslo_vmware.api [None req-7aa3c262-5cfe-48c5-be13-33f7281359fe tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Waiting for the task: (returnval){ [ 1074.826215] env[62070]: value = "task-1122422" [ 1074.826215] env[62070]: _type = "Task" [ 1074.826215] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1074.826513] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e7d8f908-bd82-45fc-bc9b-c0cab44768d8 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.836119] env[62070]: DEBUG oslo_vmware.api [None req-7aa3c262-5cfe-48c5-be13-33f7281359fe tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Task: {'id': task-1122422, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1074.861109] env[62070]: DEBUG oslo_vmware.api [None req-0e627254-5e66-410e-83f6-90b7abe8c714 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]525ade69-6153-9031-5162-3acf5c295d1c, 'name': SearchDatastore_Task, 'duration_secs': 0.024117} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1074.861942] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4a45f04d-1d90-45eb-ba24-9855046adc9d {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.868818] env[62070]: DEBUG oslo_vmware.api [None req-0e627254-5e66-410e-83f6-90b7abe8c714 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] Waiting for the task: (returnval){ [ 1074.868818] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]5290914c-39e8-049a-0aec-8e2054a501dc" [ 1074.868818] env[62070]: _type = "Task" [ 1074.868818] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1074.877567] env[62070]: DEBUG oslo_vmware.api [None req-0e627254-5e66-410e-83f6-90b7abe8c714 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]5290914c-39e8-049a-0aec-8e2054a501dc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1074.914175] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-4c1d7cd7-ce0f-4faf-ad4d-d5d0f04dde13 tempest-AttachInterfacesUnderV243Test-1418654099 tempest-AttachInterfacesUnderV243Test-1418654099-project-member] [instance: 7bfda953-ac95-4dce-b7a7-c570eae35582] Unregistered the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1074.914175] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-4c1d7cd7-ce0f-4faf-ad4d-d5d0f04dde13 tempest-AttachInterfacesUnderV243Test-1418654099 tempest-AttachInterfacesUnderV243Test-1418654099-project-member] [instance: 7bfda953-ac95-4dce-b7a7-c570eae35582] Deleting contents of the VM from datastore datastore1 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1074.914175] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-4c1d7cd7-ce0f-4faf-ad4d-d5d0f04dde13 tempest-AttachInterfacesUnderV243Test-1418654099 tempest-AttachInterfacesUnderV243Test-1418654099-project-member] Deleting the datastore file [datastore1] 7bfda953-ac95-4dce-b7a7-c570eae35582 {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1074.914175] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e0486eb5-101f-4f8d-abac-79c9f6f886c1 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.921569] env[62070]: DEBUG oslo_vmware.api [None req-4c1d7cd7-ce0f-4faf-ad4d-d5d0f04dde13 tempest-AttachInterfacesUnderV243Test-1418654099 tempest-AttachInterfacesUnderV243Test-1418654099-project-member] Waiting for the task: (returnval){ [ 1074.921569] env[62070]: value = "task-1122424" [ 1074.921569] env[62070]: _type = "Task" [ 1074.921569] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1074.930857] env[62070]: DEBUG oslo_vmware.api [None req-4c1d7cd7-ce0f-4faf-ad4d-d5d0f04dde13 tempest-AttachInterfacesUnderV243Test-1418654099 tempest-AttachInterfacesUnderV243Test-1418654099-project-member] Task: {'id': task-1122424, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1075.041502] env[62070]: DEBUG nova.scheduler.client.report [None req-29973fcc-09d9-4fbd-8b6b-60fb75981888 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1075.056376] env[62070]: DEBUG oslo_concurrency.lockutils [req-72f8a120-fa61-4282-a95c-f31c5973bfba req-c6a4bce2-877e-41c5-88ac-3a166ce0a2c0 service nova] Releasing lock "refresh_cache-7bfda953-ac95-4dce-b7a7-c570eae35582" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1075.274343] env[62070]: DEBUG oslo_vmware.api [None req-fc4145e3-bd31-4881-b5f2-e34b24eeef20 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Task: {'id': task-1122419, 'name': ReconfigVM_Task, 'duration_secs': 1.190253} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1075.274685] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-fc4145e3-bd31-4881-b5f2-e34b24eeef20 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] [instance: 5cccd79d-d243-49db-8581-718dd594f3b3] Reconfigured VM instance instance-0000005f to detach disk 2001 {{(pid=62070) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1075.279641] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6802244c-5773-428c-9657-80166abaeb18 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.296097] env[62070]: DEBUG oslo_vmware.api [None req-fc4145e3-bd31-4881-b5f2-e34b24eeef20 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Waiting for the task: (returnval){ [ 1075.296097] env[62070]: value = "task-1122425" [ 1075.296097] env[62070]: _type = "Task" [ 1075.296097] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1075.304969] env[62070]: DEBUG oslo_vmware.api [None req-fc4145e3-bd31-4881-b5f2-e34b24eeef20 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Task: {'id': task-1122425, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1075.338102] env[62070]: DEBUG oslo_vmware.api [None req-7aa3c262-5cfe-48c5-be13-33f7281359fe tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Task: {'id': task-1122422, 'name': ReconfigVM_Task, 'duration_secs': 0.320416} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1075.338570] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-7aa3c262-5cfe-48c5-be13-33f7281359fe tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] [instance: 53a1791d-38fd-4721-b82c-2f0922348300] Reconfigured VM instance instance-00000057 to attach disk [datastore2] 53a1791d-38fd-4721-b82c-2f0922348300/53a1791d-38fd-4721-b82c-2f0922348300.vmdk or device None with type streamOptimized {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1075.339271] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-456584c2-9ef5-44ab-b420-a3324e4e4c2d {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.346882] env[62070]: DEBUG oslo_vmware.api [None req-7aa3c262-5cfe-48c5-be13-33f7281359fe tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Waiting for the task: (returnval){ [ 1075.346882] env[62070]: value = "task-1122426" [ 1075.346882] env[62070]: _type = "Task" [ 1075.346882] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1075.356926] env[62070]: DEBUG oslo_vmware.api [None req-7aa3c262-5cfe-48c5-be13-33f7281359fe tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Task: {'id': task-1122426, 'name': Rename_Task} progress is 5%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1075.379331] env[62070]: DEBUG oslo_vmware.api [None req-0e627254-5e66-410e-83f6-90b7abe8c714 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]5290914c-39e8-049a-0aec-8e2054a501dc, 'name': SearchDatastore_Task, 'duration_secs': 0.02413} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1075.379651] env[62070]: DEBUG oslo_concurrency.lockutils [None req-0e627254-5e66-410e-83f6-90b7abe8c714 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] Releasing lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1075.379951] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-0e627254-5e66-410e-83f6-90b7abe8c714 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore1] b1137be1-b66b-4eb2-bdbd-1db6173a1f93/b1137be1-b66b-4eb2-bdbd-1db6173a1f93.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1075.380244] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-27dfa06a-e52b-45f1-a94c-5061d67068fc {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.388314] env[62070]: DEBUG oslo_vmware.api [None req-0e627254-5e66-410e-83f6-90b7abe8c714 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] Waiting for the task: (returnval){ [ 1075.388314] env[62070]: value = "task-1122427" [ 1075.388314] env[62070]: _type = "Task" [ 1075.388314] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1075.397044] env[62070]: DEBUG oslo_vmware.api [None req-0e627254-5e66-410e-83f6-90b7abe8c714 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] Task: {'id': task-1122427, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1075.432370] env[62070]: DEBUG oslo_vmware.api [None req-4c1d7cd7-ce0f-4faf-ad4d-d5d0f04dde13 tempest-AttachInterfacesUnderV243Test-1418654099 tempest-AttachInterfacesUnderV243Test-1418654099-project-member] Task: {'id': task-1122424, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1075.547189] env[62070]: DEBUG oslo_concurrency.lockutils [None req-29973fcc-09d9-4fbd-8b6b-60fb75981888 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.816s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1075.570835] env[62070]: INFO nova.scheduler.client.report [None req-29973fcc-09d9-4fbd-8b6b-60fb75981888 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Deleted allocations for instance 27d9b478-7ebb-4313-a314-679ca0292086 [ 1075.807665] env[62070]: DEBUG oslo_vmware.api [None req-fc4145e3-bd31-4881-b5f2-e34b24eeef20 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Task: {'id': task-1122425, 'name': ReconfigVM_Task, 'duration_secs': 0.229245} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1075.809026] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-fc4145e3-bd31-4881-b5f2-e34b24eeef20 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] [instance: 5cccd79d-d243-49db-8581-718dd594f3b3] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-245502', 'volume_id': 'e25cbba2-7a89-41cf-8f0c-a38272da6f0a', 'name': 'volume-e25cbba2-7a89-41cf-8f0c-a38272da6f0a', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '5cccd79d-d243-49db-8581-718dd594f3b3', 'attached_at': '', 'detached_at': '', 'volume_id': 'e25cbba2-7a89-41cf-8f0c-a38272da6f0a', 'serial': 'e25cbba2-7a89-41cf-8f0c-a38272da6f0a'} {{(pid=62070) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1075.862975] env[62070]: DEBUG oslo_vmware.api [None req-7aa3c262-5cfe-48c5-be13-33f7281359fe tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Task: {'id': task-1122426, 'name': Rename_Task, 'duration_secs': 0.299628} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1075.862975] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-7aa3c262-5cfe-48c5-be13-33f7281359fe tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] [instance: 53a1791d-38fd-4721-b82c-2f0922348300] Powering on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1075.862975] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-61dbed8d-43bb-4add-8689-e3ac5029d7cc {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.872227] env[62070]: DEBUG oslo_vmware.api [None req-7aa3c262-5cfe-48c5-be13-33f7281359fe tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Waiting for the task: (returnval){ [ 1075.872227] env[62070]: value = "task-1122428" [ 1075.872227] env[62070]: _type = "Task" [ 1075.872227] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1075.882556] env[62070]: DEBUG oslo_vmware.api [None req-7aa3c262-5cfe-48c5-be13-33f7281359fe tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Task: {'id': task-1122428, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1075.902518] env[62070]: DEBUG oslo_vmware.api [None req-0e627254-5e66-410e-83f6-90b7abe8c714 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] Task: {'id': task-1122427, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1075.935562] env[62070]: DEBUG oslo_vmware.api [None req-4c1d7cd7-ce0f-4faf-ad4d-d5d0f04dde13 tempest-AttachInterfacesUnderV243Test-1418654099 tempest-AttachInterfacesUnderV243Test-1418654099-project-member] Task: {'id': task-1122424, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.5563} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1075.935848] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-4c1d7cd7-ce0f-4faf-ad4d-d5d0f04dde13 tempest-AttachInterfacesUnderV243Test-1418654099 tempest-AttachInterfacesUnderV243Test-1418654099-project-member] Deleted the datastore file {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1075.936115] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-4c1d7cd7-ce0f-4faf-ad4d-d5d0f04dde13 tempest-AttachInterfacesUnderV243Test-1418654099 tempest-AttachInterfacesUnderV243Test-1418654099-project-member] [instance: 7bfda953-ac95-4dce-b7a7-c570eae35582] Deleted contents of the VM from datastore datastore1 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1075.936320] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-4c1d7cd7-ce0f-4faf-ad4d-d5d0f04dde13 tempest-AttachInterfacesUnderV243Test-1418654099 tempest-AttachInterfacesUnderV243Test-1418654099-project-member] [instance: 7bfda953-ac95-4dce-b7a7-c570eae35582] Instance destroyed {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1075.936497] env[62070]: INFO nova.compute.manager [None req-4c1d7cd7-ce0f-4faf-ad4d-d5d0f04dde13 tempest-AttachInterfacesUnderV243Test-1418654099 tempest-AttachInterfacesUnderV243Test-1418654099-project-member] [instance: 7bfda953-ac95-4dce-b7a7-c570eae35582] Took 1.69 seconds to destroy the instance on the hypervisor. [ 1075.936753] env[62070]: DEBUG oslo.service.loopingcall [None req-4c1d7cd7-ce0f-4faf-ad4d-d5d0f04dde13 tempest-AttachInterfacesUnderV243Test-1418654099 tempest-AttachInterfacesUnderV243Test-1418654099-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1075.936955] env[62070]: DEBUG nova.compute.manager [-] [instance: 7bfda953-ac95-4dce-b7a7-c570eae35582] Deallocating network for instance {{(pid=62070) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1075.937065] env[62070]: DEBUG nova.network.neutron [-] [instance: 7bfda953-ac95-4dce-b7a7-c570eae35582] deallocate_for_instance() {{(pid=62070) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1076.086356] env[62070]: DEBUG oslo_concurrency.lockutils [None req-29973fcc-09d9-4fbd-8b6b-60fb75981888 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Lock "27d9b478-7ebb-4313-a314-679ca0292086" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.636s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1076.382922] env[62070]: DEBUG nova.objects.instance [None req-fc4145e3-bd31-4881-b5f2-e34b24eeef20 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Lazy-loading 'flavor' on Instance uuid 5cccd79d-d243-49db-8581-718dd594f3b3 {{(pid=62070) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1076.396520] env[62070]: DEBUG oslo_vmware.api [None req-7aa3c262-5cfe-48c5-be13-33f7281359fe tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Task: {'id': task-1122428, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1076.403492] env[62070]: DEBUG oslo_vmware.api [None req-0e627254-5e66-410e-83f6-90b7abe8c714 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] Task: {'id': task-1122427, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.930453} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1076.404096] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-0e627254-5e66-410e-83f6-90b7abe8c714 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore1] b1137be1-b66b-4eb2-bdbd-1db6173a1f93/b1137be1-b66b-4eb2-bdbd-1db6173a1f93.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 1076.404096] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-0e627254-5e66-410e-83f6-90b7abe8c714 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] [instance: b1137be1-b66b-4eb2-bdbd-1db6173a1f93] Extending root virtual disk to 1048576 {{(pid=62070) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1076.404306] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1e8d509c-93a6-4181-9ddc-dc9d65e78925 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.413266] env[62070]: DEBUG oslo_vmware.api [None req-0e627254-5e66-410e-83f6-90b7abe8c714 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] Waiting for the task: (returnval){ [ 1076.413266] env[62070]: value = "task-1122429" [ 1076.413266] env[62070]: _type = "Task" [ 1076.413266] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1076.425231] env[62070]: DEBUG oslo_vmware.api [None req-0e627254-5e66-410e-83f6-90b7abe8c714 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] Task: {'id': task-1122429, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1076.650578] env[62070]: DEBUG nova.compute.manager [req-f7111b4d-dda3-4554-8c50-f7da8bb0d0d8 req-bd61afa5-71c1-402d-9558-c909f6181175 service nova] [instance: 7bfda953-ac95-4dce-b7a7-c570eae35582] Received event network-vif-deleted-8372f59a-de9e-4062-be0b-39996b8f6c9e {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1076.650578] env[62070]: INFO nova.compute.manager [req-f7111b4d-dda3-4554-8c50-f7da8bb0d0d8 req-bd61afa5-71c1-402d-9558-c909f6181175 service nova] [instance: 7bfda953-ac95-4dce-b7a7-c570eae35582] Neutron deleted interface 8372f59a-de9e-4062-be0b-39996b8f6c9e; detaching it from the instance and deleting it from the info cache [ 1076.650578] env[62070]: DEBUG nova.network.neutron [req-f7111b4d-dda3-4554-8c50-f7da8bb0d0d8 req-bd61afa5-71c1-402d-9558-c909f6181175 service nova] [instance: 7bfda953-ac95-4dce-b7a7-c570eae35582] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1076.715940] env[62070]: DEBUG oslo_concurrency.lockutils [None req-4ba3ce4b-98ba-4ab3-89c2-1c6da7416a72 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Acquiring lock "38573462-18e5-4ba8-ad32-6ebc7bcf7c76" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1076.716959] env[62070]: DEBUG oslo_concurrency.lockutils [None req-4ba3ce4b-98ba-4ab3-89c2-1c6da7416a72 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Lock "38573462-18e5-4ba8-ad32-6ebc7bcf7c76" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1076.883760] env[62070]: DEBUG oslo_vmware.api [None req-7aa3c262-5cfe-48c5-be13-33f7281359fe tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Task: {'id': task-1122428, 'name': PowerOnVM_Task, 'duration_secs': 0.617585} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1076.884065] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-7aa3c262-5cfe-48c5-be13-33f7281359fe tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] [instance: 53a1791d-38fd-4721-b82c-2f0922348300] Powered on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1076.924844] env[62070]: DEBUG oslo_vmware.api [None req-0e627254-5e66-410e-83f6-90b7abe8c714 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] Task: {'id': task-1122429, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.093186} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1076.925112] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-0e627254-5e66-410e-83f6-90b7abe8c714 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] [instance: b1137be1-b66b-4eb2-bdbd-1db6173a1f93] Extended root virtual disk {{(pid=62070) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1076.926046] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2e69a14-545e-4098-b915-79d84bd53b8f {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.950083] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-0e627254-5e66-410e-83f6-90b7abe8c714 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] [instance: b1137be1-b66b-4eb2-bdbd-1db6173a1f93] Reconfiguring VM instance instance-0000006a to attach disk [datastore1] b1137be1-b66b-4eb2-bdbd-1db6173a1f93/b1137be1-b66b-4eb2-bdbd-1db6173a1f93.vmdk or device None with type sparse {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1076.950083] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-05847153-0685-474d-ab9b-bcdfc126d319 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.978289] env[62070]: DEBUG oslo_vmware.api [None req-0e627254-5e66-410e-83f6-90b7abe8c714 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] Waiting for the task: (returnval){ [ 1076.978289] env[62070]: value = "task-1122430" [ 1076.978289] env[62070]: _type = "Task" [ 1076.978289] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1076.993753] env[62070]: DEBUG oslo_vmware.api [None req-0e627254-5e66-410e-83f6-90b7abe8c714 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] Task: {'id': task-1122430, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1077.001865] env[62070]: DEBUG nova.compute.manager [None req-7aa3c262-5cfe-48c5-be13-33f7281359fe tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] [instance: 53a1791d-38fd-4721-b82c-2f0922348300] Checking state {{(pid=62070) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1077.004336] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a46fa03-763c-49cb-9a11-87c1a062cd02 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.122748] env[62070]: DEBUG nova.network.neutron [-] [instance: 7bfda953-ac95-4dce-b7a7-c570eae35582] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1077.152334] env[62070]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-373a5c08-825b-462e-bf97-67d156fb2a4f {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.165862] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a35e98eb-0586-4d81-a7ba-fa015b442fd5 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.202965] env[62070]: DEBUG nova.compute.manager [req-f7111b4d-dda3-4554-8c50-f7da8bb0d0d8 req-bd61afa5-71c1-402d-9558-c909f6181175 service nova] [instance: 7bfda953-ac95-4dce-b7a7-c570eae35582] Detach interface failed, port_id=8372f59a-de9e-4062-be0b-39996b8f6c9e, reason: Instance 7bfda953-ac95-4dce-b7a7-c570eae35582 could not be found. {{(pid=62070) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1077.218659] env[62070]: DEBUG nova.compute.manager [None req-4ba3ce4b-98ba-4ab3-89c2-1c6da7416a72 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 38573462-18e5-4ba8-ad32-6ebc7bcf7c76] Starting instance... 
{{(pid=62070) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1077.394623] env[62070]: DEBUG oslo_concurrency.lockutils [None req-fc4145e3-bd31-4881-b5f2-e34b24eeef20 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Lock "5cccd79d-d243-49db-8581-718dd594f3b3" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 5.146s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1077.471217] env[62070]: DEBUG oslo_concurrency.lockutils [None req-b09e72a5-05ad-41b6-98a2-d8413ca74b64 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Acquiring lock "a3c42653-9a4b-42d3-bc38-8d46d95c8f64" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1077.471567] env[62070]: DEBUG oslo_concurrency.lockutils [None req-b09e72a5-05ad-41b6-98a2-d8413ca74b64 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Lock "a3c42653-9a4b-42d3-bc38-8d46d95c8f64" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1077.471802] env[62070]: DEBUG oslo_concurrency.lockutils [None req-b09e72a5-05ad-41b6-98a2-d8413ca74b64 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Acquiring lock "a3c42653-9a4b-42d3-bc38-8d46d95c8f64-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1077.472062] env[62070]: DEBUG oslo_concurrency.lockutils [None req-b09e72a5-05ad-41b6-98a2-d8413ca74b64 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Lock "a3c42653-9a4b-42d3-bc38-8d46d95c8f64-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1077.472257] env[62070]: DEBUG oslo_concurrency.lockutils [None req-b09e72a5-05ad-41b6-98a2-d8413ca74b64 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Lock "a3c42653-9a4b-42d3-bc38-8d46d95c8f64-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1077.474900] env[62070]: INFO nova.compute.manager [None req-b09e72a5-05ad-41b6-98a2-d8413ca74b64 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: a3c42653-9a4b-42d3-bc38-8d46d95c8f64] Terminating instance [ 1077.476934] env[62070]: DEBUG nova.compute.manager [None req-b09e72a5-05ad-41b6-98a2-d8413ca74b64 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: a3c42653-9a4b-42d3-bc38-8d46d95c8f64] Start destroying the instance on the hypervisor. 
{{(pid=62070) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1077.477069] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-b09e72a5-05ad-41b6-98a2-d8413ca74b64 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: a3c42653-9a4b-42d3-bc38-8d46d95c8f64] Destroying instance {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1077.478037] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc35ea54-6481-4462-8ccf-ea253e9b46fe {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.491960] env[62070]: DEBUG oslo_vmware.api [None req-0e627254-5e66-410e-83f6-90b7abe8c714 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] Task: {'id': task-1122430, 'name': ReconfigVM_Task, 'duration_secs': 0.405791} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1077.494192] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-0e627254-5e66-410e-83f6-90b7abe8c714 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] [instance: b1137be1-b66b-4eb2-bdbd-1db6173a1f93] Reconfigured VM instance instance-0000006a to attach disk [datastore1] b1137be1-b66b-4eb2-bdbd-1db6173a1f93/b1137be1-b66b-4eb2-bdbd-1db6173a1f93.vmdk or device None with type sparse {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1077.494822] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-b09e72a5-05ad-41b6-98a2-d8413ca74b64 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: a3c42653-9a4b-42d3-bc38-8d46d95c8f64] Powering off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1077.495070] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f2e3221a-559f-4561-87e7-a86e22dc32fa {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.497291] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5f296409-520a-45b4-8e7e-4fcd8407a3a8 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.507450] env[62070]: DEBUG oslo_vmware.api [None req-b09e72a5-05ad-41b6-98a2-d8413ca74b64 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Waiting for the task: (returnval){ [ 1077.507450] env[62070]: value = "task-1122432" [ 1077.507450] env[62070]: _type = "Task" [ 1077.507450] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1077.508464] env[62070]: DEBUG oslo_vmware.api [None req-0e627254-5e66-410e-83f6-90b7abe8c714 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] Waiting for the task: (returnval){ [ 1077.508464] env[62070]: value = "task-1122431" [ 1077.508464] env[62070]: _type = "Task" [ 1077.508464] env[62070]: } to complete. 
{{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1077.523818] env[62070]: DEBUG oslo_vmware.api [None req-b09e72a5-05ad-41b6-98a2-d8413ca74b64 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Task: {'id': task-1122432, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1077.526781] env[62070]: DEBUG oslo_concurrency.lockutils [None req-7aa3c262-5cfe-48c5-be13-33f7281359fe tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Lock "53a1791d-38fd-4721-b82c-2f0922348300" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 25.931s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1077.531141] env[62070]: DEBUG oslo_vmware.api [None req-0e627254-5e66-410e-83f6-90b7abe8c714 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] Task: {'id': task-1122431, 'name': Rename_Task} progress is 10%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1077.625958] env[62070]: INFO nova.compute.manager [-] [instance: 7bfda953-ac95-4dce-b7a7-c570eae35582] Took 1.69 seconds to deallocate network for instance. [ 1077.745384] env[62070]: DEBUG oslo_concurrency.lockutils [None req-4ba3ce4b-98ba-4ab3-89c2-1c6da7416a72 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1077.745718] env[62070]: DEBUG oslo_concurrency.lockutils [None req-4ba3ce4b-98ba-4ab3-89c2-1c6da7416a72 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1077.747724] env[62070]: INFO nova.compute.claims [None req-4ba3ce4b-98ba-4ab3-89c2-1c6da7416a72 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 38573462-18e5-4ba8-ad32-6ebc7bcf7c76] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1077.784963] env[62070]: DEBUG oslo_concurrency.lockutils [None req-82a4a35c-e092-4df2-9dbe-a73d6f1c48ac tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Acquiring lock "5cccd79d-d243-49db-8581-718dd594f3b3" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1077.785249] env[62070]: DEBUG oslo_concurrency.lockutils [None req-82a4a35c-e092-4df2-9dbe-a73d6f1c48ac tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Lock "5cccd79d-d243-49db-8581-718dd594f3b3" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1078.024203] env[62070]: DEBUG oslo_vmware.api [None 
req-b09e72a5-05ad-41b6-98a2-d8413ca74b64 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Task: {'id': task-1122432, 'name': PowerOffVM_Task, 'duration_secs': 0.422921} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1078.024546] env[62070]: DEBUG oslo_vmware.api [None req-0e627254-5e66-410e-83f6-90b7abe8c714 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] Task: {'id': task-1122431, 'name': Rename_Task, 'duration_secs': 0.154667} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1078.024769] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-b09e72a5-05ad-41b6-98a2-d8413ca74b64 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: a3c42653-9a4b-42d3-bc38-8d46d95c8f64] Powered off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1078.024990] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-b09e72a5-05ad-41b6-98a2-d8413ca74b64 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: a3c42653-9a4b-42d3-bc38-8d46d95c8f64] Unregistering the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1078.025302] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-0e627254-5e66-410e-83f6-90b7abe8c714 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] [instance: b1137be1-b66b-4eb2-bdbd-1db6173a1f93] Powering on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1078.025569] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-744b9bac-1c54-4b6a-a475-43a2655d6271 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.027023] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8871b01f-2281-4d6b-8ec8-0caf702833f5 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.035713] env[62070]: DEBUG oslo_vmware.api [None req-0e627254-5e66-410e-83f6-90b7abe8c714 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] Waiting for the task: (returnval){ [ 1078.035713] env[62070]: value = "task-1122433" [ 1078.035713] env[62070]: _type = "Task" [ 1078.035713] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1078.045236] env[62070]: DEBUG oslo_vmware.api [None req-0e627254-5e66-410e-83f6-90b7abe8c714 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] Task: {'id': task-1122433, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1078.100169] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-b09e72a5-05ad-41b6-98a2-d8413ca74b64 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: a3c42653-9a4b-42d3-bc38-8d46d95c8f64] Unregistered the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1078.100462] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-b09e72a5-05ad-41b6-98a2-d8413ca74b64 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: a3c42653-9a4b-42d3-bc38-8d46d95c8f64] Deleting contents of the VM from datastore datastore1 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1078.100662] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-b09e72a5-05ad-41b6-98a2-d8413ca74b64 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Deleting the datastore file [datastore1] a3c42653-9a4b-42d3-bc38-8d46d95c8f64 {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1078.101068] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c9230527-975c-4aaa-99f0-9d64522dbe72 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.108661] env[62070]: DEBUG oslo_vmware.api [None req-b09e72a5-05ad-41b6-98a2-d8413ca74b64 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Waiting for the task: (returnval){ [ 1078.108661] env[62070]: value = "task-1122435" [ 1078.108661] env[62070]: _type = "Task" [ 1078.108661] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1078.117039] env[62070]: DEBUG oslo_vmware.api [None req-b09e72a5-05ad-41b6-98a2-d8413ca74b64 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Task: {'id': task-1122435, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1078.133407] env[62070]: DEBUG oslo_concurrency.lockutils [None req-4c1d7cd7-ce0f-4faf-ad4d-d5d0f04dde13 tempest-AttachInterfacesUnderV243Test-1418654099 tempest-AttachInterfacesUnderV243Test-1418654099-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1078.289027] env[62070]: INFO nova.compute.manager [None req-82a4a35c-e092-4df2-9dbe-a73d6f1c48ac tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] [instance: 5cccd79d-d243-49db-8581-718dd594f3b3] Detaching volume 73eaeb2a-8061-49f0-ba0e-4c1fc41e031f [ 1078.323533] env[62070]: INFO nova.virt.block_device [None req-82a4a35c-e092-4df2-9dbe-a73d6f1c48ac tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] [instance: 5cccd79d-d243-49db-8581-718dd594f3b3] Attempting to driver detach volume 73eaeb2a-8061-49f0-ba0e-4c1fc41e031f from mountpoint /dev/sdc [ 1078.323782] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-82a4a35c-e092-4df2-9dbe-a73d6f1c48ac tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] [instance: 5cccd79d-d243-49db-8581-718dd594f3b3] Volume detach. Driver type: vmdk {{(pid=62070) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1078.323999] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-82a4a35c-e092-4df2-9dbe-a73d6f1c48ac tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] [instance: 5cccd79d-d243-49db-8581-718dd594f3b3] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-245509', 'volume_id': '73eaeb2a-8061-49f0-ba0e-4c1fc41e031f', 'name': 'volume-73eaeb2a-8061-49f0-ba0e-4c1fc41e031f', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '5cccd79d-d243-49db-8581-718dd594f3b3', 'attached_at': '', 'detached_at': '', 'volume_id': '73eaeb2a-8061-49f0-ba0e-4c1fc41e031f', 'serial': '73eaeb2a-8061-49f0-ba0e-4c1fc41e031f'} {{(pid=62070) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1078.325046] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fee09d6-8f5c-4f44-bc6b-3780854c8ec3 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.348624] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52969b40-c55e-4515-86ae-679d6d6e752b {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.356665] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c691236f-20e5-438a-a9bc-0fdf4d80e63c {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.378929] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56443e83-3ce9-483b-9d1d-d35f6255b12a {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.393927] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None 
req-82a4a35c-e092-4df2-9dbe-a73d6f1c48ac tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] The volume has not been displaced from its original location: [datastore2] volume-73eaeb2a-8061-49f0-ba0e-4c1fc41e031f/volume-73eaeb2a-8061-49f0-ba0e-4c1fc41e031f.vmdk. No consolidation needed. {{(pid=62070) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1078.399342] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-82a4a35c-e092-4df2-9dbe-a73d6f1c48ac tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] [instance: 5cccd79d-d243-49db-8581-718dd594f3b3] Reconfiguring VM instance instance-0000005f to detach disk 2002 {{(pid=62070) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1078.400397] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5425a7ec-e4ae-4a16-88e6-d7e378115eea {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.420443] env[62070]: DEBUG oslo_vmware.api [None req-82a4a35c-e092-4df2-9dbe-a73d6f1c48ac tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Waiting for the task: (returnval){ [ 1078.420443] env[62070]: value = "task-1122436" [ 1078.420443] env[62070]: _type = "Task" [ 1078.420443] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1078.429676] env[62070]: DEBUG oslo_vmware.api [None req-82a4a35c-e092-4df2-9dbe-a73d6f1c48ac tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Task: {'id': task-1122436, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1078.545257] env[62070]: DEBUG oslo_vmware.api [None req-0e627254-5e66-410e-83f6-90b7abe8c714 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] Task: {'id': task-1122433, 'name': PowerOnVM_Task, 'duration_secs': 0.470329} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1078.545537] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-0e627254-5e66-410e-83f6-90b7abe8c714 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] [instance: b1137be1-b66b-4eb2-bdbd-1db6173a1f93] Powered on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1078.545745] env[62070]: INFO nova.compute.manager [None req-0e627254-5e66-410e-83f6-90b7abe8c714 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] [instance: b1137be1-b66b-4eb2-bdbd-1db6173a1f93] Took 6.29 seconds to spawn the instance on the hypervisor. 
[ 1078.545929] env[62070]: DEBUG nova.compute.manager [None req-0e627254-5e66-410e-83f6-90b7abe8c714 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] [instance: b1137be1-b66b-4eb2-bdbd-1db6173a1f93] Checking state {{(pid=62070) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1078.546750] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91de6894-394e-4766-b611-8da590fcbc71 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.619675] env[62070]: DEBUG oslo_vmware.api [None req-b09e72a5-05ad-41b6-98a2-d8413ca74b64 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Task: {'id': task-1122435, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.136875} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1078.619959] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-b09e72a5-05ad-41b6-98a2-d8413ca74b64 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Deleted the datastore file {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1078.620167] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-b09e72a5-05ad-41b6-98a2-d8413ca74b64 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: a3c42653-9a4b-42d3-bc38-8d46d95c8f64] Deleted contents of the VM from datastore datastore1 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1078.620353] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-b09e72a5-05ad-41b6-98a2-d8413ca74b64 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: a3c42653-9a4b-42d3-bc38-8d46d95c8f64] Instance destroyed {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1078.620530] env[62070]: INFO nova.compute.manager [None req-b09e72a5-05ad-41b6-98a2-d8413ca74b64 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] [instance: a3c42653-9a4b-42d3-bc38-8d46d95c8f64] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1078.620781] env[62070]: DEBUG oslo.service.loopingcall [None req-b09e72a5-05ad-41b6-98a2-d8413ca74b64 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1078.620975] env[62070]: DEBUG nova.compute.manager [-] [instance: a3c42653-9a4b-42d3-bc38-8d46d95c8f64] Deallocating network for instance {{(pid=62070) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1078.621086] env[62070]: DEBUG nova.network.neutron [-] [instance: a3c42653-9a4b-42d3-bc38-8d46d95c8f64] deallocate_for_instance() {{(pid=62070) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1078.915680] env[62070]: DEBUG nova.compute.manager [req-85a6e764-87d7-4643-b7a9-e14f23b5ee4f req-f6504f3b-13ee-40ad-896e-00e696755583 service nova] [instance: a3c42653-9a4b-42d3-bc38-8d46d95c8f64] Received event network-vif-deleted-60d8bbf4-bc22-4c62-8ddd-d3a60ee21d1b {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1078.915904] env[62070]: INFO nova.compute.manager [req-85a6e764-87d7-4643-b7a9-e14f23b5ee4f req-f6504f3b-13ee-40ad-896e-00e696755583 service nova] [instance: a3c42653-9a4b-42d3-bc38-8d46d95c8f64] Neutron deleted interface 60d8bbf4-bc22-4c62-8ddd-d3a60ee21d1b; detaching it from the instance and deleting it from the info cache [ 1078.916164] env[62070]: DEBUG nova.network.neutron [req-85a6e764-87d7-4643-b7a9-e14f23b5ee4f req-f6504f3b-13ee-40ad-896e-00e696755583 service nova] [instance: a3c42653-9a4b-42d3-bc38-8d46d95c8f64] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1078.934693] env[62070]: DEBUG oslo_vmware.api [None req-82a4a35c-e092-4df2-9dbe-a73d6f1c48ac tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Task: {'id': task-1122436, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1078.939311] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-411c7aa7-1f19-4564-b6f9-bcb2df4861d6 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.946683] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59aef5d7-a2a7-4c61-8b17-441e40dc5c03 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.977511] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ce4ed39-91e9-45d8-9f37-1f94cefdb649 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.985560] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34ce18a1-8187-4235-a8e1-1243627afb12 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.999959] env[62070]: DEBUG nova.compute.provider_tree [None req-4ba3ce4b-98ba-4ab3-89c2-1c6da7416a72 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1079.064153] env[62070]: INFO nova.compute.manager [None req-0e627254-5e66-410e-83f6-90b7abe8c714 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] [instance: b1137be1-b66b-4eb2-bdbd-1db6173a1f93] Took 11.15 seconds to build instance. [ 1079.394351] env[62070]: DEBUG nova.network.neutron [-] [instance: a3c42653-9a4b-42d3-bc38-8d46d95c8f64] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1079.419692] env[62070]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-aa032746-03ee-4a60-978b-7faf6cab09f1 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.431633] env[62070]: DEBUG oslo_vmware.api [None req-82a4a35c-e092-4df2-9dbe-a73d6f1c48ac tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Task: {'id': task-1122436, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1079.435526] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84b655e8-5d68-479c-a98f-0a0bf3b307f0 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.467979] env[62070]: DEBUG nova.compute.manager [req-85a6e764-87d7-4643-b7a9-e14f23b5ee4f req-f6504f3b-13ee-40ad-896e-00e696755583 service nova] [instance: a3c42653-9a4b-42d3-bc38-8d46d95c8f64] Detach interface failed, port_id=60d8bbf4-bc22-4c62-8ddd-d3a60ee21d1b, reason: Instance a3c42653-9a4b-42d3-bc38-8d46d95c8f64 could not be found. 
{{(pid=62070) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1079.503381] env[62070]: DEBUG nova.scheduler.client.report [None req-4ba3ce4b-98ba-4ab3-89c2-1c6da7416a72 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1079.566150] env[62070]: DEBUG oslo_concurrency.lockutils [None req-0e627254-5e66-410e-83f6-90b7abe8c714 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] Lock "b1137be1-b66b-4eb2-bdbd-1db6173a1f93" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 12.677s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1079.633355] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f305e2c-8438-4e32-a5e4-c49cf9fa651a {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.640259] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-bc0072b1-7ad7-4fd3-80b3-4b0f0c0b8bdd tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] [instance: 53a1791d-38fd-4721-b82c-2f0922348300] Suspending the VM {{(pid=62070) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1163}} [ 1079.640508] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-95266a5a-db54-4450-a994-8fc1ea9c6c0c {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.647751] env[62070]: DEBUG oslo_vmware.api [None req-bc0072b1-7ad7-4fd3-80b3-4b0f0c0b8bdd tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Waiting for the task: (returnval){ [ 1079.647751] env[62070]: value = "task-1122437" [ 1079.647751] env[62070]: _type = "Task" [ 1079.647751] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1079.659797] env[62070]: DEBUG oslo_vmware.api [None req-bc0072b1-7ad7-4fd3-80b3-4b0f0c0b8bdd tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Task: {'id': task-1122437, 'name': SuspendVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1079.897077] env[62070]: INFO nova.compute.manager [-] [instance: a3c42653-9a4b-42d3-bc38-8d46d95c8f64] Took 1.28 seconds to deallocate network for instance. [ 1079.933509] env[62070]: DEBUG oslo_vmware.api [None req-82a4a35c-e092-4df2-9dbe-a73d6f1c48ac tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Task: {'id': task-1122436, 'name': ReconfigVM_Task, 'duration_secs': 1.453031} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1079.934494] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-82a4a35c-e092-4df2-9dbe-a73d6f1c48ac tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] [instance: 5cccd79d-d243-49db-8581-718dd594f3b3] Reconfigured VM instance instance-0000005f to detach disk 2002 {{(pid=62070) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1079.939506] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f0b437ef-4307-46e5-8d79-56d6e31a9510 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.956362] env[62070]: DEBUG oslo_vmware.api [None req-82a4a35c-e092-4df2-9dbe-a73d6f1c48ac tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Waiting for the task: (returnval){ [ 1079.956362] env[62070]: value = "task-1122438" [ 1079.956362] env[62070]: _type = "Task" [ 1079.956362] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1079.965708] env[62070]: DEBUG oslo_vmware.api [None req-82a4a35c-e092-4df2-9dbe-a73d6f1c48ac tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Task: {'id': task-1122438, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1080.008688] env[62070]: DEBUG oslo_concurrency.lockutils [None req-4ba3ce4b-98ba-4ab3-89c2-1c6da7416a72 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.263s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1080.009052] env[62070]: DEBUG nova.compute.manager [None req-4ba3ce4b-98ba-4ab3-89c2-1c6da7416a72 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 38573462-18e5-4ba8-ad32-6ebc7bcf7c76] Start building networks asynchronously for instance. 
{{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1080.012037] env[62070]: DEBUG oslo_concurrency.lockutils [None req-4c1d7cd7-ce0f-4faf-ad4d-d5d0f04dde13 tempest-AttachInterfacesUnderV243Test-1418654099 tempest-AttachInterfacesUnderV243Test-1418654099-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.879s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1080.012301] env[62070]: DEBUG nova.objects.instance [None req-4c1d7cd7-ce0f-4faf-ad4d-d5d0f04dde13 tempest-AttachInterfacesUnderV243Test-1418654099 tempest-AttachInterfacesUnderV243Test-1418654099-project-member] Lazy-loading 'resources' on Instance uuid 7bfda953-ac95-4dce-b7a7-c570eae35582 {{(pid=62070) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1080.122620] env[62070]: INFO nova.compute.manager [None req-1fe1cd47-a0f8-4513-949c-45353657dab9 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] [instance: b1137be1-b66b-4eb2-bdbd-1db6173a1f93] Rebuilding instance [ 1080.158391] env[62070]: DEBUG oslo_vmware.api [None req-bc0072b1-7ad7-4fd3-80b3-4b0f0c0b8bdd tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Task: {'id': task-1122437, 'name': SuspendVM_Task} progress is 62%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1080.166029] env[62070]: DEBUG nova.compute.manager [None req-1fe1cd47-a0f8-4513-949c-45353657dab9 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] [instance: b1137be1-b66b-4eb2-bdbd-1db6173a1f93] Checking state {{(pid=62070) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1080.166534] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7df37ea3-168f-41bd-a3f6-12ad5290d21c {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.405211] env[62070]: DEBUG oslo_concurrency.lockutils [None req-b09e72a5-05ad-41b6-98a2-d8413ca74b64 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1080.467183] env[62070]: DEBUG oslo_vmware.api [None req-82a4a35c-e092-4df2-9dbe-a73d6f1c48ac tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Task: {'id': task-1122438, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1080.515075] env[62070]: DEBUG nova.compute.utils [None req-4ba3ce4b-98ba-4ab3-89c2-1c6da7416a72 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Using /dev/sd instead of None {{(pid=62070) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1080.519843] env[62070]: DEBUG nova.compute.manager [None req-4ba3ce4b-98ba-4ab3-89c2-1c6da7416a72 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 38573462-18e5-4ba8-ad32-6ebc7bcf7c76] Allocating IP information in the background. 
{{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1080.519843] env[62070]: DEBUG nova.network.neutron [None req-4ba3ce4b-98ba-4ab3-89c2-1c6da7416a72 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 38573462-18e5-4ba8-ad32-6ebc7bcf7c76] allocate_for_instance() {{(pid=62070) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1080.568387] env[62070]: DEBUG nova.policy [None req-4ba3ce4b-98ba-4ab3-89c2-1c6da7416a72 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0aa820b3e16d4d6fbc6bda0b232025fc', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c91e5eeeeb1742f499b2edaf76a93a3b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62070) authorize /opt/stack/nova/nova/policy.py:201}} [ 1080.659327] env[62070]: DEBUG oslo_vmware.api [None req-bc0072b1-7ad7-4fd3-80b3-4b0f0c0b8bdd tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Task: {'id': task-1122437, 'name': SuspendVM_Task, 'duration_secs': 0.664709} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1080.659651] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-bc0072b1-7ad7-4fd3-80b3-4b0f0c0b8bdd tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] [instance: 53a1791d-38fd-4721-b82c-2f0922348300] Suspended the VM {{(pid=62070) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1167}} [ 1080.659843] env[62070]: DEBUG nova.compute.manager [None req-bc0072b1-7ad7-4fd3-80b3-4b0f0c0b8bdd tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] [instance: 53a1791d-38fd-4721-b82c-2f0922348300] Checking state {{(pid=62070) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1080.660644] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae57c087-3758-48be-ae0d-4688b50cddc7 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.670787] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64fc23da-b4af-404d-adf0-983002f4ffa8 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.678656] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-1fe1cd47-a0f8-4513-949c-45353657dab9 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] [instance: b1137be1-b66b-4eb2-bdbd-1db6173a1f93] Powering off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1080.678962] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5e3096b5-ad3b-4c9d-87eb-1ce19c60929e {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.681217] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx 
with opID=oslo.vmware-087ad442-7121-4bab-915e-e14dfcdd42ff {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.715482] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6cf42105-4b94-42a0-8977-0562c1adf0c7 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.719215] env[62070]: DEBUG oslo_vmware.api [None req-1fe1cd47-a0f8-4513-949c-45353657dab9 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] Waiting for the task: (returnval){ [ 1080.719215] env[62070]: value = "task-1122439" [ 1080.719215] env[62070]: _type = "Task" [ 1080.719215] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1080.729093] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81ce7052-ff85-4e4a-95af-6911b3df1b61 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.737083] env[62070]: DEBUG oslo_vmware.api [None req-1fe1cd47-a0f8-4513-949c-45353657dab9 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] Task: {'id': task-1122439, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1080.747750] env[62070]: DEBUG nova.compute.provider_tree [None req-4c1d7cd7-ce0f-4faf-ad4d-d5d0f04dde13 tempest-AttachInterfacesUnderV243Test-1418654099 tempest-AttachInterfacesUnderV243Test-1418654099-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1080.968454] env[62070]: DEBUG oslo_vmware.api [None req-82a4a35c-e092-4df2-9dbe-a73d6f1c48ac tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Task: {'id': task-1122438, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1081.020895] env[62070]: DEBUG nova.compute.manager [None req-4ba3ce4b-98ba-4ab3-89c2-1c6da7416a72 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 38573462-18e5-4ba8-ad32-6ebc7bcf7c76] Start building block device mappings for instance. {{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1081.047538] env[62070]: DEBUG nova.network.neutron [None req-4ba3ce4b-98ba-4ab3-89c2-1c6da7416a72 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 38573462-18e5-4ba8-ad32-6ebc7bcf7c76] Successfully created port: dc2ee731-fdfd-479f-ba45-d8a9e7f50a0e {{(pid=62070) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1081.232972] env[62070]: DEBUG oslo_vmware.api [None req-1fe1cd47-a0f8-4513-949c-45353657dab9 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] Task: {'id': task-1122439, 'name': PowerOffVM_Task, 'duration_secs': 0.123497} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1081.232972] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-1fe1cd47-a0f8-4513-949c-45353657dab9 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] [instance: b1137be1-b66b-4eb2-bdbd-1db6173a1f93] Powered off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1081.232972] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-1fe1cd47-a0f8-4513-949c-45353657dab9 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] [instance: b1137be1-b66b-4eb2-bdbd-1db6173a1f93] Destroying instance {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1081.233752] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-620d3027-a796-41ab-8d13-46c3ac1df7d4 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.241168] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-1fe1cd47-a0f8-4513-949c-45353657dab9 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] [instance: b1137be1-b66b-4eb2-bdbd-1db6173a1f93] Unregistering the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1081.241411] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b84efb12-891f-4273-8e24-7e2a1f288c07 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.250790] env[62070]: DEBUG nova.scheduler.client.report [None req-4c1d7cd7-ce0f-4faf-ad4d-d5d0f04dde13 tempest-AttachInterfacesUnderV243Test-1418654099 tempest-AttachInterfacesUnderV243Test-1418654099-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1081.271040] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-1fe1cd47-a0f8-4513-949c-45353657dab9 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] [instance: b1137be1-b66b-4eb2-bdbd-1db6173a1f93] Unregistered the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1081.271377] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-1fe1cd47-a0f8-4513-949c-45353657dab9 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] [instance: b1137be1-b66b-4eb2-bdbd-1db6173a1f93] Deleting contents of the VM from datastore datastore1 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1081.271460] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-1fe1cd47-a0f8-4513-949c-45353657dab9 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] Deleting the datastore file [datastore1] b1137be1-b66b-4eb2-bdbd-1db6173a1f93 {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 
1081.271716] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-31ebed35-1dcc-4403-928e-cf7157812308 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.278648] env[62070]: DEBUG oslo_vmware.api [None req-1fe1cd47-a0f8-4513-949c-45353657dab9 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] Waiting for the task: (returnval){ [ 1081.278648] env[62070]: value = "task-1122441" [ 1081.278648] env[62070]: _type = "Task" [ 1081.278648] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1081.286817] env[62070]: DEBUG oslo_vmware.api [None req-1fe1cd47-a0f8-4513-949c-45353657dab9 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] Task: {'id': task-1122441, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1081.468651] env[62070]: DEBUG oslo_vmware.api [None req-82a4a35c-e092-4df2-9dbe-a73d6f1c48ac tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Task: {'id': task-1122438, 'name': ReconfigVM_Task, 'duration_secs': 1.1799} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1081.468975] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-82a4a35c-e092-4df2-9dbe-a73d6f1c48ac tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] [instance: 5cccd79d-d243-49db-8581-718dd594f3b3] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-245509', 'volume_id': '73eaeb2a-8061-49f0-ba0e-4c1fc41e031f', 'name': 'volume-73eaeb2a-8061-49f0-ba0e-4c1fc41e031f', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '5cccd79d-d243-49db-8581-718dd594f3b3', 'attached_at': '', 'detached_at': '', 'volume_id': '73eaeb2a-8061-49f0-ba0e-4c1fc41e031f', 'serial': '73eaeb2a-8061-49f0-ba0e-4c1fc41e031f'} {{(pid=62070) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1081.528039] env[62070]: INFO nova.virt.block_device [None req-4ba3ce4b-98ba-4ab3-89c2-1c6da7416a72 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 38573462-18e5-4ba8-ad32-6ebc7bcf7c76] Booting with volume e82aa618-eed9-4876-a9ff-8289ed9697af at /dev/sda [ 1081.560978] env[62070]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f52a9f60-db6a-4de3-b8b5-fda5e69aaccb {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.574150] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58fff755-f8e9-4026-ba6d-17a909aa4c46 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.609107] env[62070]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-7f6dc35c-d3a3-461e-91fd-43c63c4c4f20 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.618135] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx 
with opID=oslo.vmware-ce58097e-a40a-4929-81cb-ed57d18dbbd4 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.649581] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-175c1ec5-2cfd-4928-87f8-ce77631371a1 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.657039] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16a573b8-4f14-49af-9e87-fcbf5cc6827a {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.672616] env[62070]: DEBUG nova.virt.block_device [None req-4ba3ce4b-98ba-4ab3-89c2-1c6da7416a72 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 38573462-18e5-4ba8-ad32-6ebc7bcf7c76] Updating existing volume attachment record: 95fba64c-4387-4519-9f89-3b213139bcba {{(pid=62070) _volume_attach /opt/stack/nova/nova/virt/block_device.py:679}} [ 1081.756082] env[62070]: DEBUG oslo_concurrency.lockutils [None req-4c1d7cd7-ce0f-4faf-ad4d-d5d0f04dde13 tempest-AttachInterfacesUnderV243Test-1418654099 tempest-AttachInterfacesUnderV243Test-1418654099-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.744s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1081.758427] env[62070]: DEBUG oslo_concurrency.lockutils [None req-b09e72a5-05ad-41b6-98a2-d8413ca74b64 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.353s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1081.758722] env[62070]: DEBUG nova.objects.instance [None req-b09e72a5-05ad-41b6-98a2-d8413ca74b64 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Lazy-loading 'resources' on Instance uuid a3c42653-9a4b-42d3-bc38-8d46d95c8f64 {{(pid=62070) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1081.778628] env[62070]: INFO nova.scheduler.client.report [None req-4c1d7cd7-ce0f-4faf-ad4d-d5d0f04dde13 tempest-AttachInterfacesUnderV243Test-1418654099 tempest-AttachInterfacesUnderV243Test-1418654099-project-member] Deleted allocations for instance 7bfda953-ac95-4dce-b7a7-c570eae35582 [ 1081.790796] env[62070]: DEBUG oslo_vmware.api [None req-1fe1cd47-a0f8-4513-949c-45353657dab9 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] Task: {'id': task-1122441, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.089367} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1081.791039] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-1fe1cd47-a0f8-4513-949c-45353657dab9 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] Deleted the datastore file {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1081.791231] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-1fe1cd47-a0f8-4513-949c-45353657dab9 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] [instance: b1137be1-b66b-4eb2-bdbd-1db6173a1f93] Deleted contents of the VM from datastore datastore1 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1081.791410] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-1fe1cd47-a0f8-4513-949c-45353657dab9 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] [instance: b1137be1-b66b-4eb2-bdbd-1db6173a1f93] Instance destroyed {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1082.010325] env[62070]: DEBUG nova.objects.instance [None req-82a4a35c-e092-4df2-9dbe-a73d6f1c48ac tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Lazy-loading 'flavor' on Instance uuid 5cccd79d-d243-49db-8581-718dd594f3b3 {{(pid=62070) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1082.062694] env[62070]: INFO nova.compute.manager [None req-9c23cab7-380d-4f39-96c2-ac04a7d28fbe tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] [instance: 53a1791d-38fd-4721-b82c-2f0922348300] Resuming [ 1082.062694] env[62070]: DEBUG nova.objects.instance [None req-9c23cab7-380d-4f39-96c2-ac04a7d28fbe tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Lazy-loading 'flavor' on Instance uuid 53a1791d-38fd-4721-b82c-2f0922348300 {{(pid=62070) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1082.288842] env[62070]: DEBUG oslo_concurrency.lockutils [None req-4c1d7cd7-ce0f-4faf-ad4d-d5d0f04dde13 tempest-AttachInterfacesUnderV243Test-1418654099 tempest-AttachInterfacesUnderV243Test-1418654099-project-member] Lock "7bfda953-ac95-4dce-b7a7-c570eae35582" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.046s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1082.412582] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2a8f232-a03c-443d-97aa-f6479b11d273 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.421325] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72bcfb79-e968-4c46-bbc3-9939e657936e {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.453127] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae90e959-2f9e-4696-966c-87a285fe990b {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.461577] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-0170505c-1436-4588-8e13-726ab7eb2533 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.476434] env[62070]: DEBUG nova.compute.provider_tree [None req-b09e72a5-05ad-41b6-98a2-d8413ca74b64 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1082.496707] env[62070]: DEBUG nova.compute.manager [req-5b7ba887-5200-4197-a2ab-ee36c2d9b0d4 req-b1c62128-928e-4933-8b2a-6889ba102d49 service nova] [instance: 38573462-18e5-4ba8-ad32-6ebc7bcf7c76] Received event network-vif-plugged-dc2ee731-fdfd-479f-ba45-d8a9e7f50a0e {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1082.496912] env[62070]: DEBUG oslo_concurrency.lockutils [req-5b7ba887-5200-4197-a2ab-ee36c2d9b0d4 req-b1c62128-928e-4933-8b2a-6889ba102d49 service nova] Acquiring lock "38573462-18e5-4ba8-ad32-6ebc7bcf7c76-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1082.497179] env[62070]: DEBUG oslo_concurrency.lockutils [req-5b7ba887-5200-4197-a2ab-ee36c2d9b0d4 req-b1c62128-928e-4933-8b2a-6889ba102d49 service nova] Lock "38573462-18e5-4ba8-ad32-6ebc7bcf7c76-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1082.497364] env[62070]: DEBUG oslo_concurrency.lockutils [req-5b7ba887-5200-4197-a2ab-ee36c2d9b0d4 req-b1c62128-928e-4933-8b2a-6889ba102d49 service nova] Lock "38573462-18e5-4ba8-ad32-6ebc7bcf7c76-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1082.497540] env[62070]: DEBUG nova.compute.manager [req-5b7ba887-5200-4197-a2ab-ee36c2d9b0d4 req-b1c62128-928e-4933-8b2a-6889ba102d49 service nova] [instance: 38573462-18e5-4ba8-ad32-6ebc7bcf7c76] No waiting events found dispatching network-vif-plugged-dc2ee731-fdfd-479f-ba45-d8a9e7f50a0e {{(pid=62070) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1082.497778] env[62070]: WARNING nova.compute.manager [req-5b7ba887-5200-4197-a2ab-ee36c2d9b0d4 req-b1c62128-928e-4933-8b2a-6889ba102d49 service nova] [instance: 38573462-18e5-4ba8-ad32-6ebc7bcf7c76] Received unexpected event network-vif-plugged-dc2ee731-fdfd-479f-ba45-d8a9e7f50a0e for instance with vm_state building and task_state block_device_mapping. 
[ 1082.611889] env[62070]: DEBUG nova.network.neutron [None req-4ba3ce4b-98ba-4ab3-89c2-1c6da7416a72 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 38573462-18e5-4ba8-ad32-6ebc7bcf7c76] Successfully updated port: dc2ee731-fdfd-479f-ba45-d8a9e7f50a0e {{(pid=62070) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1082.828675] env[62070]: DEBUG nova.virt.hardware [None req-1fe1cd47-a0f8-4513-949c-45353657dab9 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T09:21:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T09:21:17Z,direct_url=,disk_format='vmdk',id=43ea607c-7ece-4601-9b11-75c6a16aa7dd,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9d42cb2bbadf40d6b35f237f71234611',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T09:21:18Z,virtual_size=,visibility=), allow threads: False {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1082.829066] env[62070]: DEBUG nova.virt.hardware [None req-1fe1cd47-a0f8-4513-949c-45353657dab9 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] Flavor limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1082.829254] env[62070]: DEBUG nova.virt.hardware [None req-1fe1cd47-a0f8-4513-949c-45353657dab9 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] Image limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1082.829464] env[62070]: DEBUG nova.virt.hardware [None req-1fe1cd47-a0f8-4513-949c-45353657dab9 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] Flavor pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1082.829644] env[62070]: DEBUG nova.virt.hardware [None req-1fe1cd47-a0f8-4513-949c-45353657dab9 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] Image pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1082.829813] env[62070]: DEBUG nova.virt.hardware [None req-1fe1cd47-a0f8-4513-949c-45353657dab9 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1082.830038] env[62070]: DEBUG nova.virt.hardware [None req-1fe1cd47-a0f8-4513-949c-45353657dab9 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1082.830211] env[62070]: DEBUG nova.virt.hardware [None req-1fe1cd47-a0f8-4513-949c-45353657dab9 
tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1082.830384] env[62070]: DEBUG nova.virt.hardware [None req-1fe1cd47-a0f8-4513-949c-45353657dab9 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] Got 1 possible topologies {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1082.830552] env[62070]: DEBUG nova.virt.hardware [None req-1fe1cd47-a0f8-4513-949c-45353657dab9 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1082.830729] env[62070]: DEBUG nova.virt.hardware [None req-1fe1cd47-a0f8-4513-949c-45353657dab9 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1082.831910] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3da9b9f5-7df6-489d-9099-3be35b1c0bf6 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.840410] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16cac80b-2e61-4d2b-a669-05e9a655d956 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.855073] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-1fe1cd47-a0f8-4513-949c-45353657dab9 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] [instance: b1137be1-b66b-4eb2-bdbd-1db6173a1f93] Instance VIF info [] {{(pid=62070) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1082.861017] env[62070]: DEBUG oslo.service.loopingcall [None req-1fe1cd47-a0f8-4513-949c-45353657dab9 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1082.861289] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b1137be1-b66b-4eb2-bdbd-1db6173a1f93] Creating VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1082.861566] env[62070]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-edd0eb2e-6c81-4d72-a7f0-1d7e43b849ba {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.883908] env[62070]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1082.883908] env[62070]: value = "task-1122442" [ 1082.883908] env[62070]: _type = "Task" [ 1082.883908] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1082.894169] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1122442, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1082.979477] env[62070]: DEBUG nova.scheduler.client.report [None req-b09e72a5-05ad-41b6-98a2-d8413ca74b64 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1083.016628] env[62070]: DEBUG oslo_concurrency.lockutils [None req-82a4a35c-e092-4df2-9dbe-a73d6f1c48ac tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Lock "5cccd79d-d243-49db-8581-718dd594f3b3" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 5.231s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1083.077614] env[62070]: DEBUG oslo_concurrency.lockutils [None req-9c23cab7-380d-4f39-96c2-ac04a7d28fbe tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Acquiring lock "refresh_cache-53a1791d-38fd-4721-b82c-2f0922348300" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1083.077852] env[62070]: DEBUG oslo_concurrency.lockutils [None req-9c23cab7-380d-4f39-96c2-ac04a7d28fbe tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Acquired lock "refresh_cache-53a1791d-38fd-4721-b82c-2f0922348300" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1083.077988] env[62070]: DEBUG nova.network.neutron [None req-9c23cab7-380d-4f39-96c2-ac04a7d28fbe tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] [instance: 53a1791d-38fd-4721-b82c-2f0922348300] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1083.114160] env[62070]: DEBUG oslo_concurrency.lockutils [None req-4ba3ce4b-98ba-4ab3-89c2-1c6da7416a72 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Acquiring lock "refresh_cache-38573462-18e5-4ba8-ad32-6ebc7bcf7c76" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1083.114348] env[62070]: DEBUG oslo_concurrency.lockutils [None req-4ba3ce4b-98ba-4ab3-89c2-1c6da7416a72 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Acquired lock "refresh_cache-38573462-18e5-4ba8-ad32-6ebc7bcf7c76" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1083.114455] env[62070]: DEBUG nova.network.neutron [None req-4ba3ce4b-98ba-4ab3-89c2-1c6da7416a72 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 38573462-18e5-4ba8-ad32-6ebc7bcf7c76] Building network info cache for instance {{(pid=62070) _get_instance_nw_info 
/opt/stack/nova/nova/network/neutron.py:2010}} [ 1083.267793] env[62070]: DEBUG oslo_concurrency.lockutils [None req-9b7c6b21-d43d-476d-9f0a-6c85a498dad9 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Acquiring lock "5cccd79d-d243-49db-8581-718dd594f3b3" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1083.267793] env[62070]: DEBUG oslo_concurrency.lockutils [None req-9b7c6b21-d43d-476d-9f0a-6c85a498dad9 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Lock "5cccd79d-d243-49db-8581-718dd594f3b3" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1083.267942] env[62070]: DEBUG oslo_concurrency.lockutils [None req-9b7c6b21-d43d-476d-9f0a-6c85a498dad9 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Acquiring lock "5cccd79d-d243-49db-8581-718dd594f3b3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1083.268192] env[62070]: DEBUG oslo_concurrency.lockutils [None req-9b7c6b21-d43d-476d-9f0a-6c85a498dad9 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Lock "5cccd79d-d243-49db-8581-718dd594f3b3-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1083.268371] env[62070]: DEBUG oslo_concurrency.lockutils [None req-9b7c6b21-d43d-476d-9f0a-6c85a498dad9 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Lock "5cccd79d-d243-49db-8581-718dd594f3b3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1083.270423] env[62070]: INFO nova.compute.manager [None req-9b7c6b21-d43d-476d-9f0a-6c85a498dad9 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] [instance: 5cccd79d-d243-49db-8581-718dd594f3b3] Terminating instance [ 1083.272993] env[62070]: DEBUG nova.compute.manager [None req-9b7c6b21-d43d-476d-9f0a-6c85a498dad9 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] [instance: 5cccd79d-d243-49db-8581-718dd594f3b3] Start destroying the instance on the hypervisor. 
{{(pid=62070) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1083.273206] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-9b7c6b21-d43d-476d-9f0a-6c85a498dad9 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] [instance: 5cccd79d-d243-49db-8581-718dd594f3b3] Destroying instance {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1083.274085] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ce6e040-8b88-45a1-a0e6-fa225211a72e {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.282914] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-9b7c6b21-d43d-476d-9f0a-6c85a498dad9 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] [instance: 5cccd79d-d243-49db-8581-718dd594f3b3] Powering off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1083.283163] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8586a65b-b184-423c-852b-344e79f92d59 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.290903] env[62070]: DEBUG oslo_vmware.api [None req-9b7c6b21-d43d-476d-9f0a-6c85a498dad9 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Waiting for the task: (returnval){ [ 1083.290903] env[62070]: value = "task-1122443" [ 1083.290903] env[62070]: _type = "Task" [ 1083.290903] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1083.299728] env[62070]: DEBUG oslo_vmware.api [None req-9b7c6b21-d43d-476d-9f0a-6c85a498dad9 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Task: {'id': task-1122443, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1083.394883] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1122442, 'name': CreateVM_Task, 'duration_secs': 0.301278} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1083.395074] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b1137be1-b66b-4eb2-bdbd-1db6173a1f93] Created VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1083.397534] env[62070]: DEBUG oslo_concurrency.lockutils [None req-1fe1cd47-a0f8-4513-949c-45353657dab9 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1083.397534] env[62070]: DEBUG oslo_concurrency.lockutils [None req-1fe1cd47-a0f8-4513-949c-45353657dab9 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] Acquired lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1083.397534] env[62070]: DEBUG oslo_concurrency.lockutils [None req-1fe1cd47-a0f8-4513-949c-45353657dab9 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1083.397534] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a7520bf2-43b7-40e3-9756-f8414c7c60fb {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.402414] env[62070]: DEBUG oslo_vmware.api [None req-1fe1cd47-a0f8-4513-949c-45353657dab9 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] Waiting for the task: (returnval){ [ 1083.402414] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]520ea5d0-73f2-3b14-5b7f-c44a10e25caa" [ 1083.402414] env[62070]: _type = "Task" [ 1083.402414] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1083.411739] env[62070]: DEBUG oslo_vmware.api [None req-1fe1cd47-a0f8-4513-949c-45353657dab9 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]520ea5d0-73f2-3b14-5b7f-c44a10e25caa, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1083.484465] env[62070]: DEBUG oslo_concurrency.lockutils [None req-b09e72a5-05ad-41b6-98a2-d8413ca74b64 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.726s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1083.507880] env[62070]: INFO nova.scheduler.client.report [None req-b09e72a5-05ad-41b6-98a2-d8413ca74b64 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Deleted allocations for instance a3c42653-9a4b-42d3-bc38-8d46d95c8f64 [ 1083.660969] env[62070]: DEBUG nova.network.neutron [None req-4ba3ce4b-98ba-4ab3-89c2-1c6da7416a72 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 38573462-18e5-4ba8-ad32-6ebc7bcf7c76] Instance cache missing network info. {{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1083.762383] env[62070]: DEBUG nova.compute.manager [None req-4ba3ce4b-98ba-4ab3-89c2-1c6da7416a72 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 38573462-18e5-4ba8-ad32-6ebc7bcf7c76] Start spawning the instance on the hypervisor. {{(pid=62070) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1083.762383] env[62070]: DEBUG nova.virt.hardware [None req-4ba3ce4b-98ba-4ab3-89c2-1c6da7416a72 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T09:21:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1083.762383] env[62070]: DEBUG nova.virt.hardware [None req-4ba3ce4b-98ba-4ab3-89c2-1c6da7416a72 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Flavor limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1083.762383] env[62070]: DEBUG nova.virt.hardware [None req-4ba3ce4b-98ba-4ab3-89c2-1c6da7416a72 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Image limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1083.762383] env[62070]: DEBUG nova.virt.hardware [None req-4ba3ce4b-98ba-4ab3-89c2-1c6da7416a72 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Flavor pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1083.762383] env[62070]: DEBUG nova.virt.hardware [None req-4ba3ce4b-98ba-4ab3-89c2-1c6da7416a72 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] 
Image pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1083.762383] env[62070]: DEBUG nova.virt.hardware [None req-4ba3ce4b-98ba-4ab3-89c2-1c6da7416a72 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1083.762383] env[62070]: DEBUG nova.virt.hardware [None req-4ba3ce4b-98ba-4ab3-89c2-1c6da7416a72 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1083.762383] env[62070]: DEBUG nova.virt.hardware [None req-4ba3ce4b-98ba-4ab3-89c2-1c6da7416a72 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1083.762383] env[62070]: DEBUG nova.virt.hardware [None req-4ba3ce4b-98ba-4ab3-89c2-1c6da7416a72 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Got 1 possible topologies {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1083.763165] env[62070]: DEBUG nova.virt.hardware [None req-4ba3ce4b-98ba-4ab3-89c2-1c6da7416a72 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1083.763165] env[62070]: DEBUG nova.virt.hardware [None req-4ba3ce4b-98ba-4ab3-89c2-1c6da7416a72 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1083.764363] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4749434-1875-4556-b14d-23b130f3ac32 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.773071] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51723b66-9d71-43a2-b65f-ea54d706b20f {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.791339] env[62070]: DEBUG nova.network.neutron [None req-9c23cab7-380d-4f39-96c2-ac04a7d28fbe tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] [instance: 53a1791d-38fd-4721-b82c-2f0922348300] Updating instance_info_cache with network_info: [{"id": "2c6759e4-b6e7-4b67-b06d-d38d6043d3b2", "address": "fa:16:3e:11:96:b8", "network": {"id": "6a62b79f-a98b-4518-86cb-facc7b77da1d", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-2107556336-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, 
"floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "772f7fcee5f44b899b6df797e1ed5ddd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78e1ebb0-0130-446b-bf73-a0e59bbb95cc", "external-id": "nsx-vlan-transportzone-414", "segmentation_id": 414, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2c6759e4-b6", "ovs_interfaceid": "2c6759e4-b6e7-4b67-b06d-d38d6043d3b2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1083.807306] env[62070]: DEBUG oslo_vmware.api [None req-9b7c6b21-d43d-476d-9f0a-6c85a498dad9 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Task: {'id': task-1122443, 'name': PowerOffVM_Task, 'duration_secs': 0.210741} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1083.810054] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-9b7c6b21-d43d-476d-9f0a-6c85a498dad9 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] [instance: 5cccd79d-d243-49db-8581-718dd594f3b3] Powered off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1083.810250] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-9b7c6b21-d43d-476d-9f0a-6c85a498dad9 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] [instance: 5cccd79d-d243-49db-8581-718dd594f3b3] Unregistering the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1083.811580] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-52de1073-2b9d-401d-8a74-f4d938ec88d7 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.885223] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-9b7c6b21-d43d-476d-9f0a-6c85a498dad9 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] [instance: 5cccd79d-d243-49db-8581-718dd594f3b3] Unregistered the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1083.885494] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-9b7c6b21-d43d-476d-9f0a-6c85a498dad9 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] [instance: 5cccd79d-d243-49db-8581-718dd594f3b3] Deleting contents of the VM from datastore datastore2 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1083.885721] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-9b7c6b21-d43d-476d-9f0a-6c85a498dad9 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Deleting the datastore file [datastore2] 5cccd79d-d243-49db-8581-718dd594f3b3 {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1083.886103] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ec85725e-c32a-4855-8fa5-864753bd1f16 {{(pid=62070) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.893519] env[62070]: DEBUG oslo_vmware.api [None req-9b7c6b21-d43d-476d-9f0a-6c85a498dad9 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Waiting for the task: (returnval){ [ 1083.893519] env[62070]: value = "task-1122445" [ 1083.893519] env[62070]: _type = "Task" [ 1083.893519] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1083.907895] env[62070]: DEBUG oslo_vmware.api [None req-9b7c6b21-d43d-476d-9f0a-6c85a498dad9 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Task: {'id': task-1122445, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1083.923076] env[62070]: DEBUG oslo_vmware.api [None req-1fe1cd47-a0f8-4513-949c-45353657dab9 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]520ea5d0-73f2-3b14-5b7f-c44a10e25caa, 'name': SearchDatastore_Task, 'duration_secs': 0.01058} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1083.923076] env[62070]: DEBUG oslo_concurrency.lockutils [None req-1fe1cd47-a0f8-4513-949c-45353657dab9 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] Releasing lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1083.923076] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-1fe1cd47-a0f8-4513-949c-45353657dab9 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] [instance: b1137be1-b66b-4eb2-bdbd-1db6173a1f93] Processing image 43ea607c-7ece-4601-9b11-75c6a16aa7dd {{(pid=62070) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1083.923076] env[62070]: DEBUG oslo_concurrency.lockutils [None req-1fe1cd47-a0f8-4513-949c-45353657dab9 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1083.923076] env[62070]: DEBUG oslo_concurrency.lockutils [None req-1fe1cd47-a0f8-4513-949c-45353657dab9 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] Acquired lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1083.923076] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-1fe1cd47-a0f8-4513-949c-45353657dab9 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1083.923076] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-102304b6-8e89-47d9-bd26-cf3541d6b9be {{(pid=62070) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.931484] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-1fe1cd47-a0f8-4513-949c-45353657dab9 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1083.931745] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-1fe1cd47-a0f8-4513-949c-45353657dab9 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62070) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1083.932700] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cc82ed8f-285e-4d8c-9029-10ba94ab1426 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.940657] env[62070]: DEBUG oslo_vmware.api [None req-1fe1cd47-a0f8-4513-949c-45353657dab9 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] Waiting for the task: (returnval){ [ 1083.940657] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]5218de65-9c9b-b28d-8f86-bd978438e1b0" [ 1083.940657] env[62070]: _type = "Task" [ 1083.940657] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1083.949912] env[62070]: DEBUG oslo_vmware.api [None req-1fe1cd47-a0f8-4513-949c-45353657dab9 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]5218de65-9c9b-b28d-8f86-bd978438e1b0, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1084.015856] env[62070]: DEBUG oslo_concurrency.lockutils [None req-b09e72a5-05ad-41b6-98a2-d8413ca74b64 tempest-ServersTestJSON-1318385680 tempest-ServersTestJSON-1318385680-project-member] Lock "a3c42653-9a4b-42d3-bc38-8d46d95c8f64" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.544s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1084.263229] env[62070]: DEBUG nova.network.neutron [None req-4ba3ce4b-98ba-4ab3-89c2-1c6da7416a72 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 38573462-18e5-4ba8-ad32-6ebc7bcf7c76] Updating instance_info_cache with network_info: [{"id": "dc2ee731-fdfd-479f-ba45-d8a9e7f50a0e", "address": "fa:16:3e:19:97:b8", "network": {"id": "0d81bd04-b549-4e1f-97a2-0a0b9391dd3f", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-108214409-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c91e5eeeeb1742f499b2edaf76a93a3b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cf5bfbae-a882-4d34-be33-b31e274b3077", "external-id": "nsx-vlan-transportzone-556", "segmentation_id": 556, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdc2ee731-fd", "ovs_interfaceid": "dc2ee731-fdfd-479f-ba45-d8a9e7f50a0e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1084.300657] env[62070]: DEBUG oslo_concurrency.lockutils [None req-9c23cab7-380d-4f39-96c2-ac04a7d28fbe tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Releasing lock "refresh_cache-53a1791d-38fd-4721-b82c-2f0922348300" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1084.301560] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d4c6320-f954-4bb5-8899-6cdb29a83a88 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.311033] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-9c23cab7-380d-4f39-96c2-ac04a7d28fbe tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] [instance: 53a1791d-38fd-4721-b82c-2f0922348300] Resuming the VM {{(pid=62070) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1184}} [ 1084.311033] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e78c1e56-9e5d-41f6-afd7-1939aa1881c3 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.317631] env[62070]: DEBUG oslo_vmware.api [None req-9c23cab7-380d-4f39-96c2-ac04a7d28fbe tempest-ServersNegativeTestJSON-2028058905 
tempest-ServersNegativeTestJSON-2028058905-project-member] Waiting for the task: (returnval){ [ 1084.317631] env[62070]: value = "task-1122446" [ 1084.317631] env[62070]: _type = "Task" [ 1084.317631] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1084.326725] env[62070]: DEBUG oslo_vmware.api [None req-9c23cab7-380d-4f39-96c2-ac04a7d28fbe tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Task: {'id': task-1122446, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1084.404362] env[62070]: DEBUG oslo_vmware.api [None req-9b7c6b21-d43d-476d-9f0a-6c85a498dad9 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Task: {'id': task-1122445, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.14006} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1084.404646] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-9b7c6b21-d43d-476d-9f0a-6c85a498dad9 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Deleted the datastore file {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1084.404833] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-9b7c6b21-d43d-476d-9f0a-6c85a498dad9 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] [instance: 5cccd79d-d243-49db-8581-718dd594f3b3] Deleted contents of the VM from datastore datastore2 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1084.405231] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-9b7c6b21-d43d-476d-9f0a-6c85a498dad9 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] [instance: 5cccd79d-d243-49db-8581-718dd594f3b3] Instance destroyed {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1084.405231] env[62070]: INFO nova.compute.manager [None req-9b7c6b21-d43d-476d-9f0a-6c85a498dad9 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] [instance: 5cccd79d-d243-49db-8581-718dd594f3b3] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1084.405457] env[62070]: DEBUG oslo.service.loopingcall [None req-9b7c6b21-d43d-476d-9f0a-6c85a498dad9 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1084.405674] env[62070]: DEBUG nova.compute.manager [-] [instance: 5cccd79d-d243-49db-8581-718dd594f3b3] Deallocating network for instance {{(pid=62070) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1084.405759] env[62070]: DEBUG nova.network.neutron [-] [instance: 5cccd79d-d243-49db-8581-718dd594f3b3] deallocate_for_instance() {{(pid=62070) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1084.451986] env[62070]: DEBUG oslo_vmware.api [None req-1fe1cd47-a0f8-4513-949c-45353657dab9 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]5218de65-9c9b-b28d-8f86-bd978438e1b0, 'name': SearchDatastore_Task, 'duration_secs': 0.009105} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1084.452724] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4c381d55-db77-43fe-90d5-3257498eb908 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.458675] env[62070]: DEBUG oslo_vmware.api [None req-1fe1cd47-a0f8-4513-949c-45353657dab9 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] Waiting for the task: (returnval){ [ 1084.458675] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]5207e7d6-8589-a483-78f7-c94988ca4649" [ 1084.458675] env[62070]: _type = "Task" [ 1084.458675] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1084.466426] env[62070]: DEBUG oslo_vmware.api [None req-1fe1cd47-a0f8-4513-949c-45353657dab9 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]5207e7d6-8589-a483-78f7-c94988ca4649, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1084.526768] env[62070]: DEBUG nova.compute.manager [req-2702652d-4548-42c6-946c-bcc9ef9f2349 req-50092ac0-b23a-4c75-a2da-9e992e450e77 service nova] [instance: 38573462-18e5-4ba8-ad32-6ebc7bcf7c76] Received event network-changed-dc2ee731-fdfd-479f-ba45-d8a9e7f50a0e {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1084.527040] env[62070]: DEBUG nova.compute.manager [req-2702652d-4548-42c6-946c-bcc9ef9f2349 req-50092ac0-b23a-4c75-a2da-9e992e450e77 service nova] [instance: 38573462-18e5-4ba8-ad32-6ebc7bcf7c76] Refreshing instance network info cache due to event network-changed-dc2ee731-fdfd-479f-ba45-d8a9e7f50a0e. 
{{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1084.527182] env[62070]: DEBUG oslo_concurrency.lockutils [req-2702652d-4548-42c6-946c-bcc9ef9f2349 req-50092ac0-b23a-4c75-a2da-9e992e450e77 service nova] Acquiring lock "refresh_cache-38573462-18e5-4ba8-ad32-6ebc7bcf7c76" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1084.544358] env[62070]: DEBUG oslo_concurrency.lockutils [None req-cf705385-13a6-4f62-bb38-dfdd6bb83d6a tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Acquiring lock "000a67eb-9535-4da6-816a-b61126f11509" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1084.544656] env[62070]: DEBUG oslo_concurrency.lockutils [None req-cf705385-13a6-4f62-bb38-dfdd6bb83d6a tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Lock "000a67eb-9535-4da6-816a-b61126f11509" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1084.544917] env[62070]: DEBUG oslo_concurrency.lockutils [None req-cf705385-13a6-4f62-bb38-dfdd6bb83d6a tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Acquiring lock "000a67eb-9535-4da6-816a-b61126f11509-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1084.545121] env[62070]: DEBUG oslo_concurrency.lockutils [None req-cf705385-13a6-4f62-bb38-dfdd6bb83d6a tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Lock "000a67eb-9535-4da6-816a-b61126f11509-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1084.545337] env[62070]: DEBUG oslo_concurrency.lockutils [None req-cf705385-13a6-4f62-bb38-dfdd6bb83d6a tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Lock "000a67eb-9535-4da6-816a-b61126f11509-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1084.547473] env[62070]: INFO nova.compute.manager [None req-cf705385-13a6-4f62-bb38-dfdd6bb83d6a tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] [instance: 000a67eb-9535-4da6-816a-b61126f11509] Terminating instance [ 1084.550073] env[62070]: DEBUG nova.compute.manager [None req-cf705385-13a6-4f62-bb38-dfdd6bb83d6a tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] [instance: 000a67eb-9535-4da6-816a-b61126f11509] Start destroying the instance on the hypervisor. 
{{(pid=62070) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1084.550276] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-cf705385-13a6-4f62-bb38-dfdd6bb83d6a tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] [instance: 000a67eb-9535-4da6-816a-b61126f11509] Destroying instance {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1084.551142] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e8d5b7e-cb60-44d1-9dc6-96a8711de2cb {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.559602] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-cf705385-13a6-4f62-bb38-dfdd6bb83d6a tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] [instance: 000a67eb-9535-4da6-816a-b61126f11509] Powering off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1084.559853] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a0306bb2-fc06-4343-8c2a-d6db4c406fd1 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.567592] env[62070]: DEBUG oslo_vmware.api [None req-cf705385-13a6-4f62-bb38-dfdd6bb83d6a tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Waiting for the task: (returnval){ [ 1084.567592] env[62070]: value = "task-1122447" [ 1084.567592] env[62070]: _type = "Task" [ 1084.567592] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1084.577611] env[62070]: DEBUG oslo_vmware.api [None req-cf705385-13a6-4f62-bb38-dfdd6bb83d6a tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Task: {'id': task-1122447, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1084.767151] env[62070]: DEBUG oslo_concurrency.lockutils [None req-4ba3ce4b-98ba-4ab3-89c2-1c6da7416a72 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Releasing lock "refresh_cache-38573462-18e5-4ba8-ad32-6ebc7bcf7c76" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1084.767575] env[62070]: DEBUG nova.compute.manager [None req-4ba3ce4b-98ba-4ab3-89c2-1c6da7416a72 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 38573462-18e5-4ba8-ad32-6ebc7bcf7c76] Instance network_info: |[{"id": "dc2ee731-fdfd-479f-ba45-d8a9e7f50a0e", "address": "fa:16:3e:19:97:b8", "network": {"id": "0d81bd04-b549-4e1f-97a2-0a0b9391dd3f", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-108214409-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c91e5eeeeb1742f499b2edaf76a93a3b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cf5bfbae-a882-4d34-be33-b31e274b3077", "external-id": "nsx-vlan-transportzone-556", "segmentation_id": 556, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdc2ee731-fd", "ovs_interfaceid": "dc2ee731-fdfd-479f-ba45-d8a9e7f50a0e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1084.767925] env[62070]: DEBUG oslo_concurrency.lockutils [req-2702652d-4548-42c6-946c-bcc9ef9f2349 req-50092ac0-b23a-4c75-a2da-9e992e450e77 service nova] Acquired lock "refresh_cache-38573462-18e5-4ba8-ad32-6ebc7bcf7c76" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1084.768134] env[62070]: DEBUG nova.network.neutron [req-2702652d-4548-42c6-946c-bcc9ef9f2349 req-50092ac0-b23a-4c75-a2da-9e992e450e77 service nova] [instance: 38573462-18e5-4ba8-ad32-6ebc7bcf7c76] Refreshing network info cache for port dc2ee731-fdfd-479f-ba45-d8a9e7f50a0e {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1084.769618] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-4ba3ce4b-98ba-4ab3-89c2-1c6da7416a72 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 38573462-18e5-4ba8-ad32-6ebc7bcf7c76] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:19:97:b8', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'cf5bfbae-a882-4d34-be33-b31e274b3077', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'dc2ee731-fdfd-479f-ba45-d8a9e7f50a0e', 'vif_model': 'vmxnet3'}] {{(pid=62070) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1084.779187] env[62070]: DEBUG oslo.service.loopingcall [None req-4ba3ce4b-98ba-4ab3-89c2-1c6da7416a72 tempest-ServerActionsTestOtherA-1868654732 
tempest-ServerActionsTestOtherA-1868654732-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1084.783299] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 38573462-18e5-4ba8-ad32-6ebc7bcf7c76] Creating VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1084.783299] env[62070]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f6f97a34-c25b-40ca-8b6f-df5b0a6f8ed8 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.810951] env[62070]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1084.810951] env[62070]: value = "task-1122448" [ 1084.810951] env[62070]: _type = "Task" [ 1084.810951] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1084.825516] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1122448, 'name': CreateVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1084.843109] env[62070]: DEBUG oslo_vmware.api [None req-9c23cab7-380d-4f39-96c2-ac04a7d28fbe tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Task: {'id': task-1122446, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1084.972810] env[62070]: DEBUG oslo_vmware.api [None req-1fe1cd47-a0f8-4513-949c-45353657dab9 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]5207e7d6-8589-a483-78f7-c94988ca4649, 'name': SearchDatastore_Task, 'duration_secs': 0.023646} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1084.972810] env[62070]: DEBUG oslo_concurrency.lockutils [None req-1fe1cd47-a0f8-4513-949c-45353657dab9 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] Releasing lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1084.972810] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-1fe1cd47-a0f8-4513-949c-45353657dab9 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore1] b1137be1-b66b-4eb2-bdbd-1db6173a1f93/b1137be1-b66b-4eb2-bdbd-1db6173a1f93.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1084.972810] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-680ce995-175d-440d-9043-4a5918165cd5 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.981937] env[62070]: DEBUG oslo_vmware.api [None req-1fe1cd47-a0f8-4513-949c-45353657dab9 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] Waiting for the task: (returnval){ [ 1084.981937] env[62070]: value = "task-1122449" [ 1084.981937] env[62070]: _type = "Task" [ 1084.981937] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1084.994087] env[62070]: DEBUG oslo_vmware.api [None req-1fe1cd47-a0f8-4513-949c-45353657dab9 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] Task: {'id': task-1122449, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1085.083009] env[62070]: DEBUG oslo_vmware.api [None req-cf705385-13a6-4f62-bb38-dfdd6bb83d6a tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Task: {'id': task-1122447, 'name': PowerOffVM_Task, 'duration_secs': 0.298711} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1085.083728] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-cf705385-13a6-4f62-bb38-dfdd6bb83d6a tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] [instance: 000a67eb-9535-4da6-816a-b61126f11509] Powered off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1085.084567] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-cf705385-13a6-4f62-bb38-dfdd6bb83d6a tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] [instance: 000a67eb-9535-4da6-816a-b61126f11509] Unregistering the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1085.085214] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7967f243-c42a-4223-adfe-8dc1336263fd {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.162511] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-cf705385-13a6-4f62-bb38-dfdd6bb83d6a tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] [instance: 000a67eb-9535-4da6-816a-b61126f11509] Unregistered the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1085.163203] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-cf705385-13a6-4f62-bb38-dfdd6bb83d6a tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] [instance: 000a67eb-9535-4da6-816a-b61126f11509] Deleting contents of the VM from datastore datastore2 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1085.163521] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-cf705385-13a6-4f62-bb38-dfdd6bb83d6a tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Deleting the datastore file [datastore2] 000a67eb-9535-4da6-816a-b61126f11509 {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1085.163851] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e6e261c7-f22a-4fe1-9535-7da075ed6380 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.174091] env[62070]: DEBUG oslo_vmware.api [None req-cf705385-13a6-4f62-bb38-dfdd6bb83d6a tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Waiting for the task: (returnval){ [ 1085.174091] env[62070]: value = "task-1122451" [ 1085.174091] env[62070]: _type = "Task" [ 1085.174091] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1085.186478] env[62070]: DEBUG oslo_vmware.api [None req-cf705385-13a6-4f62-bb38-dfdd6bb83d6a tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Task: {'id': task-1122451, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1085.302631] env[62070]: DEBUG nova.compute.manager [req-74e17987-d9e2-4b74-bb15-4df1820f6f68 req-97648525-603c-4fe2-ab8f-1e31620ed6ea service nova] [instance: 5cccd79d-d243-49db-8581-718dd594f3b3] Received event network-vif-deleted-a423c3f9-0b61-4375-9131-a98f082c1193 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1085.302995] env[62070]: INFO nova.compute.manager [req-74e17987-d9e2-4b74-bb15-4df1820f6f68 req-97648525-603c-4fe2-ab8f-1e31620ed6ea service nova] [instance: 5cccd79d-d243-49db-8581-718dd594f3b3] Neutron deleted interface a423c3f9-0b61-4375-9131-a98f082c1193; detaching it from the instance and deleting it from the info cache [ 1085.302995] env[62070]: DEBUG nova.network.neutron [req-74e17987-d9e2-4b74-bb15-4df1820f6f68 req-97648525-603c-4fe2-ab8f-1e31620ed6ea service nova] [instance: 5cccd79d-d243-49db-8581-718dd594f3b3] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1085.328607] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1122448, 'name': CreateVM_Task, 'duration_secs': 0.441999} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1085.333012] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 38573462-18e5-4ba8-ad32-6ebc7bcf7c76] Created VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1085.334325] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-4ba3ce4b-98ba-4ab3-89c2-1c6da7416a72 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 38573462-18e5-4ba8-ad32-6ebc7bcf7c76] Block device information present: {'root_device_name': '/dev/sda', 'image': [], 'ephemerals': [], 'block_device_mapping': [{'guest_format': None, 'boot_index': 0, 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-245511', 'volume_id': 'e82aa618-eed9-4876-a9ff-8289ed9697af', 'name': 'volume-e82aa618-eed9-4876-a9ff-8289ed9697af', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '38573462-18e5-4ba8-ad32-6ebc7bcf7c76', 'attached_at': '', 'detached_at': '', 'volume_id': 'e82aa618-eed9-4876-a9ff-8289ed9697af', 'serial': 'e82aa618-eed9-4876-a9ff-8289ed9697af'}, 'device_type': None, 'mount_device': '/dev/sda', 'disk_bus': None, 'delete_on_termination': True, 'attachment_id': '95fba64c-4387-4519-9f89-3b213139bcba', 'volume_type': None}], 'swap': None} {{(pid=62070) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 1085.334898] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-4ba3ce4b-98ba-4ab3-89c2-1c6da7416a72 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 38573462-18e5-4ba8-ad32-6ebc7bcf7c76] Root volume attach. 
Driver type: vmdk {{(pid=62070) attach_root_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:661}} [ 1085.337039] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7197c532-9ded-465f-bfaf-d27e4cf0be00 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.345574] env[62070]: DEBUG oslo_vmware.api [None req-9c23cab7-380d-4f39-96c2-ac04a7d28fbe tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Task: {'id': task-1122446, 'name': PowerOnVM_Task, 'duration_secs': 0.577089} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1085.346857] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-9c23cab7-380d-4f39-96c2-ac04a7d28fbe tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] [instance: 53a1791d-38fd-4721-b82c-2f0922348300] Resumed the VM {{(pid=62070) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1189}} [ 1085.347299] env[62070]: DEBUG nova.compute.manager [None req-9c23cab7-380d-4f39-96c2-ac04a7d28fbe tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] [instance: 53a1791d-38fd-4721-b82c-2f0922348300] Checking state {{(pid=62070) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1085.350940] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b9fb7a1-b676-4025-af05-dacb9086909b {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.362028] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63aa2dbd-8e44-4463-b193-37e3c3a944b9 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.372876] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc5cf124-384e-4eb8-973f-fa95052a7388 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.383853] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.RelocateVM_Task with opID=oslo.vmware-45e09032-b6cb-4f67-9681-4593edb36b47 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.393496] env[62070]: DEBUG oslo_vmware.api [None req-4ba3ce4b-98ba-4ab3-89c2-1c6da7416a72 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Waiting for the task: (returnval){ [ 1085.393496] env[62070]: value = "task-1122452" [ 1085.393496] env[62070]: _type = "Task" [ 1085.393496] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1085.411979] env[62070]: DEBUG oslo_vmware.api [None req-4ba3ce4b-98ba-4ab3-89c2-1c6da7416a72 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': task-1122452, 'name': RelocateVM_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1085.494782] env[62070]: DEBUG oslo_vmware.api [None req-1fe1cd47-a0f8-4513-949c-45353657dab9 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] Task: {'id': task-1122449, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1085.699141] env[62070]: DEBUG oslo_vmware.api [None req-cf705385-13a6-4f62-bb38-dfdd6bb83d6a tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Task: {'id': task-1122451, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.338188} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1085.699848] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-cf705385-13a6-4f62-bb38-dfdd6bb83d6a tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Deleted the datastore file {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1085.700337] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-cf705385-13a6-4f62-bb38-dfdd6bb83d6a tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] [instance: 000a67eb-9535-4da6-816a-b61126f11509] Deleted contents of the VM from datastore datastore2 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1085.701256] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-cf705385-13a6-4f62-bb38-dfdd6bb83d6a tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] [instance: 000a67eb-9535-4da6-816a-b61126f11509] Instance destroyed {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1085.701695] env[62070]: INFO nova.compute.manager [None req-cf705385-13a6-4f62-bb38-dfdd6bb83d6a tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] [instance: 000a67eb-9535-4da6-816a-b61126f11509] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1085.702226] env[62070]: DEBUG oslo.service.loopingcall [None req-cf705385-13a6-4f62-bb38-dfdd6bb83d6a tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1085.702650] env[62070]: DEBUG nova.compute.manager [-] [instance: 000a67eb-9535-4da6-816a-b61126f11509] Deallocating network for instance {{(pid=62070) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1085.704020] env[62070]: DEBUG nova.network.neutron [-] [instance: 000a67eb-9535-4da6-816a-b61126f11509] deallocate_for_instance() {{(pid=62070) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1085.770472] env[62070]: DEBUG nova.network.neutron [-] [instance: 5cccd79d-d243-49db-8581-718dd594f3b3] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1085.806172] env[62070]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-28a25db6-bd31-41a1-8505-feb76badd3a3 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.820772] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20f09100-3e85-4f70-87d7-1aa6fd50035f {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.868834] env[62070]: DEBUG nova.compute.manager [req-74e17987-d9e2-4b74-bb15-4df1820f6f68 req-97648525-603c-4fe2-ab8f-1e31620ed6ea service nova] [instance: 5cccd79d-d243-49db-8581-718dd594f3b3] Detach interface failed, port_id=a423c3f9-0b61-4375-9131-a98f082c1193, reason: Instance 5cccd79d-d243-49db-8581-718dd594f3b3 could not be found. {{(pid=62070) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1085.872978] env[62070]: DEBUG nova.network.neutron [req-2702652d-4548-42c6-946c-bcc9ef9f2349 req-50092ac0-b23a-4c75-a2da-9e992e450e77 service nova] [instance: 38573462-18e5-4ba8-ad32-6ebc7bcf7c76] Updated VIF entry in instance network info cache for port dc2ee731-fdfd-479f-ba45-d8a9e7f50a0e. 
{{(pid=62070) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1085.872978] env[62070]: DEBUG nova.network.neutron [req-2702652d-4548-42c6-946c-bcc9ef9f2349 req-50092ac0-b23a-4c75-a2da-9e992e450e77 service nova] [instance: 38573462-18e5-4ba8-ad32-6ebc7bcf7c76] Updating instance_info_cache with network_info: [{"id": "dc2ee731-fdfd-479f-ba45-d8a9e7f50a0e", "address": "fa:16:3e:19:97:b8", "network": {"id": "0d81bd04-b549-4e1f-97a2-0a0b9391dd3f", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-108214409-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c91e5eeeeb1742f499b2edaf76a93a3b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cf5bfbae-a882-4d34-be33-b31e274b3077", "external-id": "nsx-vlan-transportzone-556", "segmentation_id": 556, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdc2ee731-fd", "ovs_interfaceid": "dc2ee731-fdfd-479f-ba45-d8a9e7f50a0e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1085.909560] env[62070]: DEBUG oslo_vmware.api [None req-4ba3ce4b-98ba-4ab3-89c2-1c6da7416a72 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': task-1122452, 'name': RelocateVM_Task} progress is 42%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1085.995806] env[62070]: DEBUG oslo_vmware.api [None req-1fe1cd47-a0f8-4513-949c-45353657dab9 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] Task: {'id': task-1122449, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.539823} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1085.996327] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-1fe1cd47-a0f8-4513-949c-45353657dab9 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore1] b1137be1-b66b-4eb2-bdbd-1db6173a1f93/b1137be1-b66b-4eb2-bdbd-1db6173a1f93.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 1085.996709] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-1fe1cd47-a0f8-4513-949c-45353657dab9 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] [instance: b1137be1-b66b-4eb2-bdbd-1db6173a1f93] Extending root virtual disk to 1048576 {{(pid=62070) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1085.997019] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-59a5201b-196d-42ae-96c0-72d6ef949c29 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.008843] env[62070]: DEBUG oslo_vmware.api [None req-1fe1cd47-a0f8-4513-949c-45353657dab9 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] Waiting for the task: (returnval){ [ 1086.008843] env[62070]: value = "task-1122453" [ 1086.008843] env[62070]: _type = "Task" [ 1086.008843] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1086.023456] env[62070]: DEBUG oslo_vmware.api [None req-1fe1cd47-a0f8-4513-949c-45353657dab9 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] Task: {'id': task-1122453, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1086.273341] env[62070]: INFO nova.compute.manager [-] [instance: 5cccd79d-d243-49db-8581-718dd594f3b3] Took 1.87 seconds to deallocate network for instance. [ 1086.375481] env[62070]: DEBUG oslo_concurrency.lockutils [req-2702652d-4548-42c6-946c-bcc9ef9f2349 req-50092ac0-b23a-4c75-a2da-9e992e450e77 service nova] Releasing lock "refresh_cache-38573462-18e5-4ba8-ad32-6ebc7bcf7c76" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1086.407016] env[62070]: DEBUG oslo_vmware.api [None req-4ba3ce4b-98ba-4ab3-89c2-1c6da7416a72 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': task-1122452, 'name': RelocateVM_Task} progress is 53%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1086.524454] env[62070]: DEBUG oslo_vmware.api [None req-1fe1cd47-a0f8-4513-949c-45353657dab9 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] Task: {'id': task-1122453, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.108811} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1086.524826] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-1fe1cd47-a0f8-4513-949c-45353657dab9 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] [instance: b1137be1-b66b-4eb2-bdbd-1db6173a1f93] Extended root virtual disk {{(pid=62070) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1086.526319] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fa95d80-e219-4fc7-a55b-5e224da2d7ca {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.555549] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-1fe1cd47-a0f8-4513-949c-45353657dab9 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] [instance: b1137be1-b66b-4eb2-bdbd-1db6173a1f93] Reconfiguring VM instance instance-0000006a to attach disk [datastore1] b1137be1-b66b-4eb2-bdbd-1db6173a1f93/b1137be1-b66b-4eb2-bdbd-1db6173a1f93.vmdk or device None with type sparse {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1086.555958] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ce067180-9ab5-4483-b584-c19fffdadf4b {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.584606] env[62070]: DEBUG nova.compute.manager [req-abff4f74-6924-4ce6-a06f-992b85067991 req-0e2aeaa0-850d-4a4f-ae8f-bf2d4d277136 service nova] [instance: 000a67eb-9535-4da6-816a-b61126f11509] Received event network-vif-deleted-57c0b4a0-14e4-4b19-b7ee-16842d63fd74 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1086.584805] env[62070]: INFO nova.compute.manager [req-abff4f74-6924-4ce6-a06f-992b85067991 req-0e2aeaa0-850d-4a4f-ae8f-bf2d4d277136 service nova] [instance: 000a67eb-9535-4da6-816a-b61126f11509] Neutron deleted interface 57c0b4a0-14e4-4b19-b7ee-16842d63fd74; detaching it from the instance and deleting it from the info cache [ 1086.584978] env[62070]: DEBUG nova.network.neutron [req-abff4f74-6924-4ce6-a06f-992b85067991 req-0e2aeaa0-850d-4a4f-ae8f-bf2d4d277136 service nova] [instance: 000a67eb-9535-4da6-816a-b61126f11509] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1086.587789] env[62070]: DEBUG oslo_vmware.api [None req-1fe1cd47-a0f8-4513-949c-45353657dab9 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] Waiting for the task: (returnval){ [ 1086.587789] env[62070]: value = "task-1122454" [ 1086.587789] env[62070]: _type = "Task" [ 1086.587789] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1086.604547] env[62070]: DEBUG oslo_vmware.api [None req-1fe1cd47-a0f8-4513-949c-45353657dab9 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] Task: {'id': task-1122454, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1086.712640] env[62070]: DEBUG nova.network.neutron [-] [instance: 000a67eb-9535-4da6-816a-b61126f11509] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1086.784488] env[62070]: DEBUG oslo_concurrency.lockutils [None req-9b7c6b21-d43d-476d-9f0a-6c85a498dad9 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1086.784488] env[62070]: DEBUG oslo_concurrency.lockutils [None req-9b7c6b21-d43d-476d-9f0a-6c85a498dad9 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1086.784488] env[62070]: DEBUG nova.objects.instance [None req-9b7c6b21-d43d-476d-9f0a-6c85a498dad9 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Lazy-loading 'resources' on Instance uuid 5cccd79d-d243-49db-8581-718dd594f3b3 {{(pid=62070) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1086.907244] env[62070]: DEBUG oslo_vmware.api [None req-4ba3ce4b-98ba-4ab3-89c2-1c6da7416a72 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': task-1122452, 'name': RelocateVM_Task} progress is 65%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1087.089092] env[62070]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d5677c55-e724-4dbb-b847-e94197b3ea84 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.102528] env[62070]: DEBUG oslo_vmware.api [None req-1fe1cd47-a0f8-4513-949c-45353657dab9 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] Task: {'id': task-1122454, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1087.107208] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-632f808a-e2f5-4397-be0b-9e2c466943d2 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.153990] env[62070]: DEBUG nova.compute.manager [req-abff4f74-6924-4ce6-a06f-992b85067991 req-0e2aeaa0-850d-4a4f-ae8f-bf2d4d277136 service nova] [instance: 000a67eb-9535-4da6-816a-b61126f11509] Detach interface failed, port_id=57c0b4a0-14e4-4b19-b7ee-16842d63fd74, reason: Instance 000a67eb-9535-4da6-816a-b61126f11509 could not be found. {{(pid=62070) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1087.218310] env[62070]: INFO nova.compute.manager [-] [instance: 000a67eb-9535-4da6-816a-b61126f11509] Took 1.51 seconds to deallocate network for instance. 
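The repeated "Waiting for the task: (returnval){ ... } to complete", "progress is N%" and "completed successfully" records above all come from the same task-polling loop in oslo.vmware (wait_for_task at oslo_vmware/api.py:397, _poll_task at :434/:444). The snippet below is a minimal, hypothetical Python sketch of that pattern only, not the actual oslo_vmware.api implementation; the session.get_task_info helper and the field names on the returned info object are assumptions made for illustration.

# Hypothetical sketch of the vCenter task-polling pattern seen in the log.
# "session" is any object exposing a get_task_info(task_ref) helper
# (an assumption for this sketch, not oslo.vmware's real API surface).
import time

POLL_INTERVAL = 0.5  # assumed default, seconds between polls


def wait_for_task(session, task_ref, poll_interval=POLL_INTERVAL):
    """Poll a vCenter task reference until it succeeds or errors."""
    while True:
        info = session.get_task_info(task_ref)  # assumed helper
        if info.state == "running":
            # Corresponds to records like
            # "Task: {'id': task-1122452, 'name': RelocateVM_Task} progress is 53%."
            print(f"Task {info.id} ({info.name}) progress is {info.progress}%.")
            time.sleep(poll_interval)
        elif info.state == "success":
            # Corresponds to "... completed successfully." records,
            # which also report a duration_secs value.
            print(f"Task {info.id} ({info.name}) completed successfully.")
            return info.result
        else:
            raise RuntimeError(f"Task {info.id} failed: {info.error}")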
[ 1087.408095] env[62070]: DEBUG oslo_vmware.api [None req-4ba3ce4b-98ba-4ab3-89c2-1c6da7416a72 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': task-1122452, 'name': RelocateVM_Task} progress is 78%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1087.451541] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-605118f0-b7f6-4680-bcbc-a7c9cd18ca2b {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.463269] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78ad8e35-8b87-4dbc-bda5-6f229d27bad7 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.501435] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a60f2e52-1ef0-4e52-89da-d2a18f473b3c {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.512174] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d0081de-0611-4c6e-b77d-dd6c616c2b58 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.533289] env[62070]: DEBUG nova.compute.provider_tree [None req-9b7c6b21-d43d-476d-9f0a-6c85a498dad9 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1087.602397] env[62070]: DEBUG oslo_vmware.api [None req-1fe1cd47-a0f8-4513-949c-45353657dab9 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] Task: {'id': task-1122454, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1087.723432] env[62070]: DEBUG oslo_concurrency.lockutils [None req-cf705385-13a6-4f62-bb38-dfdd6bb83d6a tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1087.907809] env[62070]: DEBUG oslo_vmware.api [None req-4ba3ce4b-98ba-4ab3-89c2-1c6da7416a72 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': task-1122452, 'name': RelocateVM_Task} progress is 95%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1088.038142] env[62070]: DEBUG nova.scheduler.client.report [None req-9b7c6b21-d43d-476d-9f0a-6c85a498dad9 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1088.101000] env[62070]: DEBUG oslo_vmware.api [None req-1fe1cd47-a0f8-4513-949c-45353657dab9 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] Task: {'id': task-1122454, 'name': ReconfigVM_Task, 'duration_secs': 1.262453} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1088.101578] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-1fe1cd47-a0f8-4513-949c-45353657dab9 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] [instance: b1137be1-b66b-4eb2-bdbd-1db6173a1f93] Reconfigured VM instance instance-0000006a to attach disk [datastore1] b1137be1-b66b-4eb2-bdbd-1db6173a1f93/b1137be1-b66b-4eb2-bdbd-1db6173a1f93.vmdk or device None with type sparse {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1088.103071] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4b8212a2-ebaf-4959-8020-3bcb061eb561 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.110996] env[62070]: DEBUG oslo_vmware.api [None req-1fe1cd47-a0f8-4513-949c-45353657dab9 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] Waiting for the task: (returnval){ [ 1088.110996] env[62070]: value = "task-1122455" [ 1088.110996] env[62070]: _type = "Task" [ 1088.110996] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1088.121803] env[62070]: DEBUG oslo_vmware.api [None req-1fe1cd47-a0f8-4513-949c-45353657dab9 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] Task: {'id': task-1122455, 'name': Rename_Task} progress is 5%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1088.407394] env[62070]: DEBUG oslo_vmware.api [None req-4ba3ce4b-98ba-4ab3-89c2-1c6da7416a72 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': task-1122452, 'name': RelocateVM_Task} progress is 97%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1088.542432] env[62070]: DEBUG oslo_concurrency.lockutils [None req-9b7c6b21-d43d-476d-9f0a-6c85a498dad9 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.759s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1088.545320] env[62070]: DEBUG oslo_concurrency.lockutils [None req-cf705385-13a6-4f62-bb38-dfdd6bb83d6a tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.822s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1088.545654] env[62070]: DEBUG nova.objects.instance [None req-cf705385-13a6-4f62-bb38-dfdd6bb83d6a tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Lazy-loading 'resources' on Instance uuid 000a67eb-9535-4da6-816a-b61126f11509 {{(pid=62070) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1088.561274] env[62070]: INFO nova.scheduler.client.report [None req-9b7c6b21-d43d-476d-9f0a-6c85a498dad9 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Deleted allocations for instance 5cccd79d-d243-49db-8581-718dd594f3b3 [ 1088.621702] env[62070]: DEBUG oslo_vmware.api [None req-1fe1cd47-a0f8-4513-949c-45353657dab9 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] Task: {'id': task-1122455, 'name': Rename_Task, 'duration_secs': 0.231434} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1088.621922] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-1fe1cd47-a0f8-4513-949c-45353657dab9 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] [instance: b1137be1-b66b-4eb2-bdbd-1db6173a1f93] Powering on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1088.622195] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ffc41b34-af91-4418-a62f-6fad415e72e4 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.628092] env[62070]: DEBUG oslo_vmware.api [None req-1fe1cd47-a0f8-4513-949c-45353657dab9 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] Waiting for the task: (returnval){ [ 1088.628092] env[62070]: value = "task-1122456" [ 1088.628092] env[62070]: _type = "Task" [ 1088.628092] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1088.638254] env[62070]: DEBUG oslo_vmware.api [None req-1fe1cd47-a0f8-4513-949c-45353657dab9 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] Task: {'id': task-1122456, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1088.907617] env[62070]: DEBUG oslo_vmware.api [None req-4ba3ce4b-98ba-4ab3-89c2-1c6da7416a72 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': task-1122452, 'name': RelocateVM_Task, 'duration_secs': 3.504703} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1088.907912] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-4ba3ce4b-98ba-4ab3-89c2-1c6da7416a72 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 38573462-18e5-4ba8-ad32-6ebc7bcf7c76] Volume attach. Driver type: vmdk {{(pid=62070) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1088.908126] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-4ba3ce4b-98ba-4ab3-89c2-1c6da7416a72 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 38573462-18e5-4ba8-ad32-6ebc7bcf7c76] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-245511', 'volume_id': 'e82aa618-eed9-4876-a9ff-8289ed9697af', 'name': 'volume-e82aa618-eed9-4876-a9ff-8289ed9697af', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '38573462-18e5-4ba8-ad32-6ebc7bcf7c76', 'attached_at': '', 'detached_at': '', 'volume_id': 'e82aa618-eed9-4876-a9ff-8289ed9697af', 'serial': 'e82aa618-eed9-4876-a9ff-8289ed9697af'} {{(pid=62070) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1088.908950] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccdd4f96-7b27-48bb-ac27-951abb615613 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.924325] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-228cd2c7-0697-4a16-8fc5-33baf276bbaf {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.952484] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-4ba3ce4b-98ba-4ab3-89c2-1c6da7416a72 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 38573462-18e5-4ba8-ad32-6ebc7bcf7c76] Reconfiguring VM instance instance-0000006b to attach disk [datastore1] volume-e82aa618-eed9-4876-a9ff-8289ed9697af/volume-e82aa618-eed9-4876-a9ff-8289ed9697af.vmdk or device None with type thin {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1088.952781] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2f868551-2c82-4daf-9088-968283319fad {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.973887] env[62070]: DEBUG oslo_vmware.api [None req-4ba3ce4b-98ba-4ab3-89c2-1c6da7416a72 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Waiting for the task: (returnval){ [ 1088.973887] env[62070]: value = "task-1122457" [ 1088.973887] env[62070]: _type = "Task" [ 1088.973887] env[62070]: } to complete. 
{{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1088.982451] env[62070]: DEBUG oslo_vmware.api [None req-4ba3ce4b-98ba-4ab3-89c2-1c6da7416a72 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': task-1122457, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1089.069326] env[62070]: DEBUG oslo_concurrency.lockutils [None req-9b7c6b21-d43d-476d-9f0a-6c85a498dad9 tempest-AttachVolumeTestJSON-1145072651 tempest-AttachVolumeTestJSON-1145072651-project-member] Lock "5cccd79d-d243-49db-8581-718dd594f3b3" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.801s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1089.140718] env[62070]: DEBUG oslo_vmware.api [None req-1fe1cd47-a0f8-4513-949c-45353657dab9 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] Task: {'id': task-1122456, 'name': PowerOnVM_Task, 'duration_secs': 0.477666} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1089.143051] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-1fe1cd47-a0f8-4513-949c-45353657dab9 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] [instance: b1137be1-b66b-4eb2-bdbd-1db6173a1f93] Powered on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1089.143316] env[62070]: DEBUG nova.compute.manager [None req-1fe1cd47-a0f8-4513-949c-45353657dab9 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] [instance: b1137be1-b66b-4eb2-bdbd-1db6173a1f93] Checking state {{(pid=62070) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1089.144313] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cc22ad8-f381-46f5-ac2e-20de6ce565ba {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.177490] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a34a024-15ce-48ef-9d75-a7a87204afb8 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.185543] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0147e2ea-3822-4f91-bb83-917230efb26b {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.218540] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d331c8b-6b12-4f19-a3b8-d8339f673ed5 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.226836] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d2a20f0-417a-46fb-9398-1dc57377f306 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.241618] env[62070]: DEBUG nova.compute.provider_tree [None req-cf705385-13a6-4f62-bb38-dfdd6bb83d6a tempest-AttachVolumeNegativeTest-303800157 
tempest-AttachVolumeNegativeTest-303800157-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1089.483511] env[62070]: DEBUG oslo_vmware.api [None req-4ba3ce4b-98ba-4ab3-89c2-1c6da7416a72 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': task-1122457, 'name': ReconfigVM_Task, 'duration_secs': 0.295681} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1089.483808] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-4ba3ce4b-98ba-4ab3-89c2-1c6da7416a72 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 38573462-18e5-4ba8-ad32-6ebc7bcf7c76] Reconfigured VM instance instance-0000006b to attach disk [datastore1] volume-e82aa618-eed9-4876-a9ff-8289ed9697af/volume-e82aa618-eed9-4876-a9ff-8289ed9697af.vmdk or device None with type thin {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1089.488448] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-13b540f3-c7cb-4498-a6c1-b5498a2d2a52 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.504352] env[62070]: DEBUG oslo_vmware.api [None req-4ba3ce4b-98ba-4ab3-89c2-1c6da7416a72 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Waiting for the task: (returnval){ [ 1089.504352] env[62070]: value = "task-1122458" [ 1089.504352] env[62070]: _type = "Task" [ 1089.504352] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1089.512944] env[62070]: DEBUG oslo_vmware.api [None req-4ba3ce4b-98ba-4ab3-89c2-1c6da7416a72 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': task-1122458, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1089.666246] env[62070]: DEBUG oslo_concurrency.lockutils [None req-1fe1cd47-a0f8-4513-949c-45353657dab9 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1089.744478] env[62070]: DEBUG nova.scheduler.client.report [None req-cf705385-13a6-4f62-bb38-dfdd6bb83d6a tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1090.017166] env[62070]: DEBUG oslo_vmware.api [None req-4ba3ce4b-98ba-4ab3-89c2-1c6da7416a72 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': task-1122458, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1090.178620] env[62070]: DEBUG oslo_concurrency.lockutils [None req-d0f73155-991f-4359-b881-6a826cc05d29 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] Acquiring lock "b1137be1-b66b-4eb2-bdbd-1db6173a1f93" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1090.178941] env[62070]: DEBUG oslo_concurrency.lockutils [None req-d0f73155-991f-4359-b881-6a826cc05d29 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] Lock "b1137be1-b66b-4eb2-bdbd-1db6173a1f93" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1090.179194] env[62070]: DEBUG oslo_concurrency.lockutils [None req-d0f73155-991f-4359-b881-6a826cc05d29 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] Acquiring lock "b1137be1-b66b-4eb2-bdbd-1db6173a1f93-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1090.179470] env[62070]: DEBUG oslo_concurrency.lockutils [None req-d0f73155-991f-4359-b881-6a826cc05d29 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] Lock "b1137be1-b66b-4eb2-bdbd-1db6173a1f93-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1090.179768] env[62070]: DEBUG oslo_concurrency.lockutils [None req-d0f73155-991f-4359-b881-6a826cc05d29 
tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] Lock "b1137be1-b66b-4eb2-bdbd-1db6173a1f93-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1090.182262] env[62070]: INFO nova.compute.manager [None req-d0f73155-991f-4359-b881-6a826cc05d29 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] [instance: b1137be1-b66b-4eb2-bdbd-1db6173a1f93] Terminating instance [ 1090.185413] env[62070]: DEBUG oslo_concurrency.lockutils [None req-d0f73155-991f-4359-b881-6a826cc05d29 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] Acquiring lock "refresh_cache-b1137be1-b66b-4eb2-bdbd-1db6173a1f93" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1090.188725] env[62070]: DEBUG oslo_concurrency.lockutils [None req-d0f73155-991f-4359-b881-6a826cc05d29 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] Acquired lock "refresh_cache-b1137be1-b66b-4eb2-bdbd-1db6173a1f93" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1090.188725] env[62070]: DEBUG nova.network.neutron [None req-d0f73155-991f-4359-b881-6a826cc05d29 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] [instance: b1137be1-b66b-4eb2-bdbd-1db6173a1f93] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1090.251104] env[62070]: DEBUG oslo_concurrency.lockutils [None req-cf705385-13a6-4f62-bb38-dfdd6bb83d6a tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.705s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1090.254271] env[62070]: DEBUG oslo_concurrency.lockutils [None req-1fe1cd47-a0f8-4513-949c-45353657dab9 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 0.589s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1090.255810] env[62070]: DEBUG nova.objects.instance [None req-1fe1cd47-a0f8-4513-949c-45353657dab9 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] [instance: b1137be1-b66b-4eb2-bdbd-1db6173a1f93] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62070) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1090.299884] env[62070]: INFO nova.scheduler.client.report [None req-cf705385-13a6-4f62-bb38-dfdd6bb83d6a tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Deleted allocations for instance 000a67eb-9535-4da6-816a-b61126f11509 [ 1090.516093] env[62070]: DEBUG oslo_vmware.api [None req-4ba3ce4b-98ba-4ab3-89c2-1c6da7416a72 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': task-1122458, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1090.709606] env[62070]: DEBUG nova.network.neutron [None req-d0f73155-991f-4359-b881-6a826cc05d29 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] [instance: b1137be1-b66b-4eb2-bdbd-1db6173a1f93] Instance cache missing network info. {{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1090.778969] env[62070]: DEBUG nova.network.neutron [None req-d0f73155-991f-4359-b881-6a826cc05d29 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] [instance: b1137be1-b66b-4eb2-bdbd-1db6173a1f93] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1090.808024] env[62070]: DEBUG oslo_concurrency.lockutils [None req-cf705385-13a6-4f62-bb38-dfdd6bb83d6a tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Lock "000a67eb-9535-4da6-816a-b61126f11509" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.263s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1091.016559] env[62070]: DEBUG oslo_vmware.api [None req-4ba3ce4b-98ba-4ab3-89c2-1c6da7416a72 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': task-1122458, 'name': ReconfigVM_Task, 'duration_secs': 1.149535} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1091.016843] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-4ba3ce4b-98ba-4ab3-89c2-1c6da7416a72 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 38573462-18e5-4ba8-ad32-6ebc7bcf7c76] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-245511', 'volume_id': 'e82aa618-eed9-4876-a9ff-8289ed9697af', 'name': 'volume-e82aa618-eed9-4876-a9ff-8289ed9697af', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '38573462-18e5-4ba8-ad32-6ebc7bcf7c76', 'attached_at': '', 'detached_at': '', 'volume_id': 'e82aa618-eed9-4876-a9ff-8289ed9697af', 'serial': 'e82aa618-eed9-4876-a9ff-8289ed9697af'} {{(pid=62070) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1091.017367] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-de129c77-6a6e-42ff-a53f-5486f5a08c70 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.024020] env[62070]: DEBUG oslo_vmware.api [None req-4ba3ce4b-98ba-4ab3-89c2-1c6da7416a72 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Waiting for the task: (returnval){ [ 1091.024020] env[62070]: value = "task-1122460" [ 1091.024020] env[62070]: _type = "Task" [ 1091.024020] env[62070]: } to complete. 
{{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1091.032713] env[62070]: DEBUG oslo_vmware.api [None req-4ba3ce4b-98ba-4ab3-89c2-1c6da7416a72 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': task-1122460, 'name': Rename_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1091.091095] env[62070]: DEBUG oslo_concurrency.lockutils [None req-bae94e7f-c4b8-4eb3-b0c3-0b08847f6b91 tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Acquiring lock "53a1791d-38fd-4721-b82c-2f0922348300" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1091.091406] env[62070]: DEBUG oslo_concurrency.lockutils [None req-bae94e7f-c4b8-4eb3-b0c3-0b08847f6b91 tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Lock "53a1791d-38fd-4721-b82c-2f0922348300" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1091.091623] env[62070]: DEBUG oslo_concurrency.lockutils [None req-bae94e7f-c4b8-4eb3-b0c3-0b08847f6b91 tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Acquiring lock "53a1791d-38fd-4721-b82c-2f0922348300-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1091.091815] env[62070]: DEBUG oslo_concurrency.lockutils [None req-bae94e7f-c4b8-4eb3-b0c3-0b08847f6b91 tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Lock "53a1791d-38fd-4721-b82c-2f0922348300-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1091.091994] env[62070]: DEBUG oslo_concurrency.lockutils [None req-bae94e7f-c4b8-4eb3-b0c3-0b08847f6b91 tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Lock "53a1791d-38fd-4721-b82c-2f0922348300-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1091.094367] env[62070]: INFO nova.compute.manager [None req-bae94e7f-c4b8-4eb3-b0c3-0b08847f6b91 tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] [instance: 53a1791d-38fd-4721-b82c-2f0922348300] Terminating instance [ 1091.096350] env[62070]: DEBUG nova.compute.manager [None req-bae94e7f-c4b8-4eb3-b0c3-0b08847f6b91 tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] [instance: 53a1791d-38fd-4721-b82c-2f0922348300] Start destroying the instance on the hypervisor. 
{{(pid=62070) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1091.096730] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-bae94e7f-c4b8-4eb3-b0c3-0b08847f6b91 tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] [instance: 53a1791d-38fd-4721-b82c-2f0922348300] Destroying instance {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1091.097418] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38aa18f1-c90d-4c3d-bc9a-5a4011dd246e {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.106075] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-bae94e7f-c4b8-4eb3-b0c3-0b08847f6b91 tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] [instance: 53a1791d-38fd-4721-b82c-2f0922348300] Powering off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1091.106342] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a3e000d5-e411-4d9b-93a3-1fdc86e43974 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.114384] env[62070]: DEBUG oslo_vmware.api [None req-bae94e7f-c4b8-4eb3-b0c3-0b08847f6b91 tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Waiting for the task: (returnval){ [ 1091.114384] env[62070]: value = "task-1122461" [ 1091.114384] env[62070]: _type = "Task" [ 1091.114384] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1091.123672] env[62070]: DEBUG oslo_vmware.api [None req-bae94e7f-c4b8-4eb3-b0c3-0b08847f6b91 tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Task: {'id': task-1122461, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1091.266214] env[62070]: DEBUG oslo_concurrency.lockutils [None req-1fe1cd47-a0f8-4513-949c-45353657dab9 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.012s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1091.281953] env[62070]: DEBUG oslo_concurrency.lockutils [None req-d0f73155-991f-4359-b881-6a826cc05d29 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] Releasing lock "refresh_cache-b1137be1-b66b-4eb2-bdbd-1db6173a1f93" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1091.282424] env[62070]: DEBUG nova.compute.manager [None req-d0f73155-991f-4359-b881-6a826cc05d29 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] [instance: b1137be1-b66b-4eb2-bdbd-1db6173a1f93] Start destroying the instance on the hypervisor. 
{{(pid=62070) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1091.282730] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-d0f73155-991f-4359-b881-6a826cc05d29 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] [instance: b1137be1-b66b-4eb2-bdbd-1db6173a1f93] Destroying instance {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1091.283753] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-581d7799-6550-44ca-8cff-f09ff76296c2 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.293073] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-d0f73155-991f-4359-b881-6a826cc05d29 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] [instance: b1137be1-b66b-4eb2-bdbd-1db6173a1f93] Powering off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1091.293378] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-417b6ad5-1b92-4cd4-83e3-f75932757b87 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.301643] env[62070]: DEBUG oslo_vmware.api [None req-d0f73155-991f-4359-b881-6a826cc05d29 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] Waiting for the task: (returnval){ [ 1091.301643] env[62070]: value = "task-1122462" [ 1091.301643] env[62070]: _type = "Task" [ 1091.301643] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1091.310815] env[62070]: DEBUG oslo_vmware.api [None req-d0f73155-991f-4359-b881-6a826cc05d29 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] Task: {'id': task-1122462, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1091.534914] env[62070]: DEBUG oslo_vmware.api [None req-4ba3ce4b-98ba-4ab3-89c2-1c6da7416a72 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': task-1122460, 'name': Rename_Task, 'duration_secs': 0.146122} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1091.535305] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-4ba3ce4b-98ba-4ab3-89c2-1c6da7416a72 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 38573462-18e5-4ba8-ad32-6ebc7bcf7c76] Powering on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1091.535620] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c7ceeca7-0381-4283-8eaf-756f5527c50f {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.542859] env[62070]: DEBUG oslo_vmware.api [None req-4ba3ce4b-98ba-4ab3-89c2-1c6da7416a72 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Waiting for the task: (returnval){ [ 1091.542859] env[62070]: value = "task-1122463" [ 1091.542859] env[62070]: _type = "Task" [ 1091.542859] env[62070]: } to complete. 
{{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1091.551532] env[62070]: DEBUG oslo_vmware.api [None req-4ba3ce4b-98ba-4ab3-89c2-1c6da7416a72 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': task-1122463, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1091.626237] env[62070]: DEBUG oslo_vmware.api [None req-bae94e7f-c4b8-4eb3-b0c3-0b08847f6b91 tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Task: {'id': task-1122461, 'name': PowerOffVM_Task, 'duration_secs': 0.373779} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1091.626514] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-bae94e7f-c4b8-4eb3-b0c3-0b08847f6b91 tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] [instance: 53a1791d-38fd-4721-b82c-2f0922348300] Powered off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1091.626686] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-bae94e7f-c4b8-4eb3-b0c3-0b08847f6b91 tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] [instance: 53a1791d-38fd-4721-b82c-2f0922348300] Unregistering the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1091.626948] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d7eb87b2-cf38-4b8d-a1e0-72ab96d21528 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.696251] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-bae94e7f-c4b8-4eb3-b0c3-0b08847f6b91 tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] [instance: 53a1791d-38fd-4721-b82c-2f0922348300] Unregistered the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1091.696762] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-bae94e7f-c4b8-4eb3-b0c3-0b08847f6b91 tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] [instance: 53a1791d-38fd-4721-b82c-2f0922348300] Deleting contents of the VM from datastore datastore2 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1091.697039] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-bae94e7f-c4b8-4eb3-b0c3-0b08847f6b91 tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Deleting the datastore file [datastore2] 53a1791d-38fd-4721-b82c-2f0922348300 {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1091.697342] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5426c275-0bfd-453a-bc79-b903032c8f18 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.705424] env[62070]: DEBUG oslo_vmware.api [None req-bae94e7f-c4b8-4eb3-b0c3-0b08847f6b91 tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Waiting for the task: (returnval){ [ 1091.705424] env[62070]: value = "task-1122465" [ 1091.705424] 
env[62070]: _type = "Task" [ 1091.705424] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1091.713754] env[62070]: DEBUG oslo_vmware.api [None req-bae94e7f-c4b8-4eb3-b0c3-0b08847f6b91 tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Task: {'id': task-1122465, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1091.811901] env[62070]: DEBUG oslo_vmware.api [None req-d0f73155-991f-4359-b881-6a826cc05d29 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] Task: {'id': task-1122462, 'name': PowerOffVM_Task, 'duration_secs': 0.239153} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1091.812263] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-d0f73155-991f-4359-b881-6a826cc05d29 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] [instance: b1137be1-b66b-4eb2-bdbd-1db6173a1f93] Powered off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1091.812507] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-d0f73155-991f-4359-b881-6a826cc05d29 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] [instance: b1137be1-b66b-4eb2-bdbd-1db6173a1f93] Unregistering the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1091.812773] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2e9eba0d-6922-4342-aa72-53b96a490107 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.850180] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-d0f73155-991f-4359-b881-6a826cc05d29 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] [instance: b1137be1-b66b-4eb2-bdbd-1db6173a1f93] Unregistered the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1091.850437] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-d0f73155-991f-4359-b881-6a826cc05d29 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] [instance: b1137be1-b66b-4eb2-bdbd-1db6173a1f93] Deleting contents of the VM from datastore datastore1 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1091.850649] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-d0f73155-991f-4359-b881-6a826cc05d29 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] Deleting the datastore file [datastore1] b1137be1-b66b-4eb2-bdbd-1db6173a1f93 {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1091.850963] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d9cc583c-440c-48f2-945c-08fb19c10bca {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.861197] env[62070]: DEBUG oslo_vmware.api [None req-d0f73155-991f-4359-b881-6a826cc05d29 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] Waiting for the task: (returnval){ [ 1091.861197] env[62070]: value = "task-1122467" [ 
1091.861197] env[62070]: _type = "Task" [ 1091.861197] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1091.869683] env[62070]: DEBUG oslo_vmware.api [None req-d0f73155-991f-4359-b881-6a826cc05d29 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] Task: {'id': task-1122467, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1092.054273] env[62070]: DEBUG oslo_vmware.api [None req-4ba3ce4b-98ba-4ab3-89c2-1c6da7416a72 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': task-1122463, 'name': PowerOnVM_Task, 'duration_secs': 0.497659} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1092.054613] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-4ba3ce4b-98ba-4ab3-89c2-1c6da7416a72 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 38573462-18e5-4ba8-ad32-6ebc7bcf7c76] Powered on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1092.054801] env[62070]: INFO nova.compute.manager [None req-4ba3ce4b-98ba-4ab3-89c2-1c6da7416a72 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 38573462-18e5-4ba8-ad32-6ebc7bcf7c76] Took 8.29 seconds to spawn the instance on the hypervisor. [ 1092.055008] env[62070]: DEBUG nova.compute.manager [None req-4ba3ce4b-98ba-4ab3-89c2-1c6da7416a72 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 38573462-18e5-4ba8-ad32-6ebc7bcf7c76] Checking state {{(pid=62070) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1092.055948] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c647eb3d-df35-457d-9ba6-dbb4ecc87750 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.080308] env[62070]: DEBUG oslo_concurrency.lockutils [None req-d6e8ecc0-57c2-4ab4-b447-22297f7057e3 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Acquiring lock "a5cba512-9b50-4ca3-93eb-345be12dc588" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1092.080560] env[62070]: DEBUG oslo_concurrency.lockutils [None req-d6e8ecc0-57c2-4ab4-b447-22297f7057e3 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Lock "a5cba512-9b50-4ca3-93eb-345be12dc588" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1092.216051] env[62070]: DEBUG oslo_vmware.api [None req-bae94e7f-c4b8-4eb3-b0c3-0b08847f6b91 tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Task: {'id': task-1122465, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.219145} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1092.216385] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-bae94e7f-c4b8-4eb3-b0c3-0b08847f6b91 tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Deleted the datastore file {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1092.216546] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-bae94e7f-c4b8-4eb3-b0c3-0b08847f6b91 tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] [instance: 53a1791d-38fd-4721-b82c-2f0922348300] Deleted contents of the VM from datastore datastore2 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1092.216638] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-bae94e7f-c4b8-4eb3-b0c3-0b08847f6b91 tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] [instance: 53a1791d-38fd-4721-b82c-2f0922348300] Instance destroyed {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1092.216817] env[62070]: INFO nova.compute.manager [None req-bae94e7f-c4b8-4eb3-b0c3-0b08847f6b91 tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] [instance: 53a1791d-38fd-4721-b82c-2f0922348300] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1092.217094] env[62070]: DEBUG oslo.service.loopingcall [None req-bae94e7f-c4b8-4eb3-b0c3-0b08847f6b91 tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1092.217416] env[62070]: DEBUG nova.compute.manager [-] [instance: 53a1791d-38fd-4721-b82c-2f0922348300] Deallocating network for instance {{(pid=62070) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1092.217416] env[62070]: DEBUG nova.network.neutron [-] [instance: 53a1791d-38fd-4721-b82c-2f0922348300] deallocate_for_instance() {{(pid=62070) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1092.372178] env[62070]: DEBUG oslo_vmware.api [None req-d0f73155-991f-4359-b881-6a826cc05d29 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] Task: {'id': task-1122467, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.102175} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1092.372448] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-d0f73155-991f-4359-b881-6a826cc05d29 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] Deleted the datastore file {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1092.372640] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-d0f73155-991f-4359-b881-6a826cc05d29 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] [instance: b1137be1-b66b-4eb2-bdbd-1db6173a1f93] Deleted contents of the VM from datastore datastore1 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1092.372989] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-d0f73155-991f-4359-b881-6a826cc05d29 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] [instance: b1137be1-b66b-4eb2-bdbd-1db6173a1f93] Instance destroyed {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1092.373199] env[62070]: INFO nova.compute.manager [None req-d0f73155-991f-4359-b881-6a826cc05d29 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] [instance: b1137be1-b66b-4eb2-bdbd-1db6173a1f93] Took 1.09 seconds to destroy the instance on the hypervisor. [ 1092.373449] env[62070]: DEBUG oslo.service.loopingcall [None req-d0f73155-991f-4359-b881-6a826cc05d29 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1092.373646] env[62070]: DEBUG nova.compute.manager [-] [instance: b1137be1-b66b-4eb2-bdbd-1db6173a1f93] Deallocating network for instance {{(pid=62070) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1092.373771] env[62070]: DEBUG nova.network.neutron [-] [instance: b1137be1-b66b-4eb2-bdbd-1db6173a1f93] deallocate_for_instance() {{(pid=62070) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1092.403755] env[62070]: DEBUG nova.network.neutron [-] [instance: b1137be1-b66b-4eb2-bdbd-1db6173a1f93] Instance cache missing network info. 
{{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1092.475801] env[62070]: DEBUG nova.compute.manager [req-69a3a2aa-95b9-44ca-bf45-8182d9d43a18 req-d65c7359-1544-4c63-84ce-8c95aae986a5 service nova] [instance: 53a1791d-38fd-4721-b82c-2f0922348300] Received event network-vif-deleted-2c6759e4-b6e7-4b67-b06d-d38d6043d3b2 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1092.476012] env[62070]: INFO nova.compute.manager [req-69a3a2aa-95b9-44ca-bf45-8182d9d43a18 req-d65c7359-1544-4c63-84ce-8c95aae986a5 service nova] [instance: 53a1791d-38fd-4721-b82c-2f0922348300] Neutron deleted interface 2c6759e4-b6e7-4b67-b06d-d38d6043d3b2; detaching it from the instance and deleting it from the info cache [ 1092.476213] env[62070]: DEBUG nova.network.neutron [req-69a3a2aa-95b9-44ca-bf45-8182d9d43a18 req-d65c7359-1544-4c63-84ce-8c95aae986a5 service nova] [instance: 53a1791d-38fd-4721-b82c-2f0922348300] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1092.575666] env[62070]: INFO nova.compute.manager [None req-4ba3ce4b-98ba-4ab3-89c2-1c6da7416a72 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 38573462-18e5-4ba8-ad32-6ebc7bcf7c76] Took 14.85 seconds to build instance. [ 1092.584371] env[62070]: INFO nova.compute.manager [None req-d6e8ecc0-57c2-4ab4-b447-22297f7057e3 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] [instance: a5cba512-9b50-4ca3-93eb-345be12dc588] Detaching volume 0635ea59-c4ec-4e97-9bdd-1d58208eb929 [ 1092.619759] env[62070]: INFO nova.virt.block_device [None req-d6e8ecc0-57c2-4ab4-b447-22297f7057e3 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] [instance: a5cba512-9b50-4ca3-93eb-345be12dc588] Attempting to driver detach volume 0635ea59-c4ec-4e97-9bdd-1d58208eb929 from mountpoint /dev/sdb [ 1092.620082] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-d6e8ecc0-57c2-4ab4-b447-22297f7057e3 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] [instance: a5cba512-9b50-4ca3-93eb-345be12dc588] Volume detach. 
Driver type: vmdk {{(pid=62070) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1092.620294] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-d6e8ecc0-57c2-4ab4-b447-22297f7057e3 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] [instance: a5cba512-9b50-4ca3-93eb-345be12dc588] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-245489', 'volume_id': '0635ea59-c4ec-4e97-9bdd-1d58208eb929', 'name': 'volume-0635ea59-c4ec-4e97-9bdd-1d58208eb929', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'a5cba512-9b50-4ca3-93eb-345be12dc588', 'attached_at': '', 'detached_at': '', 'volume_id': '0635ea59-c4ec-4e97-9bdd-1d58208eb929', 'serial': '0635ea59-c4ec-4e97-9bdd-1d58208eb929'} {{(pid=62070) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1092.621315] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18629769-83eb-4661-8c8e-bb9bcc5eef4c {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.644163] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20f3fb31-a7e4-4921-9e87-818ce8a654ee {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.651931] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a44c127-5efe-47f2-8cb8-cb8086793627 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.673956] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb9e0f4f-4719-47bf-b142-ba97e7a97467 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.688844] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-d6e8ecc0-57c2-4ab4-b447-22297f7057e3 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] The volume has not been displaced from its original location: [datastore1] volume-0635ea59-c4ec-4e97-9bdd-1d58208eb929/volume-0635ea59-c4ec-4e97-9bdd-1d58208eb929.vmdk. No consolidation needed. 
{{(pid=62070) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1092.694853] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-d6e8ecc0-57c2-4ab4-b447-22297f7057e3 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] [instance: a5cba512-9b50-4ca3-93eb-345be12dc588] Reconfiguring VM instance instance-00000058 to detach disk 2001 {{(pid=62070) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1092.695142] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9cc5e89b-a20f-44c0-9791-3be3cc522619 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.714598] env[62070]: DEBUG oslo_vmware.api [None req-d6e8ecc0-57c2-4ab4-b447-22297f7057e3 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Waiting for the task: (returnval){ [ 1092.714598] env[62070]: value = "task-1122468" [ 1092.714598] env[62070]: _type = "Task" [ 1092.714598] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1092.723271] env[62070]: DEBUG oslo_vmware.api [None req-d6e8ecc0-57c2-4ab4-b447-22297f7057e3 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Task: {'id': task-1122468, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1092.910021] env[62070]: DEBUG nova.network.neutron [-] [instance: b1137be1-b66b-4eb2-bdbd-1db6173a1f93] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1092.955635] env[62070]: DEBUG nova.network.neutron [-] [instance: 53a1791d-38fd-4721-b82c-2f0922348300] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1092.979353] env[62070]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ff51271a-7248-4d71-9184-a8de5b2b02b8 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.990666] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a751a66f-6aa2-4539-9644-54edeed92915 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.022678] env[62070]: DEBUG nova.compute.manager [req-69a3a2aa-95b9-44ca-bf45-8182d9d43a18 req-d65c7359-1544-4c63-84ce-8c95aae986a5 service nova] [instance: 53a1791d-38fd-4721-b82c-2f0922348300] Detach interface failed, port_id=2c6759e4-b6e7-4b67-b06d-d38d6043d3b2, reason: Instance 53a1791d-38fd-4721-b82c-2f0922348300 could not be found. 
{{(pid=62070) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1093.080209] env[62070]: DEBUG oslo_concurrency.lockutils [None req-4ba3ce4b-98ba-4ab3-89c2-1c6da7416a72 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Lock "38573462-18e5-4ba8-ad32-6ebc7bcf7c76" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 16.362s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1093.225058] env[62070]: DEBUG oslo_vmware.api [None req-d6e8ecc0-57c2-4ab4-b447-22297f7057e3 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Task: {'id': task-1122468, 'name': ReconfigVM_Task, 'duration_secs': 0.25132} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1093.225477] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-d6e8ecc0-57c2-4ab4-b447-22297f7057e3 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] [instance: a5cba512-9b50-4ca3-93eb-345be12dc588] Reconfigured VM instance instance-00000058 to detach disk 2001 {{(pid=62070) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1093.230803] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e5da471c-768b-42cb-94bc-a7ea98242877 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.249214] env[62070]: DEBUG oslo_vmware.api [None req-d6e8ecc0-57c2-4ab4-b447-22297f7057e3 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Waiting for the task: (returnval){ [ 1093.249214] env[62070]: value = "task-1122470" [ 1093.249214] env[62070]: _type = "Task" [ 1093.249214] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1093.261615] env[62070]: DEBUG oslo_vmware.api [None req-d6e8ecc0-57c2-4ab4-b447-22297f7057e3 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Task: {'id': task-1122470, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1093.273674] env[62070]: DEBUG nova.compute.manager [req-091f998f-bb3c-4224-8314-71c64f6fafce req-41e256af-88c0-4789-9787-69b42cd8b504 service nova] [instance: 67e99ada-a8e6-4034-b19b-5b2cb883b735] Received event network-changed-e01eb485-1347-4afb-b881-62797a5b84af {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1093.274016] env[62070]: DEBUG nova.compute.manager [req-091f998f-bb3c-4224-8314-71c64f6fafce req-41e256af-88c0-4789-9787-69b42cd8b504 service nova] [instance: 67e99ada-a8e6-4034-b19b-5b2cb883b735] Refreshing instance network info cache due to event network-changed-e01eb485-1347-4afb-b881-62797a5b84af. 
{{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1093.274288] env[62070]: DEBUG oslo_concurrency.lockutils [req-091f998f-bb3c-4224-8314-71c64f6fafce req-41e256af-88c0-4789-9787-69b42cd8b504 service nova] Acquiring lock "refresh_cache-67e99ada-a8e6-4034-b19b-5b2cb883b735" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1093.274489] env[62070]: DEBUG oslo_concurrency.lockutils [req-091f998f-bb3c-4224-8314-71c64f6fafce req-41e256af-88c0-4789-9787-69b42cd8b504 service nova] Acquired lock "refresh_cache-67e99ada-a8e6-4034-b19b-5b2cb883b735" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1093.274694] env[62070]: DEBUG nova.network.neutron [req-091f998f-bb3c-4224-8314-71c64f6fafce req-41e256af-88c0-4789-9787-69b42cd8b504 service nova] [instance: 67e99ada-a8e6-4034-b19b-5b2cb883b735] Refreshing network info cache for port e01eb485-1347-4afb-b881-62797a5b84af {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1093.409325] env[62070]: INFO nova.compute.manager [-] [instance: b1137be1-b66b-4eb2-bdbd-1db6173a1f93] Took 1.04 seconds to deallocate network for instance. [ 1093.456567] env[62070]: INFO nova.compute.manager [-] [instance: 53a1791d-38fd-4721-b82c-2f0922348300] Took 1.24 seconds to deallocate network for instance. [ 1093.759359] env[62070]: DEBUG oslo_vmware.api [None req-d6e8ecc0-57c2-4ab4-b447-22297f7057e3 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Task: {'id': task-1122470, 'name': ReconfigVM_Task, 'duration_secs': 0.16109} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1093.759692] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-d6e8ecc0-57c2-4ab4-b447-22297f7057e3 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] [instance: a5cba512-9b50-4ca3-93eb-345be12dc588] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-245489', 'volume_id': '0635ea59-c4ec-4e97-9bdd-1d58208eb929', 'name': 'volume-0635ea59-c4ec-4e97-9bdd-1d58208eb929', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'a5cba512-9b50-4ca3-93eb-345be12dc588', 'attached_at': '', 'detached_at': '', 'volume_id': '0635ea59-c4ec-4e97-9bdd-1d58208eb929', 'serial': '0635ea59-c4ec-4e97-9bdd-1d58208eb929'} {{(pid=62070) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1093.918746] env[62070]: DEBUG oslo_concurrency.lockutils [None req-d0f73155-991f-4359-b881-6a826cc05d29 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1093.919038] env[62070]: DEBUG oslo_concurrency.lockutils [None req-d0f73155-991f-4359-b881-6a826cc05d29 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1093.919269] env[62070]: DEBUG nova.objects.instance 
[None req-d0f73155-991f-4359-b881-6a826cc05d29 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] Lazy-loading 'resources' on Instance uuid b1137be1-b66b-4eb2-bdbd-1db6173a1f93 {{(pid=62070) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1093.970096] env[62070]: DEBUG nova.network.neutron [req-091f998f-bb3c-4224-8314-71c64f6fafce req-41e256af-88c0-4789-9787-69b42cd8b504 service nova] [instance: 67e99ada-a8e6-4034-b19b-5b2cb883b735] Updated VIF entry in instance network info cache for port e01eb485-1347-4afb-b881-62797a5b84af. {{(pid=62070) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1093.970459] env[62070]: DEBUG nova.network.neutron [req-091f998f-bb3c-4224-8314-71c64f6fafce req-41e256af-88c0-4789-9787-69b42cd8b504 service nova] [instance: 67e99ada-a8e6-4034-b19b-5b2cb883b735] Updating instance_info_cache with network_info: [{"id": "e01eb485-1347-4afb-b881-62797a5b84af", "address": "fa:16:3e:d1:62:ff", "network": {"id": "0d81bd04-b549-4e1f-97a2-0a0b9391dd3f", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-108214409-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c91e5eeeeb1742f499b2edaf76a93a3b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cf5bfbae-a882-4d34-be33-b31e274b3077", "external-id": "nsx-vlan-transportzone-556", "segmentation_id": 556, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape01eb485-13", "ovs_interfaceid": "e01eb485-1347-4afb-b881-62797a5b84af", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1093.972202] env[62070]: DEBUG oslo_concurrency.lockutils [None req-bae94e7f-c4b8-4eb3-b0c3-0b08847f6b91 tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1094.302406] env[62070]: DEBUG nova.objects.instance [None req-d6e8ecc0-57c2-4ab4-b447-22297f7057e3 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Lazy-loading 'flavor' on Instance uuid a5cba512-9b50-4ca3-93eb-345be12dc588 {{(pid=62070) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1094.473604] env[62070]: DEBUG oslo_concurrency.lockutils [req-091f998f-bb3c-4224-8314-71c64f6fafce req-41e256af-88c0-4789-9787-69b42cd8b504 service nova] Releasing lock "refresh_cache-67e99ada-a8e6-4034-b19b-5b2cb883b735" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1094.534673] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8de117e0-e3e8-4996-8845-7c78af5ac8c2 {{(pid=62070) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.543408] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04c5c040-31d8-4e3f-b83a-5584d5c7658d {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.574291] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b7a3cda-84d0-4afc-9f8d-66b64fd89c7c {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.582662] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52d5a63b-788f-404f-beff-21fd34305c47 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.598013] env[62070]: DEBUG nova.compute.provider_tree [None req-d0f73155-991f-4359-b881-6a826cc05d29 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1094.738322] env[62070]: DEBUG nova.compute.manager [None req-2cb3cba3-07f0-4006-ba36-22b4a0263356 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 38573462-18e5-4ba8-ad32-6ebc7bcf7c76] Stashing vm_state: active {{(pid=62070) _prep_resize /opt/stack/nova/nova/compute/manager.py:5624}} [ 1095.101684] env[62070]: DEBUG nova.scheduler.client.report [None req-d0f73155-991f-4359-b881-6a826cc05d29 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1095.259088] env[62070]: DEBUG oslo_concurrency.lockutils [None req-2cb3cba3-07f0-4006-ba36-22b4a0263356 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1095.298396] env[62070]: DEBUG nova.compute.manager [req-7e4b42d6-4348-49ae-a67b-79874013c7d0 req-c6ebba07-a7c8-4efc-84c0-d1f2fe7a9eac service nova] [instance: 38573462-18e5-4ba8-ad32-6ebc7bcf7c76] Received event network-changed-dc2ee731-fdfd-479f-ba45-d8a9e7f50a0e {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1095.298691] env[62070]: DEBUG nova.compute.manager [req-7e4b42d6-4348-49ae-a67b-79874013c7d0 req-c6ebba07-a7c8-4efc-84c0-d1f2fe7a9eac service nova] [instance: 38573462-18e5-4ba8-ad32-6ebc7bcf7c76] Refreshing instance network info cache due to event network-changed-dc2ee731-fdfd-479f-ba45-d8a9e7f50a0e. 
{{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1095.298881] env[62070]: DEBUG oslo_concurrency.lockutils [req-7e4b42d6-4348-49ae-a67b-79874013c7d0 req-c6ebba07-a7c8-4efc-84c0-d1f2fe7a9eac service nova] Acquiring lock "refresh_cache-38573462-18e5-4ba8-ad32-6ebc7bcf7c76" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1095.299044] env[62070]: DEBUG oslo_concurrency.lockutils [req-7e4b42d6-4348-49ae-a67b-79874013c7d0 req-c6ebba07-a7c8-4efc-84c0-d1f2fe7a9eac service nova] Acquired lock "refresh_cache-38573462-18e5-4ba8-ad32-6ebc7bcf7c76" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1095.299216] env[62070]: DEBUG nova.network.neutron [req-7e4b42d6-4348-49ae-a67b-79874013c7d0 req-c6ebba07-a7c8-4efc-84c0-d1f2fe7a9eac service nova] [instance: 38573462-18e5-4ba8-ad32-6ebc7bcf7c76] Refreshing network info cache for port dc2ee731-fdfd-479f-ba45-d8a9e7f50a0e {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1095.309812] env[62070]: DEBUG oslo_concurrency.lockutils [None req-d6e8ecc0-57c2-4ab4-b447-22297f7057e3 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Lock "a5cba512-9b50-4ca3-93eb-345be12dc588" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.229s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1095.606396] env[62070]: DEBUG oslo_concurrency.lockutils [None req-d0f73155-991f-4359-b881-6a826cc05d29 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.687s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1095.608948] env[62070]: DEBUG oslo_concurrency.lockutils [None req-bae94e7f-c4b8-4eb3-b0c3-0b08847f6b91 tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.637s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1095.609252] env[62070]: DEBUG nova.objects.instance [None req-bae94e7f-c4b8-4eb3-b0c3-0b08847f6b91 tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Lazy-loading 'resources' on Instance uuid 53a1791d-38fd-4721-b82c-2f0922348300 {{(pid=62070) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1095.627165] env[62070]: INFO nova.scheduler.client.report [None req-d0f73155-991f-4359-b881-6a826cc05d29 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] Deleted allocations for instance b1137be1-b66b-4eb2-bdbd-1db6173a1f93 [ 1096.079197] env[62070]: DEBUG nova.network.neutron [req-7e4b42d6-4348-49ae-a67b-79874013c7d0 req-c6ebba07-a7c8-4efc-84c0-d1f2fe7a9eac service nova] [instance: 38573462-18e5-4ba8-ad32-6ebc7bcf7c76] Updated VIF entry in instance network info cache for port dc2ee731-fdfd-479f-ba45-d8a9e7f50a0e. 
{{(pid=62070) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1096.079617] env[62070]: DEBUG nova.network.neutron [req-7e4b42d6-4348-49ae-a67b-79874013c7d0 req-c6ebba07-a7c8-4efc-84c0-d1f2fe7a9eac service nova] [instance: 38573462-18e5-4ba8-ad32-6ebc7bcf7c76] Updating instance_info_cache with network_info: [{"id": "dc2ee731-fdfd-479f-ba45-d8a9e7f50a0e", "address": "fa:16:3e:19:97:b8", "network": {"id": "0d81bd04-b549-4e1f-97a2-0a0b9391dd3f", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-108214409-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.248", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c91e5eeeeb1742f499b2edaf76a93a3b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cf5bfbae-a882-4d34-be33-b31e274b3077", "external-id": "nsx-vlan-transportzone-556", "segmentation_id": 556, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdc2ee731-fd", "ovs_interfaceid": "dc2ee731-fdfd-479f-ba45-d8a9e7f50a0e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1096.135308] env[62070]: DEBUG oslo_concurrency.lockutils [None req-d0f73155-991f-4359-b881-6a826cc05d29 tempest-ServerShowV254Test-1065660086 tempest-ServerShowV254Test-1065660086-project-member] Lock "b1137be1-b66b-4eb2-bdbd-1db6173a1f93" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.956s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1096.233884] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d51ac61-626f-4f51-af74-0166d7acc553 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.243805] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bf29265-e855-4839-a200-f0c84261f1ae {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.275382] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ff71247-2151-4186-9623-26ccf1575d55 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.288627] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbbfc055-60c8-40be-8f99-23e6af6ac821 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.303740] env[62070]: DEBUG nova.compute.provider_tree [None req-bae94e7f-c4b8-4eb3-b0c3-0b08847f6b91 tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) 
update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1096.317501] env[62070]: DEBUG oslo_concurrency.lockutils [None req-f4d532de-c444-4aef-b392-1829c09a1840 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Acquiring lock "a5cba512-9b50-4ca3-93eb-345be12dc588" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1096.317802] env[62070]: DEBUG oslo_concurrency.lockutils [None req-f4d532de-c444-4aef-b392-1829c09a1840 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Lock "a5cba512-9b50-4ca3-93eb-345be12dc588" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1096.318042] env[62070]: DEBUG oslo_concurrency.lockutils [None req-f4d532de-c444-4aef-b392-1829c09a1840 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Acquiring lock "a5cba512-9b50-4ca3-93eb-345be12dc588-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1096.318247] env[62070]: DEBUG oslo_concurrency.lockutils [None req-f4d532de-c444-4aef-b392-1829c09a1840 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Lock "a5cba512-9b50-4ca3-93eb-345be12dc588-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1096.318423] env[62070]: DEBUG oslo_concurrency.lockutils [None req-f4d532de-c444-4aef-b392-1829c09a1840 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Lock "a5cba512-9b50-4ca3-93eb-345be12dc588-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1096.320736] env[62070]: INFO nova.compute.manager [None req-f4d532de-c444-4aef-b392-1829c09a1840 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] [instance: a5cba512-9b50-4ca3-93eb-345be12dc588] Terminating instance [ 1096.324773] env[62070]: DEBUG nova.compute.manager [None req-f4d532de-c444-4aef-b392-1829c09a1840 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] [instance: a5cba512-9b50-4ca3-93eb-345be12dc588] Start destroying the instance on the hypervisor. 
{{(pid=62070) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1096.324965] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-f4d532de-c444-4aef-b392-1829c09a1840 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] [instance: a5cba512-9b50-4ca3-93eb-345be12dc588] Destroying instance {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1096.326070] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04427463-ee7b-41cb-9f93-a5f9d0abe27d {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.335499] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-f4d532de-c444-4aef-b392-1829c09a1840 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] [instance: a5cba512-9b50-4ca3-93eb-345be12dc588] Powering off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1096.335859] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-99794185-eaf2-44c9-aef0-52a0a9fc1ef7 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.343472] env[62070]: DEBUG oslo_vmware.api [None req-f4d532de-c444-4aef-b392-1829c09a1840 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Waiting for the task: (returnval){ [ 1096.343472] env[62070]: value = "task-1122472" [ 1096.343472] env[62070]: _type = "Task" [ 1096.343472] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1096.353965] env[62070]: DEBUG oslo_vmware.api [None req-f4d532de-c444-4aef-b392-1829c09a1840 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Task: {'id': task-1122472, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1096.582713] env[62070]: DEBUG oslo_concurrency.lockutils [req-7e4b42d6-4348-49ae-a67b-79874013c7d0 req-c6ebba07-a7c8-4efc-84c0-d1f2fe7a9eac service nova] Releasing lock "refresh_cache-38573462-18e5-4ba8-ad32-6ebc7bcf7c76" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1096.787147] env[62070]: DEBUG nova.compute.manager [None req-03eb9b6d-eabb-4896-b7c7-cd2c759e74f7 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: fec23dd4-e956-42dd-b9a2-c8577f77cd81] Stashing vm_state: active {{(pid=62070) _prep_resize /opt/stack/nova/nova/compute/manager.py:5624}} [ 1096.807351] env[62070]: DEBUG nova.scheduler.client.report [None req-bae94e7f-c4b8-4eb3-b0c3-0b08847f6b91 tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1096.855322] env[62070]: DEBUG oslo_vmware.api [None req-f4d532de-c444-4aef-b392-1829c09a1840 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Task: {'id': task-1122472, 'name': PowerOffVM_Task, 'duration_secs': 0.207358} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1096.855593] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-f4d532de-c444-4aef-b392-1829c09a1840 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] [instance: a5cba512-9b50-4ca3-93eb-345be12dc588] Powered off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1096.855765] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-f4d532de-c444-4aef-b392-1829c09a1840 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] [instance: a5cba512-9b50-4ca3-93eb-345be12dc588] Unregistering the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1096.856023] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-95a9af4e-bce5-4e6e-b68e-05e8e90c10b0 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.925521] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-f4d532de-c444-4aef-b392-1829c09a1840 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] [instance: a5cba512-9b50-4ca3-93eb-345be12dc588] Unregistered the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1096.925870] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-f4d532de-c444-4aef-b392-1829c09a1840 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] [instance: a5cba512-9b50-4ca3-93eb-345be12dc588] Deleting contents of the VM from datastore datastore2 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1096.926169] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-f4d532de-c444-4aef-b392-1829c09a1840 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Deleting the datastore file [datastore2] a5cba512-9b50-4ca3-93eb-345be12dc588 {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1096.926585] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c2aab3c9-6b4d-4726-aa14-b74ac1b770f8 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.933567] env[62070]: DEBUG oslo_vmware.api [None req-f4d532de-c444-4aef-b392-1829c09a1840 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Waiting for the task: (returnval){ [ 1096.933567] env[62070]: value = "task-1122474" [ 1096.933567] env[62070]: _type = "Task" [ 1096.933567] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1096.941566] env[62070]: DEBUG oslo_vmware.api [None req-f4d532de-c444-4aef-b392-1829c09a1840 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Task: {'id': task-1122474, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1097.308432] env[62070]: DEBUG oslo_concurrency.lockutils [None req-03eb9b6d-eabb-4896-b7c7-cd2c759e74f7 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1097.311371] env[62070]: DEBUG oslo_concurrency.lockutils [None req-bae94e7f-c4b8-4eb3-b0c3-0b08847f6b91 tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.702s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1097.313621] env[62070]: DEBUG oslo_concurrency.lockutils [None req-2cb3cba3-07f0-4006-ba36-22b4a0263356 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 2.055s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1097.330532] env[62070]: INFO nova.scheduler.client.report [None req-bae94e7f-c4b8-4eb3-b0c3-0b08847f6b91 tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Deleted allocations for instance 53a1791d-38fd-4721-b82c-2f0922348300 [ 1097.443739] env[62070]: DEBUG oslo_vmware.api [None req-f4d532de-c444-4aef-b392-1829c09a1840 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Task: {'id': task-1122474, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.133037} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1097.444335] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-f4d532de-c444-4aef-b392-1829c09a1840 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Deleted the datastore file {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1097.444528] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-f4d532de-c444-4aef-b392-1829c09a1840 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] [instance: a5cba512-9b50-4ca3-93eb-345be12dc588] Deleted contents of the VM from datastore datastore2 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1097.444707] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-f4d532de-c444-4aef-b392-1829c09a1840 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] [instance: a5cba512-9b50-4ca3-93eb-345be12dc588] Instance destroyed {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1097.444886] env[62070]: INFO nova.compute.manager [None req-f4d532de-c444-4aef-b392-1829c09a1840 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] [instance: a5cba512-9b50-4ca3-93eb-345be12dc588] Took 1.12 seconds to destroy the instance on the hypervisor. 
[ 1097.445144] env[62070]: DEBUG oslo.service.loopingcall [None req-f4d532de-c444-4aef-b392-1829c09a1840 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1097.445345] env[62070]: DEBUG nova.compute.manager [-] [instance: a5cba512-9b50-4ca3-93eb-345be12dc588] Deallocating network for instance {{(pid=62070) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1097.445437] env[62070]: DEBUG nova.network.neutron [-] [instance: a5cba512-9b50-4ca3-93eb-345be12dc588] deallocate_for_instance() {{(pid=62070) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1097.818210] env[62070]: INFO nova.compute.claims [None req-2cb3cba3-07f0-4006-ba36-22b4a0263356 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 38573462-18e5-4ba8-ad32-6ebc7bcf7c76] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1097.837643] env[62070]: DEBUG oslo_concurrency.lockutils [None req-bae94e7f-c4b8-4eb3-b0c3-0b08847f6b91 tempest-ServersNegativeTestJSON-2028058905 tempest-ServersNegativeTestJSON-2028058905-project-member] Lock "53a1791d-38fd-4721-b82c-2f0922348300" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.746s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1097.856191] env[62070]: DEBUG nova.compute.manager [req-5fb10be9-1f5f-4bb5-bf73-e952556233ac req-58917f56-2836-4b3a-9da2-0d737ddfd1d7 service nova] [instance: a5cba512-9b50-4ca3-93eb-345be12dc588] Received event network-vif-deleted-79f2a280-e16e-4dcd-9a80-21c1fc225a8c {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1097.856191] env[62070]: INFO nova.compute.manager [req-5fb10be9-1f5f-4bb5-bf73-e952556233ac req-58917f56-2836-4b3a-9da2-0d737ddfd1d7 service nova] [instance: a5cba512-9b50-4ca3-93eb-345be12dc588] Neutron deleted interface 79f2a280-e16e-4dcd-9a80-21c1fc225a8c; detaching it from the instance and deleting it from the info cache [ 1097.856344] env[62070]: DEBUG nova.network.neutron [req-5fb10be9-1f5f-4bb5-bf73-e952556233ac req-58917f56-2836-4b3a-9da2-0d737ddfd1d7 service nova] [instance: a5cba512-9b50-4ca3-93eb-345be12dc588] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1098.328155] env[62070]: INFO nova.compute.resource_tracker [None req-2cb3cba3-07f0-4006-ba36-22b4a0263356 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 38573462-18e5-4ba8-ad32-6ebc7bcf7c76] Updating resource usage from migration 1d6228ce-1969-4fbc-bd2e-5377748e647e [ 1098.331909] env[62070]: DEBUG nova.network.neutron [-] [instance: a5cba512-9b50-4ca3-93eb-345be12dc588] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1098.359234] env[62070]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-971bbb46-c434-46f6-847f-316793b286b2 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
1098.373917] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c56c968-2c52-418b-be88-cd488403f012 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.406359] env[62070]: DEBUG nova.compute.manager [req-5fb10be9-1f5f-4bb5-bf73-e952556233ac req-58917f56-2836-4b3a-9da2-0d737ddfd1d7 service nova] [instance: a5cba512-9b50-4ca3-93eb-345be12dc588] Detach interface failed, port_id=79f2a280-e16e-4dcd-9a80-21c1fc225a8c, reason: Instance a5cba512-9b50-4ca3-93eb-345be12dc588 could not be found. {{(pid=62070) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1098.474756] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cce2c8b5-7281-45b7-b30b-d312c23ee2d8 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.483499] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0ead005-6c18-462b-8d00-e64d7ab06f91 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.517111] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-057cdc7c-8ed4-4acd-9b82-52f6724cf736 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.526486] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f725d881-1566-47bb-8a42-6474804cfb98 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.541402] env[62070]: DEBUG nova.compute.provider_tree [None req-2cb3cba3-07f0-4006-ba36-22b4a0263356 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1098.835171] env[62070]: INFO nova.compute.manager [-] [instance: a5cba512-9b50-4ca3-93eb-345be12dc588] Took 1.39 seconds to deallocate network for instance. 
[ 1099.044975] env[62070]: DEBUG nova.scheduler.client.report [None req-2cb3cba3-07f0-4006-ba36-22b4a0263356 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1099.341986] env[62070]: DEBUG oslo_concurrency.lockutils [None req-f4d532de-c444-4aef-b392-1829c09a1840 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1099.550394] env[62070]: DEBUG oslo_concurrency.lockutils [None req-2cb3cba3-07f0-4006-ba36-22b4a0263356 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.237s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1099.550611] env[62070]: INFO nova.compute.manager [None req-2cb3cba3-07f0-4006-ba36-22b4a0263356 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 38573462-18e5-4ba8-ad32-6ebc7bcf7c76] Migrating [ 1099.557661] env[62070]: DEBUG oslo_concurrency.lockutils [None req-03eb9b6d-eabb-4896-b7c7-cd2c759e74f7 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 2.249s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1099.852993] env[62070]: DEBUG oslo_service.periodic_task [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62070) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1099.853263] env[62070]: DEBUG oslo_service.periodic_task [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62070) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1099.853416] env[62070]: DEBUG nova.compute.manager [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Starting heal instance info cache {{(pid=62070) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}} [ 1100.074195] env[62070]: INFO nova.compute.claims [None req-03eb9b6d-eabb-4896-b7c7-cd2c759e74f7 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: fec23dd4-e956-42dd-b9a2-c8577f77cd81] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1100.082979] env[62070]: DEBUG oslo_concurrency.lockutils [None req-2cb3cba3-07f0-4006-ba36-22b4a0263356 
tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Acquiring lock "refresh_cache-38573462-18e5-4ba8-ad32-6ebc7bcf7c76" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1100.082979] env[62070]: DEBUG oslo_concurrency.lockutils [None req-2cb3cba3-07f0-4006-ba36-22b4a0263356 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Acquired lock "refresh_cache-38573462-18e5-4ba8-ad32-6ebc7bcf7c76" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1100.082979] env[62070]: DEBUG nova.network.neutron [None req-2cb3cba3-07f0-4006-ba36-22b4a0263356 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 38573462-18e5-4ba8-ad32-6ebc7bcf7c76] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1100.584238] env[62070]: INFO nova.compute.resource_tracker [None req-03eb9b6d-eabb-4896-b7c7-cd2c759e74f7 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: fec23dd4-e956-42dd-b9a2-c8577f77cd81] Updating resource usage from migration 9881befb-bca4-4f20-8cfa-c50e03abd0bf [ 1100.703982] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f19fa664-4038-494a-8a32-6c21ab51e303 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.712992] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c1e7a05-184b-4eee-a6de-492aa7c966cf {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.747166] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e803e675-4c6f-4a42-99e6-23987fdc4676 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.755288] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0849780c-f8d3-4cb4-a454-b08f73b9b45c {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.768995] env[62070]: DEBUG nova.compute.provider_tree [None req-03eb9b6d-eabb-4896-b7c7-cd2c759e74f7 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1100.845263] env[62070]: DEBUG nova.network.neutron [None req-2cb3cba3-07f0-4006-ba36-22b4a0263356 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 38573462-18e5-4ba8-ad32-6ebc7bcf7c76] Updating instance_info_cache with network_info: [{"id": "dc2ee731-fdfd-479f-ba45-d8a9e7f50a0e", "address": "fa:16:3e:19:97:b8", "network": {"id": "0d81bd04-b549-4e1f-97a2-0a0b9391dd3f", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-108214409-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, 
"meta": {}, "floating_ips": [{"address": "10.180.180.248", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c91e5eeeeb1742f499b2edaf76a93a3b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cf5bfbae-a882-4d34-be33-b31e274b3077", "external-id": "nsx-vlan-transportzone-556", "segmentation_id": 556, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdc2ee731-fd", "ovs_interfaceid": "dc2ee731-fdfd-479f-ba45-d8a9e7f50a0e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1101.272248] env[62070]: DEBUG nova.scheduler.client.report [None req-03eb9b6d-eabb-4896-b7c7-cd2c759e74f7 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1101.348512] env[62070]: DEBUG oslo_concurrency.lockutils [None req-2cb3cba3-07f0-4006-ba36-22b4a0263356 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Releasing lock "refresh_cache-38573462-18e5-4ba8-ad32-6ebc7bcf7c76" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1101.777902] env[62070]: DEBUG oslo_concurrency.lockutils [None req-03eb9b6d-eabb-4896-b7c7-cd2c759e74f7 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.220s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1101.778313] env[62070]: INFO nova.compute.manager [None req-03eb9b6d-eabb-4896-b7c7-cd2c759e74f7 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: fec23dd4-e956-42dd-b9a2-c8577f77cd81] Migrating [ 1101.785154] env[62070]: DEBUG oslo_concurrency.lockutils [None req-f4d532de-c444-4aef-b392-1829c09a1840 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.443s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1101.785388] env[62070]: DEBUG nova.objects.instance [None req-f4d532de-c444-4aef-b392-1829c09a1840 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Lazy-loading 'resources' on Instance uuid a5cba512-9b50-4ca3-93eb-345be12dc588 {{(pid=62070) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1102.295668] env[62070]: DEBUG 
oslo_concurrency.lockutils [None req-03eb9b6d-eabb-4896-b7c7-cd2c759e74f7 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Acquiring lock "refresh_cache-fec23dd4-e956-42dd-b9a2-c8577f77cd81" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1102.295906] env[62070]: DEBUG oslo_concurrency.lockutils [None req-03eb9b6d-eabb-4896-b7c7-cd2c759e74f7 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Acquired lock "refresh_cache-fec23dd4-e956-42dd-b9a2-c8577f77cd81" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1102.296168] env[62070]: DEBUG nova.network.neutron [None req-03eb9b6d-eabb-4896-b7c7-cd2c759e74f7 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: fec23dd4-e956-42dd-b9a2-c8577f77cd81] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1102.401976] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5838787d-7a4e-42dd-ae49-48077686f8ab {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.410902] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bb6695b-c3f5-44a7-be31-71936159255b {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.443917] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b680c3c3-78a6-4ae1-ab4d-6bcaac7c872b {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.453286] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-162a7fa4-b542-4a31-9712-356e10557311 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.468068] env[62070]: DEBUG nova.compute.provider_tree [None req-f4d532de-c444-4aef-b392-1829c09a1840 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1102.865747] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8a582e0-0034-4c27-bdaa-631f95cfb314 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.885483] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-2cb3cba3-07f0-4006-ba36-22b4a0263356 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 38573462-18e5-4ba8-ad32-6ebc7bcf7c76] Updating instance '38573462-18e5-4ba8-ad32-6ebc7bcf7c76' progress to 0 {{(pid=62070) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 1102.970725] env[62070]: DEBUG nova.scheduler.client.report [None req-f4d532de-c444-4aef-b392-1829c09a1840 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on 
inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1103.011458] env[62070]: DEBUG nova.network.neutron [None req-03eb9b6d-eabb-4896-b7c7-cd2c759e74f7 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: fec23dd4-e956-42dd-b9a2-c8577f77cd81] Updating instance_info_cache with network_info: [{"id": "933a577b-8b0c-4c0d-ae12-372e4b70b7c9", "address": "fa:16:3e:34:74:6b", "network": {"id": "1ca75409-ae3f-4837-a2b6-ed2445253336", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1883034081-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.208", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f29ac48ab6544ec0bd1d210aec05dbc5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1559ce49-7345-443f-bf02-4bfeb88356ef", "external-id": "nsx-vlan-transportzone-670", "segmentation_id": 670, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap933a577b-8b", "ovs_interfaceid": "933a577b-8b0c-4c0d-ae12-372e4b70b7c9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1103.392177] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-2cb3cba3-07f0-4006-ba36-22b4a0263356 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 38573462-18e5-4ba8-ad32-6ebc7bcf7c76] Powering off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1103.392490] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0ccfc7ad-71f0-438c-a15f-66b92e7b92cb {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.402307] env[62070]: DEBUG oslo_vmware.api [None req-2cb3cba3-07f0-4006-ba36-22b4a0263356 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Waiting for the task: (returnval){ [ 1103.402307] env[62070]: value = "task-1122475" [ 1103.402307] env[62070]: _type = "Task" [ 1103.402307] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1103.411164] env[62070]: DEBUG oslo_vmware.api [None req-2cb3cba3-07f0-4006-ba36-22b4a0263356 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': task-1122475, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1103.475323] env[62070]: DEBUG oslo_concurrency.lockutils [None req-f4d532de-c444-4aef-b392-1829c09a1840 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.690s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1103.495490] env[62070]: INFO nova.scheduler.client.report [None req-f4d532de-c444-4aef-b392-1829c09a1840 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Deleted allocations for instance a5cba512-9b50-4ca3-93eb-345be12dc588 [ 1103.513805] env[62070]: DEBUG oslo_concurrency.lockutils [None req-03eb9b6d-eabb-4896-b7c7-cd2c759e74f7 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Releasing lock "refresh_cache-fec23dd4-e956-42dd-b9a2-c8577f77cd81" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1103.912920] env[62070]: DEBUG oslo_vmware.api [None req-2cb3cba3-07f0-4006-ba36-22b4a0263356 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': task-1122475, 'name': PowerOffVM_Task, 'duration_secs': 0.202135} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1103.913472] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-2cb3cba3-07f0-4006-ba36-22b4a0263356 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 38573462-18e5-4ba8-ad32-6ebc7bcf7c76] Powered off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1103.913472] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-2cb3cba3-07f0-4006-ba36-22b4a0263356 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 38573462-18e5-4ba8-ad32-6ebc7bcf7c76] Updating instance '38573462-18e5-4ba8-ad32-6ebc7bcf7c76' progress to 17 {{(pid=62070) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 1104.003993] env[62070]: DEBUG oslo_concurrency.lockutils [None req-f4d532de-c444-4aef-b392-1829c09a1840 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Lock "a5cba512-9b50-4ca3-93eb-345be12dc588" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.686s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1104.378494] env[62070]: DEBUG nova.compute.manager [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Didn't find any instances for network info cache update. 
{{(pid=62070) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10014}} [ 1104.378663] env[62070]: DEBUG oslo_service.periodic_task [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62070) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1104.378881] env[62070]: DEBUG oslo_service.periodic_task [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62070) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1104.379032] env[62070]: DEBUG oslo_service.periodic_task [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62070) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1104.379198] env[62070]: DEBUG oslo_service.periodic_task [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62070) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1104.379338] env[62070]: DEBUG oslo_service.periodic_task [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62070) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1104.379480] env[62070]: DEBUG oslo_service.periodic_task [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62070) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1104.379611] env[62070]: DEBUG nova.compute.manager [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62070) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}} [ 1104.379761] env[62070]: DEBUG oslo_service.periodic_task [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62070) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1104.419102] env[62070]: DEBUG nova.virt.hardware [None req-2cb3cba3-07f0-4006-ba36-22b4a0263356 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T09:21:37Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1104.419343] env[62070]: DEBUG nova.virt.hardware [None req-2cb3cba3-07f0-4006-ba36-22b4a0263356 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Flavor limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1104.419487] env[62070]: DEBUG nova.virt.hardware [None req-2cb3cba3-07f0-4006-ba36-22b4a0263356 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Image limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1104.419676] env[62070]: DEBUG nova.virt.hardware [None req-2cb3cba3-07f0-4006-ba36-22b4a0263356 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Flavor pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1104.419822] env[62070]: DEBUG nova.virt.hardware [None req-2cb3cba3-07f0-4006-ba36-22b4a0263356 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Image pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1104.419970] env[62070]: DEBUG nova.virt.hardware [None req-2cb3cba3-07f0-4006-ba36-22b4a0263356 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1104.420194] env[62070]: DEBUG nova.virt.hardware [None req-2cb3cba3-07f0-4006-ba36-22b4a0263356 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1104.420358] env[62070]: DEBUG nova.virt.hardware [None req-2cb3cba3-07f0-4006-ba36-22b4a0263356 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62070) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1104.420529] env[62070]: DEBUG nova.virt.hardware [None req-2cb3cba3-07f0-4006-ba36-22b4a0263356 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Got 1 possible topologies {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1104.420694] env[62070]: DEBUG nova.virt.hardware [None req-2cb3cba3-07f0-4006-ba36-22b4a0263356 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1104.420921] env[62070]: DEBUG nova.virt.hardware [None req-2cb3cba3-07f0-4006-ba36-22b4a0263356 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1104.426127] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-821b9e19-dc17-4f05-9fe0-a9c3e4d23550 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.443937] env[62070]: DEBUG oslo_vmware.api [None req-2cb3cba3-07f0-4006-ba36-22b4a0263356 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Waiting for the task: (returnval){ [ 1104.443937] env[62070]: value = "task-1122476" [ 1104.443937] env[62070]: _type = "Task" [ 1104.443937] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1104.452578] env[62070]: DEBUG oslo_vmware.api [None req-2cb3cba3-07f0-4006-ba36-22b4a0263356 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': task-1122476, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1104.883108] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1104.883373] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1104.883584] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1104.883749] env[62070]: DEBUG nova.compute.resource_tracker [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62070) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1104.884697] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1a06e00-a54e-45dc-bd4d-0ccfc12e6f74 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.893702] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1463dd7-a9c6-4b02-a67c-2325f3edd55a {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.908176] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2566c8b-a99d-47b6-ba4b-6a1a6718553b {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.915769] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7c4d4a6-cc3b-4058-a4be-f3ae7715772b {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.944867] env[62070]: DEBUG nova.compute.resource_tracker [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179914MB free_disk=169GB free_vcpus=48 pci_devices=None {{(pid=62070) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1104.945035] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1104.945248] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62070) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1104.955578] env[62070]: DEBUG oslo_vmware.api [None req-2cb3cba3-07f0-4006-ba36-22b4a0263356 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': task-1122476, 'name': ReconfigVM_Task, 'duration_secs': 0.164222} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1104.955916] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-2cb3cba3-07f0-4006-ba36-22b4a0263356 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 38573462-18e5-4ba8-ad32-6ebc7bcf7c76] Updating instance '38573462-18e5-4ba8-ad32-6ebc7bcf7c76' progress to 33 {{(pid=62070) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 1105.030380] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f0f24ac-b3eb-476e-93ca-c6e5be756083 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.049706] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-03eb9b6d-eabb-4896-b7c7-cd2c759e74f7 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: fec23dd4-e956-42dd-b9a2-c8577f77cd81] Updating instance 'fec23dd4-e956-42dd-b9a2-c8577f77cd81' progress to 0 {{(pid=62070) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 1105.462393] env[62070]: DEBUG nova.virt.hardware [None req-2cb3cba3-07f0-4006-ba36-22b4a0263356 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T09:21:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1105.462651] env[62070]: DEBUG nova.virt.hardware [None req-2cb3cba3-07f0-4006-ba36-22b4a0263356 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Flavor limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1105.462847] env[62070]: DEBUG nova.virt.hardware [None req-2cb3cba3-07f0-4006-ba36-22b4a0263356 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Image limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1105.462992] env[62070]: DEBUG nova.virt.hardware [None req-2cb3cba3-07f0-4006-ba36-22b4a0263356 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Flavor pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1105.463160] env[62070]: DEBUG nova.virt.hardware [None req-2cb3cba3-07f0-4006-ba36-22b4a0263356 tempest-ServerActionsTestOtherA-1868654732 
tempest-ServerActionsTestOtherA-1868654732-project-member] Image pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1105.463313] env[62070]: DEBUG nova.virt.hardware [None req-2cb3cba3-07f0-4006-ba36-22b4a0263356 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1105.463519] env[62070]: DEBUG nova.virt.hardware [None req-2cb3cba3-07f0-4006-ba36-22b4a0263356 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1105.463680] env[62070]: DEBUG nova.virt.hardware [None req-2cb3cba3-07f0-4006-ba36-22b4a0263356 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1105.463934] env[62070]: DEBUG nova.virt.hardware [None req-2cb3cba3-07f0-4006-ba36-22b4a0263356 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Got 1 possible topologies {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1105.464154] env[62070]: DEBUG nova.virt.hardware [None req-2cb3cba3-07f0-4006-ba36-22b4a0263356 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1105.464337] env[62070]: DEBUG nova.virt.hardware [None req-2cb3cba3-07f0-4006-ba36-22b4a0263356 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1105.483616] env[62070]: DEBUG oslo_concurrency.lockutils [None req-0b0715c5-79e9-480e-83e7-b5809f9a7aae tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Acquiring lock "8d3cc6bf-5ae8-45d8-ba70-269e0d290ee4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1105.483855] env[62070]: DEBUG oslo_concurrency.lockutils [None req-0b0715c5-79e9-480e-83e7-b5809f9a7aae tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Lock "8d3cc6bf-5ae8-45d8-ba70-269e0d290ee4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1105.554564] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-03eb9b6d-eabb-4896-b7c7-cd2c759e74f7 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: fec23dd4-e956-42dd-b9a2-c8577f77cd81] Powering off the VM {{(pid=62070) 
power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1105.554886] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f8828b19-9c7e-4376-95e3-ba5172b8fff2 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.564107] env[62070]: DEBUG oslo_vmware.api [None req-03eb9b6d-eabb-4896-b7c7-cd2c759e74f7 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Waiting for the task: (returnval){ [ 1105.564107] env[62070]: value = "task-1122477" [ 1105.564107] env[62070]: _type = "Task" [ 1105.564107] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1105.572381] env[62070]: DEBUG oslo_vmware.api [None req-03eb9b6d-eabb-4896-b7c7-cd2c759e74f7 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Task: {'id': task-1122477, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1105.956756] env[62070]: DEBUG nova.compute.resource_tracker [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Applying migration context for instance 38573462-18e5-4ba8-ad32-6ebc7bcf7c76 as it has an incoming, in-progress migration 1d6228ce-1969-4fbc-bd2e-5377748e647e. Migration status is migrating {{(pid=62070) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 1105.957020] env[62070]: DEBUG nova.compute.resource_tracker [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Applying migration context for instance fec23dd4-e956-42dd-b9a2-c8577f77cd81 as it has an incoming, in-progress migration 9881befb-bca4-4f20-8cfa-c50e03abd0bf. 
Migration status is migrating {{(pid=62070) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 1105.958057] env[62070]: INFO nova.compute.resource_tracker [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] [instance: 38573462-18e5-4ba8-ad32-6ebc7bcf7c76] Updating resource usage from migration 1d6228ce-1969-4fbc-bd2e-5377748e647e [ 1105.958220] env[62070]: INFO nova.compute.resource_tracker [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] [instance: fec23dd4-e956-42dd-b9a2-c8577f77cd81] Updating resource usage from migration 9881befb-bca4-4f20-8cfa-c50e03abd0bf [ 1105.972665] env[62070]: ERROR nova.compute.manager [None req-2cb3cba3-07f0-4006-ba36-22b4a0263356 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 38573462-18e5-4ba8-ad32-6ebc7bcf7c76] Setting instance vm_state to ERROR: AttributeError: 'NoneType' object has no attribute 'key' [ 1105.972665] env[62070]: ERROR nova.compute.manager [instance: 38573462-18e5-4ba8-ad32-6ebc7bcf7c76] Traceback (most recent call last): [ 1105.972665] env[62070]: ERROR nova.compute.manager [instance: 38573462-18e5-4ba8-ad32-6ebc7bcf7c76] File "/opt/stack/nova/nova/compute/manager.py", line 10865, in _error_out_instance_on_exception [ 1105.972665] env[62070]: ERROR nova.compute.manager [instance: 38573462-18e5-4ba8-ad32-6ebc7bcf7c76] yield [ 1105.972665] env[62070]: ERROR nova.compute.manager [instance: 38573462-18e5-4ba8-ad32-6ebc7bcf7c76] File "/opt/stack/nova/nova/compute/manager.py", line 6105, in _resize_instance [ 1105.972665] env[62070]: ERROR nova.compute.manager [instance: 38573462-18e5-4ba8-ad32-6ebc7bcf7c76] disk_info = self.driver.migrate_disk_and_power_off( [ 1105.972665] env[62070]: ERROR nova.compute.manager [instance: 38573462-18e5-4ba8-ad32-6ebc7bcf7c76] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 264, in migrate_disk_and_power_off [ 1105.972665] env[62070]: ERROR nova.compute.manager [instance: 38573462-18e5-4ba8-ad32-6ebc7bcf7c76] return self._vmops.migrate_disk_and_power_off(context, instance, [ 1105.972665] env[62070]: ERROR nova.compute.manager [instance: 38573462-18e5-4ba8-ad32-6ebc7bcf7c76] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 1467, in migrate_disk_and_power_off [ 1105.972665] env[62070]: ERROR nova.compute.manager [instance: 38573462-18e5-4ba8-ad32-6ebc7bcf7c76] self._resize_disk(instance, vm_ref, vmdk, flavor) [ 1105.972665] env[62070]: ERROR nova.compute.manager [instance: 38573462-18e5-4ba8-ad32-6ebc7bcf7c76] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 1398, in _resize_disk [ 1105.972665] env[62070]: ERROR nova.compute.manager [instance: 38573462-18e5-4ba8-ad32-6ebc7bcf7c76] self._volumeops.detach_disk_from_vm(vm_ref, instance, vmdk.device) [ 1105.972665] env[62070]: ERROR nova.compute.manager [instance: 38573462-18e5-4ba8-ad32-6ebc7bcf7c76] File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 121, in detach_disk_from_vm [ 1105.972665] env[62070]: ERROR nova.compute.manager [instance: 38573462-18e5-4ba8-ad32-6ebc7bcf7c76] disk_key = device.key [ 1105.972665] env[62070]: ERROR nova.compute.manager [instance: 38573462-18e5-4ba8-ad32-6ebc7bcf7c76] AttributeError: 'NoneType' object has no attribute 'key' [ 1105.972665] env[62070]: ERROR nova.compute.manager [instance: 38573462-18e5-4ba8-ad32-6ebc7bcf7c76] [ 1105.978436] env[62070]: DEBUG nova.compute.resource_tracker [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Instance 
67e99ada-a8e6-4034-b19b-5b2cb883b735 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62070) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1105.978575] env[62070]: DEBUG nova.compute.resource_tracker [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Instance d8284a01-bbf6-4607-b2db-33bf2cd5457d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62070) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1105.978709] env[62070]: DEBUG nova.compute.resource_tracker [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Instance 20c4fabc-fc9b-49c7-ab28-fa092ad66038 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62070) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1105.978826] env[62070]: DEBUG nova.compute.resource_tracker [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Migration 1d6228ce-1969-4fbc-bd2e-5377748e647e is active on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62070) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 1105.978942] env[62070]: DEBUG nova.compute.resource_tracker [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Instance 38573462-18e5-4ba8-ad32-6ebc7bcf7c76 actively managed on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=62070) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1105.979078] env[62070]: DEBUG nova.compute.resource_tracker [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Migration 9881befb-bca4-4f20-8cfa-c50e03abd0bf is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62070) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 1105.979238] env[62070]: DEBUG nova.compute.resource_tracker [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Instance fec23dd4-e956-42dd-b9a2-c8577f77cd81 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=62070) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1105.985845] env[62070]: DEBUG nova.compute.manager [None req-0b0715c5-79e9-480e-83e7-b5809f9a7aae tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] [instance: 8d3cc6bf-5ae8-45d8-ba70-269e0d290ee4] Starting instance... {{(pid=62070) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1106.075700] env[62070]: DEBUG oslo_vmware.api [None req-03eb9b6d-eabb-4896-b7c7-cd2c759e74f7 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Task: {'id': task-1122477, 'name': PowerOffVM_Task, 'duration_secs': 0.193427} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1106.076063] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-03eb9b6d-eabb-4896-b7c7-cd2c759e74f7 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: fec23dd4-e956-42dd-b9a2-c8577f77cd81] Powered off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1106.076277] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-03eb9b6d-eabb-4896-b7c7-cd2c759e74f7 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: fec23dd4-e956-42dd-b9a2-c8577f77cd81] Updating instance 'fec23dd4-e956-42dd-b9a2-c8577f77cd81' progress to 17 {{(pid=62070) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 1106.481663] env[62070]: DEBUG nova.compute.resource_tracker [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Instance 8d3cc6bf-5ae8-45d8-ba70-269e0d290ee4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62070) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1106.482108] env[62070]: DEBUG nova.compute.resource_tracker [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Total usable vcpus: 48, total allocated vcpus: 7 {{(pid=62070) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1106.482108] env[62070]: DEBUG nova.compute.resource_tracker [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1984MB phys_disk=200GB used_disk=5GB total_vcpus=48 used_vcpus=7 pci_stats=[] {{(pid=62070) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1106.492433] env[62070]: INFO nova.compute.manager [None req-2cb3cba3-07f0-4006-ba36-22b4a0263356 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 38573462-18e5-4ba8-ad32-6ebc7bcf7c76] Swapping old allocation on dict_keys(['21c7c111-1b69-4468-b2c4-5dd96014fbd6']) held by migration 1d6228ce-1969-4fbc-bd2e-5377748e647e for instance [ 1106.505353] env[62070]: DEBUG oslo_concurrency.lockutils [None req-0b0715c5-79e9-480e-83e7-b5809f9a7aae tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1106.513394] env[62070]: DEBUG nova.scheduler.client.report [None req-2cb3cba3-07f0-4006-ba36-22b4a0263356 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Overwriting current allocation {'allocations': {'21c7c111-1b69-4468-b2c4-5dd96014fbd6': {'resources': {'MEMORY_MB': 256, 'VCPU': 1}, 'generation': 158}}, 'project_id': 'c91e5eeeeb1742f499b2edaf76a93a3b', 'user_id': '0aa820b3e16d4d6fbc6bda0b232025fc', 'consumer_generation': 1} on consumer 38573462-18e5-4ba8-ad32-6ebc7bcf7c76 {{(pid=62070) move_allocations /opt/stack/nova/nova/scheduler/client/report.py:2032}} [ 1106.575386] env[62070]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6103f87c-5521-41b8-b10e-98995b062359 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.582815] env[62070]: DEBUG nova.virt.hardware [None req-03eb9b6d-eabb-4896-b7c7-cd2c759e74f7 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T09:21:37Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=43ea607c-7ece-4601-9b11-75c6a16aa7dd,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1106.583070] env[62070]: DEBUG nova.virt.hardware [None req-03eb9b6d-eabb-4896-b7c7-cd2c759e74f7 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Flavor limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1106.583239] env[62070]: DEBUG nova.virt.hardware [None req-03eb9b6d-eabb-4896-b7c7-cd2c759e74f7 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Image limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1106.583429] env[62070]: DEBUG nova.virt.hardware [None req-03eb9b6d-eabb-4896-b7c7-cd2c759e74f7 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Flavor pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1106.583584] env[62070]: DEBUG nova.virt.hardware [None req-03eb9b6d-eabb-4896-b7c7-cd2c759e74f7 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Image pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1106.583739] env[62070]: DEBUG nova.virt.hardware [None req-03eb9b6d-eabb-4896-b7c7-cd2c759e74f7 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1106.583946] env[62070]: DEBUG nova.virt.hardware [None req-03eb9b6d-eabb-4896-b7c7-cd2c759e74f7 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1106.584128] env[62070]: DEBUG nova.virt.hardware [None req-03eb9b6d-eabb-4896-b7c7-cd2c759e74f7 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1106.584301] env[62070]: DEBUG nova.virt.hardware [None req-03eb9b6d-eabb-4896-b7c7-cd2c759e74f7 tempest-ServerActionsTestJSON-67612943 
tempest-ServerActionsTestJSON-67612943-project-member] Got 1 possible topologies {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1106.584469] env[62070]: DEBUG nova.virt.hardware [None req-03eb9b6d-eabb-4896-b7c7-cd2c759e74f7 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1106.584647] env[62070]: DEBUG nova.virt.hardware [None req-03eb9b6d-eabb-4896-b7c7-cd2c759e74f7 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1106.591480] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ac775786-59e3-4548-baf4-ed1e2c8627a1 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.601976] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d0ee177-8862-4261-b66a-1af6c188e36e {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.639699] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7fe3174-2441-493f-92f2-9388b7df319f {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.641732] env[62070]: DEBUG oslo_vmware.api [None req-03eb9b6d-eabb-4896-b7c7-cd2c759e74f7 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Waiting for the task: (returnval){ [ 1106.641732] env[62070]: value = "task-1122478" [ 1106.641732] env[62070]: _type = "Task" [ 1106.641732] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1106.648969] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7476389f-e5e6-4fea-9c55-c7910b600f4d {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.656470] env[62070]: DEBUG oslo_vmware.api [None req-03eb9b6d-eabb-4896-b7c7-cd2c759e74f7 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Task: {'id': task-1122478, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1106.667193] env[62070]: DEBUG nova.compute.provider_tree [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1107.154593] env[62070]: DEBUG oslo_vmware.api [None req-03eb9b6d-eabb-4896-b7c7-cd2c759e74f7 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Task: {'id': task-1122478, 'name': ReconfigVM_Task, 'duration_secs': 0.194724} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1107.155091] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-03eb9b6d-eabb-4896-b7c7-cd2c759e74f7 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: fec23dd4-e956-42dd-b9a2-c8577f77cd81] Updating instance 'fec23dd4-e956-42dd-b9a2-c8577f77cd81' progress to 33 {{(pid=62070) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 1107.169986] env[62070]: DEBUG nova.scheduler.client.report [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1107.665091] env[62070]: DEBUG nova.virt.hardware [None req-03eb9b6d-eabb-4896-b7c7-cd2c759e74f7 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T09:21:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=43ea607c-7ece-4601-9b11-75c6a16aa7dd,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1107.665320] env[62070]: DEBUG nova.virt.hardware [None req-03eb9b6d-eabb-4896-b7c7-cd2c759e74f7 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Flavor limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1107.665485] env[62070]: DEBUG nova.virt.hardware [None req-03eb9b6d-eabb-4896-b7c7-cd2c759e74f7 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Image limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1107.665734] env[62070]: DEBUG nova.virt.hardware [None req-03eb9b6d-eabb-4896-b7c7-cd2c759e74f7 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Flavor pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1107.665847] env[62070]: DEBUG nova.virt.hardware [None req-03eb9b6d-eabb-4896-b7c7-cd2c759e74f7 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Image pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1107.665989] env[62070]: DEBUG nova.virt.hardware [None req-03eb9b6d-eabb-4896-b7c7-cd2c759e74f7 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, 
cores=65536, threads=65536 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1107.666212] env[62070]: DEBUG nova.virt.hardware [None req-03eb9b6d-eabb-4896-b7c7-cd2c759e74f7 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1107.666374] env[62070]: DEBUG nova.virt.hardware [None req-03eb9b6d-eabb-4896-b7c7-cd2c759e74f7 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1107.666574] env[62070]: DEBUG nova.virt.hardware [None req-03eb9b6d-eabb-4896-b7c7-cd2c759e74f7 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Got 1 possible topologies {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1107.666806] env[62070]: DEBUG nova.virt.hardware [None req-03eb9b6d-eabb-4896-b7c7-cd2c759e74f7 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1107.666931] env[62070]: DEBUG nova.virt.hardware [None req-03eb9b6d-eabb-4896-b7c7-cd2c759e74f7 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1107.672175] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-03eb9b6d-eabb-4896-b7c7-cd2c759e74f7 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: fec23dd4-e956-42dd-b9a2-c8577f77cd81] Reconfiguring VM instance instance-00000067 to detach disk 2000 {{(pid=62070) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1107.672925] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-74be9f47-dbc7-4022-9728-8482a7d91efb {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.686221] env[62070]: DEBUG nova.compute.resource_tracker [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62070) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1107.686399] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.741s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1107.686655] env[62070]: DEBUG oslo_concurrency.lockutils [None req-0b0715c5-79e9-480e-83e7-b5809f9a7aae tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.181s {{(pid=62070) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1107.688154] env[62070]: INFO nova.compute.claims [None req-0b0715c5-79e9-480e-83e7-b5809f9a7aae tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] [instance: 8d3cc6bf-5ae8-45d8-ba70-269e0d290ee4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1107.690765] env[62070]: DEBUG oslo_service.periodic_task [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=62070) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1107.690765] env[62070]: DEBUG nova.compute.manager [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Cleaning up deleted instances with incomplete migration {{(pid=62070) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11266}} [ 1107.698440] env[62070]: DEBUG oslo_vmware.api [None req-03eb9b6d-eabb-4896-b7c7-cd2c759e74f7 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Waiting for the task: (returnval){ [ 1107.698440] env[62070]: value = "task-1122479" [ 1107.698440] env[62070]: _type = "Task" [ 1107.698440] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1107.708100] env[62070]: DEBUG oslo_vmware.api [None req-03eb9b6d-eabb-4896-b7c7-cd2c759e74f7 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Task: {'id': task-1122479, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1107.745368] env[62070]: DEBUG oslo_concurrency.lockutils [None req-895fa041-4a30-4097-9383-c3cb465b2e41 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Acquiring lock "20c4fabc-fc9b-49c7-ab28-fa092ad66038" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1107.745627] env[62070]: DEBUG oslo_concurrency.lockutils [None req-895fa041-4a30-4097-9383-c3cb465b2e41 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Lock "20c4fabc-fc9b-49c7-ab28-fa092ad66038" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1107.745804] env[62070]: INFO nova.compute.manager [None req-895fa041-4a30-4097-9383-c3cb465b2e41 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] [instance: 20c4fabc-fc9b-49c7-ab28-fa092ad66038] Shelving [ 1107.915284] env[62070]: DEBUG oslo_concurrency.lockutils [None req-39e20de5-aa85-413d-8b4d-0862dcd05b83 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Acquiring lock "38573462-18e5-4ba8-ad32-6ebc7bcf7c76" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1107.915512] env[62070]: DEBUG oslo_concurrency.lockutils [None req-39e20de5-aa85-413d-8b4d-0862dcd05b83 
tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Lock "38573462-18e5-4ba8-ad32-6ebc7bcf7c76" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1107.915729] env[62070]: DEBUG oslo_concurrency.lockutils [None req-39e20de5-aa85-413d-8b4d-0862dcd05b83 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Acquiring lock "38573462-18e5-4ba8-ad32-6ebc7bcf7c76-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1107.915941] env[62070]: DEBUG oslo_concurrency.lockutils [None req-39e20de5-aa85-413d-8b4d-0862dcd05b83 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Lock "38573462-18e5-4ba8-ad32-6ebc7bcf7c76-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1107.916140] env[62070]: DEBUG oslo_concurrency.lockutils [None req-39e20de5-aa85-413d-8b4d-0862dcd05b83 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Lock "38573462-18e5-4ba8-ad32-6ebc7bcf7c76-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1107.918324] env[62070]: INFO nova.compute.manager [None req-39e20de5-aa85-413d-8b4d-0862dcd05b83 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 38573462-18e5-4ba8-ad32-6ebc7bcf7c76] Terminating instance [ 1107.920261] env[62070]: DEBUG nova.compute.manager [None req-39e20de5-aa85-413d-8b4d-0862dcd05b83 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 38573462-18e5-4ba8-ad32-6ebc7bcf7c76] Start destroying the instance on the hypervisor. {{(pid=62070) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1107.920477] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-39e20de5-aa85-413d-8b4d-0862dcd05b83 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 38573462-18e5-4ba8-ad32-6ebc7bcf7c76] Powering off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1107.920716] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4aa23c0b-230b-4275-a054-d9784544cbde {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.927707] env[62070]: DEBUG oslo_vmware.api [None req-39e20de5-aa85-413d-8b4d-0862dcd05b83 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Waiting for the task: (returnval){ [ 1107.927707] env[62070]: value = "task-1122480" [ 1107.927707] env[62070]: _type = "Task" [ 1107.927707] env[62070]: } to complete. 
{{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1107.938432] env[62070]: DEBUG oslo_vmware.api [None req-39e20de5-aa85-413d-8b4d-0862dcd05b83 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': task-1122480, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1108.068356] env[62070]: DEBUG oslo_concurrency.lockutils [None req-2cb3cba3-07f0-4006-ba36-22b4a0263356 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1108.209473] env[62070]: DEBUG oslo_vmware.api [None req-03eb9b6d-eabb-4896-b7c7-cd2c759e74f7 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Task: {'id': task-1122479, 'name': ReconfigVM_Task, 'duration_secs': 0.172872} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1108.209810] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-03eb9b6d-eabb-4896-b7c7-cd2c759e74f7 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: fec23dd4-e956-42dd-b9a2-c8577f77cd81] Reconfigured VM instance instance-00000067 to detach disk 2000 {{(pid=62070) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1108.210514] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-157b4ebf-134a-4abb-bd7a-3ee22f796b96 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.232757] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-03eb9b6d-eabb-4896-b7c7-cd2c759e74f7 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: fec23dd4-e956-42dd-b9a2-c8577f77cd81] Reconfiguring VM instance instance-00000067 to attach disk [datastore1] fec23dd4-e956-42dd-b9a2-c8577f77cd81/fec23dd4-e956-42dd-b9a2-c8577f77cd81.vmdk or device None with type thin {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1108.233223] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-51c90789-701d-4787-bafe-8107cb535883 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.253501] env[62070]: DEBUG oslo_vmware.api [None req-03eb9b6d-eabb-4896-b7c7-cd2c759e74f7 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Waiting for the task: (returnval){ [ 1108.253501] env[62070]: value = "task-1122481" [ 1108.253501] env[62070]: _type = "Task" [ 1108.253501] env[62070]: } to complete. 
{{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1108.253928] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-895fa041-4a30-4097-9383-c3cb465b2e41 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] [instance: 20c4fabc-fc9b-49c7-ab28-fa092ad66038] Powering off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1108.254217] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-33609ce6-8fa5-438e-853d-c6dea9c6a102 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.264093] env[62070]: DEBUG oslo_vmware.api [None req-03eb9b6d-eabb-4896-b7c7-cd2c759e74f7 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Task: {'id': task-1122481, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1108.265344] env[62070]: DEBUG oslo_vmware.api [None req-895fa041-4a30-4097-9383-c3cb465b2e41 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Waiting for the task: (returnval){ [ 1108.265344] env[62070]: value = "task-1122482" [ 1108.265344] env[62070]: _type = "Task" [ 1108.265344] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1108.272792] env[62070]: DEBUG oslo_vmware.api [None req-895fa041-4a30-4097-9383-c3cb465b2e41 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Task: {'id': task-1122482, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1108.440618] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-39e20de5-aa85-413d-8b4d-0862dcd05b83 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 38573462-18e5-4ba8-ad32-6ebc7bcf7c76] VM already powered off {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1509}} [ 1108.441066] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-39e20de5-aa85-413d-8b4d-0862dcd05b83 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 38573462-18e5-4ba8-ad32-6ebc7bcf7c76] Volume detach. 
Driver type: vmdk {{(pid=62070) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1108.441335] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-39e20de5-aa85-413d-8b4d-0862dcd05b83 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 38573462-18e5-4ba8-ad32-6ebc7bcf7c76] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-245511', 'volume_id': 'e82aa618-eed9-4876-a9ff-8289ed9697af', 'name': 'volume-e82aa618-eed9-4876-a9ff-8289ed9697af', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '38573462-18e5-4ba8-ad32-6ebc7bcf7c76', 'attached_at': '', 'detached_at': '', 'volume_id': 'e82aa618-eed9-4876-a9ff-8289ed9697af', 'serial': 'e82aa618-eed9-4876-a9ff-8289ed9697af'} {{(pid=62070) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1108.442281] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03057057-326f-4cb0-b01a-4747c138fba7 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.461418] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b935d24-6e37-4a92-b74b-fe7557e64933 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.469088] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdabd469-b4ed-4dcf-b010-2e83a95c25dd {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.487563] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-daf63182-753a-4483-a251-ab8764529df0 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.503743] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-39e20de5-aa85-413d-8b4d-0862dcd05b83 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] The volume has not been displaced from its original location: [datastore1] volume-e82aa618-eed9-4876-a9ff-8289ed9697af/volume-e82aa618-eed9-4876-a9ff-8289ed9697af.vmdk. No consolidation needed. 
{{(pid=62070) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1108.508935] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-39e20de5-aa85-413d-8b4d-0862dcd05b83 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 38573462-18e5-4ba8-ad32-6ebc7bcf7c76] Reconfiguring VM instance instance-0000006b to detach disk 2000 {{(pid=62070) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1108.509284] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9636c8b0-a4ba-4e99-a4ff-dd5e897077e8 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.528281] env[62070]: DEBUG oslo_vmware.api [None req-39e20de5-aa85-413d-8b4d-0862dcd05b83 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Waiting for the task: (returnval){ [ 1108.528281] env[62070]: value = "task-1122483" [ 1108.528281] env[62070]: _type = "Task" [ 1108.528281] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1108.539249] env[62070]: DEBUG oslo_vmware.api [None req-39e20de5-aa85-413d-8b4d-0862dcd05b83 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': task-1122483, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1108.766939] env[62070]: DEBUG oslo_vmware.api [None req-03eb9b6d-eabb-4896-b7c7-cd2c759e74f7 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Task: {'id': task-1122481, 'name': ReconfigVM_Task, 'duration_secs': 0.266726} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1108.770118] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-03eb9b6d-eabb-4896-b7c7-cd2c759e74f7 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: fec23dd4-e956-42dd-b9a2-c8577f77cd81] Reconfigured VM instance instance-00000067 to attach disk [datastore1] fec23dd4-e956-42dd-b9a2-c8577f77cd81/fec23dd4-e956-42dd-b9a2-c8577f77cd81.vmdk or device None with type thin {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1108.770446] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-03eb9b6d-eabb-4896-b7c7-cd2c759e74f7 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: fec23dd4-e956-42dd-b9a2-c8577f77cd81] Updating instance 'fec23dd4-e956-42dd-b9a2-c8577f77cd81' progress to 50 {{(pid=62070) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 1108.779951] env[62070]: DEBUG oslo_vmware.api [None req-895fa041-4a30-4097-9383-c3cb465b2e41 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Task: {'id': task-1122482, 'name': PowerOffVM_Task, 'duration_secs': 0.185424} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1108.780370] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-895fa041-4a30-4097-9383-c3cb465b2e41 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] [instance: 20c4fabc-fc9b-49c7-ab28-fa092ad66038] Powered off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1108.781117] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4451015b-a52f-4bdf-b3a1-1457c81fd52f {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.803071] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1eda28e2-4821-497e-b2a5-7ffb2349dab6 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.863447] env[62070]: DEBUG oslo_service.periodic_task [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62070) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1108.863662] env[62070]: DEBUG oslo_service.periodic_task [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62070) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1108.915588] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd42888f-e372-41a4-9259-4e4cebed4de1 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.924192] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1eee4f8-9a3d-43d2-b258-9d22d25ab5e6 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.962479] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f02b88f-e3ff-4ec6-8b62-7abba4849a31 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.970293] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f46d370-b032-4d6b-9f5b-4ea8bf6a6885 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.983411] env[62070]: DEBUG nova.compute.provider_tree [None req-0b0715c5-79e9-480e-83e7-b5809f9a7aae tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1109.037591] env[62070]: DEBUG oslo_vmware.api [None req-39e20de5-aa85-413d-8b4d-0862dcd05b83 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': task-1122483, 'name': ReconfigVM_Task, 'duration_secs': 0.169594} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1109.037865] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-39e20de5-aa85-413d-8b4d-0862dcd05b83 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 38573462-18e5-4ba8-ad32-6ebc7bcf7c76] Reconfigured VM instance instance-0000006b to detach disk 2000 {{(pid=62070) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1109.042449] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c21ec232-3a3d-4c95-b8c0-bedc1f809366 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.057966] env[62070]: DEBUG oslo_vmware.api [None req-39e20de5-aa85-413d-8b4d-0862dcd05b83 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Waiting for the task: (returnval){ [ 1109.057966] env[62070]: value = "task-1122484" [ 1109.057966] env[62070]: _type = "Task" [ 1109.057966] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1109.068272] env[62070]: DEBUG oslo_vmware.api [None req-39e20de5-aa85-413d-8b4d-0862dcd05b83 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': task-1122484, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1109.240757] env[62070]: DEBUG oslo_concurrency.lockutils [None req-b7313fb3-2290-4fbd-8792-ef8ba59c6633 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Acquiring lock "38573462-18e5-4ba8-ad32-6ebc7bcf7c76" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1109.277475] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0796c6bb-a86b-4a63-a65e-8d48ddc3a4ab {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.297588] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22cabc7b-47ef-4a5c-b567-19baac97b315 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.316713] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-895fa041-4a30-4097-9383-c3cb465b2e41 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] [instance: 20c4fabc-fc9b-49c7-ab28-fa092ad66038] Creating Snapshot of the VM instance {{(pid=62070) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1109.317076] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-03eb9b6d-eabb-4896-b7c7-cd2c759e74f7 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: fec23dd4-e956-42dd-b9a2-c8577f77cd81] Updating instance 'fec23dd4-e956-42dd-b9a2-c8577f77cd81' progress to 67 {{(pid=62070) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 1109.320417] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with 
opID=oslo.vmware-668ab7dd-9363-46d0-9235-282664f5caaf {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.328813] env[62070]: DEBUG oslo_vmware.api [None req-895fa041-4a30-4097-9383-c3cb465b2e41 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Waiting for the task: (returnval){ [ 1109.328813] env[62070]: value = "task-1122485" [ 1109.328813] env[62070]: _type = "Task" [ 1109.328813] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1109.337820] env[62070]: DEBUG oslo_vmware.api [None req-895fa041-4a30-4097-9383-c3cb465b2e41 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Task: {'id': task-1122485, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1109.369419] env[62070]: DEBUG oslo_service.periodic_task [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62070) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1109.369707] env[62070]: DEBUG nova.compute.manager [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Starting heal instance info cache {{(pid=62070) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}} [ 1109.369707] env[62070]: DEBUG nova.compute.manager [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Rebuilding the list of instances to heal {{(pid=62070) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1109.486840] env[62070]: DEBUG nova.scheduler.client.report [None req-0b0715c5-79e9-480e-83e7-b5809f9a7aae tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1109.568920] env[62070]: DEBUG oslo_vmware.api [None req-39e20de5-aa85-413d-8b4d-0862dcd05b83 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': task-1122484, 'name': ReconfigVM_Task, 'duration_secs': 0.122643} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1109.569268] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-39e20de5-aa85-413d-8b4d-0862dcd05b83 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 38573462-18e5-4ba8-ad32-6ebc7bcf7c76] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-245511', 'volume_id': 'e82aa618-eed9-4876-a9ff-8289ed9697af', 'name': 'volume-e82aa618-eed9-4876-a9ff-8289ed9697af', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '38573462-18e5-4ba8-ad32-6ebc7bcf7c76', 'attached_at': '', 'detached_at': '', 'volume_id': 'e82aa618-eed9-4876-a9ff-8289ed9697af', 'serial': 'e82aa618-eed9-4876-a9ff-8289ed9697af'} {{(pid=62070) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1109.569548] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-39e20de5-aa85-413d-8b4d-0862dcd05b83 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 38573462-18e5-4ba8-ad32-6ebc7bcf7c76] Destroying instance {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1109.570334] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09817788-8ce4-48bd-8012-7b4e95ab0d3a {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.576786] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-39e20de5-aa85-413d-8b4d-0862dcd05b83 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 38573462-18e5-4ba8-ad32-6ebc7bcf7c76] Unregistering the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1109.577037] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5560e482-e88c-4f0f-b17c-53c88eb8e663 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.647357] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-39e20de5-aa85-413d-8b4d-0862dcd05b83 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 38573462-18e5-4ba8-ad32-6ebc7bcf7c76] Unregistered the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1109.647593] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-39e20de5-aa85-413d-8b4d-0862dcd05b83 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 38573462-18e5-4ba8-ad32-6ebc7bcf7c76] Deleting contents of the VM from datastore datastore1 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1109.647787] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-39e20de5-aa85-413d-8b4d-0862dcd05b83 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Deleting the datastore file [datastore1] 38573462-18e5-4ba8-ad32-6ebc7bcf7c76 {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1109.648075] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-68690b24-74e5-4366-96e3-4f5b589b7892 {{(pid=62070) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.655879] env[62070]: DEBUG oslo_vmware.api [None req-39e20de5-aa85-413d-8b4d-0862dcd05b83 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Waiting for the task: (returnval){ [ 1109.655879] env[62070]: value = "task-1122487" [ 1109.655879] env[62070]: _type = "Task" [ 1109.655879] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1109.664774] env[62070]: DEBUG oslo_vmware.api [None req-39e20de5-aa85-413d-8b4d-0862dcd05b83 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': task-1122487, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1109.838254] env[62070]: DEBUG oslo_vmware.api [None req-895fa041-4a30-4097-9383-c3cb465b2e41 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Task: {'id': task-1122485, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1109.860570] env[62070]: DEBUG nova.network.neutron [None req-03eb9b6d-eabb-4896-b7c7-cd2c759e74f7 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: fec23dd4-e956-42dd-b9a2-c8577f77cd81] Port 933a577b-8b0c-4c0d-ae12-372e4b70b7c9 binding to destination host cpu-1 is already ACTIVE {{(pid=62070) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3171}} [ 1109.873572] env[62070]: DEBUG nova.compute.manager [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] [instance: 38573462-18e5-4ba8-ad32-6ebc7bcf7c76] Skipping network cache update for instance because it is being deleted. {{(pid=62070) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 1109.873740] env[62070]: DEBUG nova.compute.manager [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] [instance: 8d3cc6bf-5ae8-45d8-ba70-269e0d290ee4] Skipping network cache update for instance because it is Building. 
{{(pid=62070) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 1109.903621] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Acquiring lock "refresh_cache-67e99ada-a8e6-4034-b19b-5b2cb883b735" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1109.903773] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Acquired lock "refresh_cache-67e99ada-a8e6-4034-b19b-5b2cb883b735" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1109.903921] env[62070]: DEBUG nova.network.neutron [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] [instance: 67e99ada-a8e6-4034-b19b-5b2cb883b735] Forcefully refreshing network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2004}} [ 1109.904090] env[62070]: DEBUG nova.objects.instance [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Lazy-loading 'info_cache' on Instance uuid 67e99ada-a8e6-4034-b19b-5b2cb883b735 {{(pid=62070) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1109.991756] env[62070]: DEBUG oslo_concurrency.lockutils [None req-0b0715c5-79e9-480e-83e7-b5809f9a7aae tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.305s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1109.992481] env[62070]: DEBUG nova.compute.manager [None req-0b0715c5-79e9-480e-83e7-b5809f9a7aae tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] [instance: 8d3cc6bf-5ae8-45d8-ba70-269e0d290ee4] Start building networks asynchronously for instance. 
{{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1109.996548] env[62070]: DEBUG oslo_concurrency.lockutils [None req-2cb3cba3-07f0-4006-ba36-22b4a0263356 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.928s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1110.099149] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a0970e1-e8e3-4002-9874-85899b5e0601 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.106917] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89ab94df-7f9f-4d59-922f-cebf9196a1c7 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.135816] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c8d2ba5-fd70-4eb8-828c-3e425cb559f0 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.142607] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b50437cc-588e-467e-a6a7-b17f5a86f674 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.155321] env[62070]: DEBUG nova.compute.provider_tree [None req-2cb3cba3-07f0-4006-ba36-22b4a0263356 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1110.163843] env[62070]: DEBUG oslo_vmware.api [None req-39e20de5-aa85-413d-8b4d-0862dcd05b83 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': task-1122487, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.080074} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1110.164617] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-39e20de5-aa85-413d-8b4d-0862dcd05b83 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Deleted the datastore file {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1110.164842] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-39e20de5-aa85-413d-8b4d-0862dcd05b83 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 38573462-18e5-4ba8-ad32-6ebc7bcf7c76] Deleted contents of the VM from datastore datastore1 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1110.165042] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-39e20de5-aa85-413d-8b4d-0862dcd05b83 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 38573462-18e5-4ba8-ad32-6ebc7bcf7c76] Instance destroyed {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1110.165227] env[62070]: INFO nova.compute.manager [None req-39e20de5-aa85-413d-8b4d-0862dcd05b83 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 38573462-18e5-4ba8-ad32-6ebc7bcf7c76] Took 2.24 seconds to destroy the instance on the hypervisor. [ 1110.165464] env[62070]: DEBUG oslo.service.loopingcall [None req-39e20de5-aa85-413d-8b4d-0862dcd05b83 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1110.165655] env[62070]: DEBUG nova.compute.manager [-] [instance: 38573462-18e5-4ba8-ad32-6ebc7bcf7c76] Deallocating network for instance {{(pid=62070) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1110.165750] env[62070]: DEBUG nova.network.neutron [-] [instance: 38573462-18e5-4ba8-ad32-6ebc7bcf7c76] deallocate_for_instance() {{(pid=62070) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1110.339404] env[62070]: DEBUG oslo_vmware.api [None req-895fa041-4a30-4097-9383-c3cb465b2e41 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Task: {'id': task-1122485, 'name': CreateSnapshot_Task, 'duration_secs': 0.893505} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1110.339836] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-895fa041-4a30-4097-9383-c3cb465b2e41 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] [instance: 20c4fabc-fc9b-49c7-ab28-fa092ad66038] Created Snapshot of the VM instance {{(pid=62070) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1110.340503] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11fc0c30-d5dd-4325-917e-0642f14cca9d {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.500550] env[62070]: DEBUG nova.compute.utils [None req-0b0715c5-79e9-480e-83e7-b5809f9a7aae tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Using /dev/sd instead of None {{(pid=62070) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1110.501980] env[62070]: DEBUG nova.compute.manager [None req-0b0715c5-79e9-480e-83e7-b5809f9a7aae tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] [instance: 8d3cc6bf-5ae8-45d8-ba70-269e0d290ee4] Allocating IP information in the background. {{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1110.502165] env[62070]: DEBUG nova.network.neutron [None req-0b0715c5-79e9-480e-83e7-b5809f9a7aae tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] [instance: 8d3cc6bf-5ae8-45d8-ba70-269e0d290ee4] allocate_for_instance() {{(pid=62070) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1110.541167] env[62070]: DEBUG nova.policy [None req-0b0715c5-79e9-480e-83e7-b5809f9a7aae tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a91eca948b964f1885f1effb82ea35dc', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '735d24ccc5614660a5b34d77af648f94', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62070) authorize /opt/stack/nova/nova/policy.py:201}} [ 1110.544248] env[62070]: DEBUG oslo_concurrency.lockutils [None req-ad53ce46-223f-41b7-b653-59855f256bfe tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Acquiring lock "d8284a01-bbf6-4607-b2db-33bf2cd5457d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1110.544487] env[62070]: DEBUG oslo_concurrency.lockutils [None req-ad53ce46-223f-41b7-b653-59855f256bfe tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Lock "d8284a01-bbf6-4607-b2db-33bf2cd5457d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1110.544689] env[62070]: DEBUG oslo_concurrency.lockutils [None 
req-ad53ce46-223f-41b7-b653-59855f256bfe tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Acquiring lock "d8284a01-bbf6-4607-b2db-33bf2cd5457d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1110.544873] env[62070]: DEBUG oslo_concurrency.lockutils [None req-ad53ce46-223f-41b7-b653-59855f256bfe tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Lock "d8284a01-bbf6-4607-b2db-33bf2cd5457d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1110.545053] env[62070]: DEBUG oslo_concurrency.lockutils [None req-ad53ce46-223f-41b7-b653-59855f256bfe tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Lock "d8284a01-bbf6-4607-b2db-33bf2cd5457d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1110.547244] env[62070]: INFO nova.compute.manager [None req-ad53ce46-223f-41b7-b653-59855f256bfe tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: d8284a01-bbf6-4607-b2db-33bf2cd5457d] Terminating instance [ 1110.551232] env[62070]: DEBUG nova.compute.manager [None req-ad53ce46-223f-41b7-b653-59855f256bfe tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: d8284a01-bbf6-4607-b2db-33bf2cd5457d] Start destroying the instance on the hypervisor. 
{{(pid=62070) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1110.551437] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-ad53ce46-223f-41b7-b653-59855f256bfe tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: d8284a01-bbf6-4607-b2db-33bf2cd5457d] Destroying instance {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1110.552805] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5a1d336-2841-49da-a932-a92cf4e93b88 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.560384] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-ad53ce46-223f-41b7-b653-59855f256bfe tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: d8284a01-bbf6-4607-b2db-33bf2cd5457d] Powering off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1110.560616] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c17a3ce7-fa98-41d2-b80c-6922bb18ee1b {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.566562] env[62070]: DEBUG oslo_vmware.api [None req-ad53ce46-223f-41b7-b653-59855f256bfe tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Waiting for the task: (returnval){ [ 1110.566562] env[62070]: value = "task-1122488" [ 1110.566562] env[62070]: _type = "Task" [ 1110.566562] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1110.574837] env[62070]: DEBUG oslo_vmware.api [None req-ad53ce46-223f-41b7-b653-59855f256bfe tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': task-1122488, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1110.639141] env[62070]: DEBUG nova.compute.manager [req-fe86a450-dd95-4f8f-9277-a3ffe1baca7c req-2b36f2ad-bc1f-432f-90db-253f8e5840b3 service nova] [instance: 38573462-18e5-4ba8-ad32-6ebc7bcf7c76] Received event network-vif-deleted-dc2ee731-fdfd-479f-ba45-d8a9e7f50a0e {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1110.639406] env[62070]: INFO nova.compute.manager [req-fe86a450-dd95-4f8f-9277-a3ffe1baca7c req-2b36f2ad-bc1f-432f-90db-253f8e5840b3 service nova] [instance: 38573462-18e5-4ba8-ad32-6ebc7bcf7c76] Neutron deleted interface dc2ee731-fdfd-479f-ba45-d8a9e7f50a0e; detaching it from the instance and deleting it from the info cache [ 1110.639478] env[62070]: DEBUG nova.network.neutron [req-fe86a450-dd95-4f8f-9277-a3ffe1baca7c req-2b36f2ad-bc1f-432f-90db-253f8e5840b3 service nova] [instance: 38573462-18e5-4ba8-ad32-6ebc7bcf7c76] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1110.661312] env[62070]: DEBUG nova.scheduler.client.report [None req-2cb3cba3-07f0-4006-ba36-22b4a0263356 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1110.829108] env[62070]: DEBUG nova.network.neutron [None req-0b0715c5-79e9-480e-83e7-b5809f9a7aae tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] [instance: 8d3cc6bf-5ae8-45d8-ba70-269e0d290ee4] Successfully created port: 38ef183a-1894-4b0e-b9e6-d551818dc34c {{(pid=62070) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1110.860551] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-895fa041-4a30-4097-9383-c3cb465b2e41 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] [instance: 20c4fabc-fc9b-49c7-ab28-fa092ad66038] Creating linked-clone VM from snapshot {{(pid=62070) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1110.861045] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-9f389ee4-98e3-4fc0-a7a1-88dae8816c57 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.876965] env[62070]: DEBUG oslo_vmware.api [None req-895fa041-4a30-4097-9383-c3cb465b2e41 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Waiting for the task: (returnval){ [ 1110.876965] env[62070]: value = "task-1122489" [ 1110.876965] env[62070]: _type = "Task" [ 1110.876965] env[62070]: } to complete. 
{{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1110.883451] env[62070]: DEBUG oslo_concurrency.lockutils [None req-03eb9b6d-eabb-4896-b7c7-cd2c759e74f7 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Acquiring lock "fec23dd4-e956-42dd-b9a2-c8577f77cd81-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1110.883676] env[62070]: DEBUG oslo_concurrency.lockutils [None req-03eb9b6d-eabb-4896-b7c7-cd2c759e74f7 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Lock "fec23dd4-e956-42dd-b9a2-c8577f77cd81-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1110.883853] env[62070]: DEBUG oslo_concurrency.lockutils [None req-03eb9b6d-eabb-4896-b7c7-cd2c759e74f7 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Lock "fec23dd4-e956-42dd-b9a2-c8577f77cd81-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1110.893716] env[62070]: DEBUG oslo_vmware.api [None req-895fa041-4a30-4097-9383-c3cb465b2e41 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Task: {'id': task-1122489, 'name': CloneVM_Task} progress is 10%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1111.006408] env[62070]: DEBUG nova.compute.manager [None req-0b0715c5-79e9-480e-83e7-b5809f9a7aae tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] [instance: 8d3cc6bf-5ae8-45d8-ba70-269e0d290ee4] Start building block device mappings for instance. {{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1111.077652] env[62070]: DEBUG oslo_vmware.api [None req-ad53ce46-223f-41b7-b653-59855f256bfe tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': task-1122488, 'name': PowerOffVM_Task, 'duration_secs': 0.17367} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1111.077950] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-ad53ce46-223f-41b7-b653-59855f256bfe tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: d8284a01-bbf6-4607-b2db-33bf2cd5457d] Powered off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1111.078194] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-ad53ce46-223f-41b7-b653-59855f256bfe tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: d8284a01-bbf6-4607-b2db-33bf2cd5457d] Unregistering the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1111.078497] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-595e1c7a-52d9-4bd2-8da6-0d19618e86b3 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.104159] env[62070]: DEBUG nova.network.neutron [-] [instance: 38573462-18e5-4ba8-ad32-6ebc7bcf7c76] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1111.143680] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-ad53ce46-223f-41b7-b653-59855f256bfe tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: d8284a01-bbf6-4607-b2db-33bf2cd5457d] Unregistered the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1111.143914] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-ad53ce46-223f-41b7-b653-59855f256bfe tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: d8284a01-bbf6-4607-b2db-33bf2cd5457d] Deleting contents of the VM from datastore datastore1 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1111.144117] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-ad53ce46-223f-41b7-b653-59855f256bfe tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Deleting the datastore file [datastore1] d8284a01-bbf6-4607-b2db-33bf2cd5457d {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1111.144367] env[62070]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e083bf61-a0c6-424f-aa05-0ea490e817a3 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.146722] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-69093eff-8ed2-4609-adcd-7b03121787aa {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.154960] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f16f3bd-779e-472d-a4e3-ab41897758df {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.168747] env[62070]: DEBUG oslo_concurrency.lockutils [None req-2cb3cba3-07f0-4006-ba36-22b4a0263356 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.172s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1111.168988] env[62070]: INFO nova.compute.manager [None req-2cb3cba3-07f0-4006-ba36-22b4a0263356 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 38573462-18e5-4ba8-ad32-6ebc7bcf7c76] Successfully reverted task state from resize_migrating on failure for instance. [ 1111.176583] env[62070]: DEBUG oslo_vmware.api [None req-ad53ce46-223f-41b7-b653-59855f256bfe tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Waiting for the task: (returnval){ [ 1111.176583] env[62070]: value = "task-1122491" [ 1111.176583] env[62070]: _type = "Task" [ 1111.176583] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1111.178196] env[62070]: ERROR oslo_messaging.rpc.server [None req-2cb3cba3-07f0-4006-ba36-22b4a0263356 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Exception during message handling: AttributeError: 'NoneType' object has no attribute 'key' [ 1111.178196] env[62070]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 1111.178196] env[62070]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/server.py", line 172, in _process_incoming [ 1111.178196] env[62070]: ERROR oslo_messaging.rpc.server res = self.dispatcher.dispatch(message) [ 1111.178196] env[62070]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 309, in dispatch [ 1111.178196] env[62070]: ERROR oslo_messaging.rpc.server return self._do_dispatch(endpoint, method, ctxt, args) [ 1111.178196] env[62070]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 229, in _do_dispatch [ 1111.178196] env[62070]: ERROR oslo_messaging.rpc.server result = func(ctxt, **new_args) [ 1111.178196] env[62070]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 65, in wrapped [ 1111.178196] env[62070]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1111.178196] env[62070]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1111.178196] env[62070]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1111.178196] env[62070]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1111.178196] env[62070]: ERROR oslo_messaging.rpc.server raise self.value [ 1111.178196] env[62070]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 63, in wrapped [ 1111.178196] env[62070]: ERROR oslo_messaging.rpc.server return f(self, context, *args, **kw) [ 1111.178196] env[62070]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 166, in decorated_function [ 1111.178196] env[62070]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1111.178196] env[62070]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 
1111.178196] env[62070]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1111.178196] env[62070]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1111.178196] env[62070]: ERROR oslo_messaging.rpc.server raise self.value [ 1111.178196] env[62070]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 157, in decorated_function [ 1111.178196] env[62070]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1111.178196] env[62070]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/utils.py", line 1453, in decorated_function [ 1111.178196] env[62070]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1111.178196] env[62070]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 213, in decorated_function [ 1111.178196] env[62070]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1111.178196] env[62070]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1111.178196] env[62070]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1111.178196] env[62070]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1111.178196] env[62070]: ERROR oslo_messaging.rpc.server raise self.value [ 1111.178196] env[62070]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 203, in decorated_function [ 1111.178196] env[62070]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1111.178196] env[62070]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 6071, in resize_instance [ 1111.178196] env[62070]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1111.178196] env[62070]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1111.178196] env[62070]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1111.178196] env[62070]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1111.178196] env[62070]: ERROR oslo_messaging.rpc.server raise self.value [ 1111.178196] env[62070]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 6068, in resize_instance [ 1111.178196] env[62070]: ERROR oslo_messaging.rpc.server self._resize_instance(context, instance, image, migration, [ 1111.178196] env[62070]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 6105, in _resize_instance [ 1111.178196] env[62070]: ERROR oslo_messaging.rpc.server disk_info = self.driver.migrate_disk_and_power_off( [ 1111.178196] env[62070]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 264, in migrate_disk_and_power_off [ 1111.178196] env[62070]: ERROR oslo_messaging.rpc.server return self._vmops.migrate_disk_and_power_off(context, instance, [ 1111.178196] env[62070]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 1467, in migrate_disk_and_power_off [ 1111.178196] env[62070]: ERROR oslo_messaging.rpc.server self._resize_disk(instance, vm_ref, vmdk, flavor) [ 
1111.178196] env[62070]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 1398, in _resize_disk [ 1111.178196] env[62070]: ERROR oslo_messaging.rpc.server self._volumeops.detach_disk_from_vm(vm_ref, instance, vmdk.device) [ 1111.178196] env[62070]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 121, in detach_disk_from_vm [ 1111.178196] env[62070]: ERROR oslo_messaging.rpc.server disk_key = device.key [ 1111.178196] env[62070]: ERROR oslo_messaging.rpc.server AttributeError: 'NoneType' object has no attribute 'key' [ 1111.180815] env[62070]: ERROR oslo_messaging.rpc.server [ 1111.192452] env[62070]: DEBUG nova.compute.manager [req-fe86a450-dd95-4f8f-9277-a3ffe1baca7c req-2b36f2ad-bc1f-432f-90db-253f8e5840b3 service nova] [instance: 38573462-18e5-4ba8-ad32-6ebc7bcf7c76] Detach interface failed, port_id=dc2ee731-fdfd-479f-ba45-d8a9e7f50a0e, reason: Instance 38573462-18e5-4ba8-ad32-6ebc7bcf7c76 could not be found. {{(pid=62070) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1111.195234] env[62070]: DEBUG oslo_vmware.api [None req-ad53ce46-223f-41b7-b653-59855f256bfe tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': task-1122491, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1111.391419] env[62070]: DEBUG oslo_vmware.api [None req-895fa041-4a30-4097-9383-c3cb465b2e41 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Task: {'id': task-1122489, 'name': CloneVM_Task} progress is 94%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1111.606870] env[62070]: INFO nova.compute.manager [-] [instance: 38573462-18e5-4ba8-ad32-6ebc7bcf7c76] Took 1.44 seconds to deallocate network for instance. 
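
The traceback above bottoms out in detach_disk_from_vm() with AttributeError: 'NoneType' object has no attribute 'key'. During the resize, _resize_disk() passes vmdk.device straight through, and in this run that device is None, so the first attribute access blows up. Below is a minimal, self-contained sketch of that failure path and the kind of guard that would turn it into a clear error; the names are hypothetical and this is not the actual Nova code.

class FakeDiskDevice:
    """Stand-in for the vSphere VirtualDisk object the real code expects."""
    def __init__(self, key):
        self.key = key

def detach_disk_from_vm(vm_ref, instance_uuid, device):
    # Hypothetical guard: the logged crash happens because 'device' is None
    # when the resize path could not locate the instance's VMDK device.
    if device is None:
        raise ValueError("no virtual disk device found for instance %s"
                         % instance_uuid)
    disk_key = device.key  # the attribute access that raised above
    return {"operation": "remove", "device_key": disk_key}

# Illustrative call with made-up identifiers:
print(detach_disk_from_vm("vm-245500", "example-instance-uuid",
                          FakeDiskDevice(2001)))
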
[ 1111.608175] env[62070]: DEBUG nova.network.neutron [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] [instance: 67e99ada-a8e6-4034-b19b-5b2cb883b735] Updating instance_info_cache with network_info: [{"id": "e01eb485-1347-4afb-b881-62797a5b84af", "address": "fa:16:3e:d1:62:ff", "network": {"id": "0d81bd04-b549-4e1f-97a2-0a0b9391dd3f", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-108214409-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c91e5eeeeb1742f499b2edaf76a93a3b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cf5bfbae-a882-4d34-be33-b31e274b3077", "external-id": "nsx-vlan-transportzone-556", "segmentation_id": 556, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape01eb485-13", "ovs_interfaceid": "e01eb485-1347-4afb-b881-62797a5b84af", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1111.687925] env[62070]: DEBUG oslo_vmware.api [None req-ad53ce46-223f-41b7-b653-59855f256bfe tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': task-1122491, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.193725} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1111.688209] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-ad53ce46-223f-41b7-b653-59855f256bfe tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Deleted the datastore file {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1111.688401] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-ad53ce46-223f-41b7-b653-59855f256bfe tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: d8284a01-bbf6-4607-b2db-33bf2cd5457d] Deleted contents of the VM from datastore datastore1 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1111.688583] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-ad53ce46-223f-41b7-b653-59855f256bfe tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: d8284a01-bbf6-4607-b2db-33bf2cd5457d] Instance destroyed {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1111.688801] env[62070]: INFO nova.compute.manager [None req-ad53ce46-223f-41b7-b653-59855f256bfe tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: d8284a01-bbf6-4607-b2db-33bf2cd5457d] Took 1.14 seconds to destroy the instance on the hypervisor. 
[ 1111.689081] env[62070]: DEBUG oslo.service.loopingcall [None req-ad53ce46-223f-41b7-b653-59855f256bfe tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1111.689291] env[62070]: DEBUG nova.compute.manager [-] [instance: d8284a01-bbf6-4607-b2db-33bf2cd5457d] Deallocating network for instance {{(pid=62070) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1111.689384] env[62070]: DEBUG nova.network.neutron [-] [instance: d8284a01-bbf6-4607-b2db-33bf2cd5457d] deallocate_for_instance() {{(pid=62070) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1111.891303] env[62070]: DEBUG oslo_vmware.api [None req-895fa041-4a30-4097-9383-c3cb465b2e41 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Task: {'id': task-1122489, 'name': CloneVM_Task} progress is 94%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1111.941486] env[62070]: DEBUG oslo_concurrency.lockutils [None req-03eb9b6d-eabb-4896-b7c7-cd2c759e74f7 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Acquiring lock "refresh_cache-fec23dd4-e956-42dd-b9a2-c8577f77cd81" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1111.941680] env[62070]: DEBUG oslo_concurrency.lockutils [None req-03eb9b6d-eabb-4896-b7c7-cd2c759e74f7 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Acquired lock "refresh_cache-fec23dd4-e956-42dd-b9a2-c8577f77cd81" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1111.941865] env[62070]: DEBUG nova.network.neutron [None req-03eb9b6d-eabb-4896-b7c7-cd2c759e74f7 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: fec23dd4-e956-42dd-b9a2-c8577f77cd81] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1112.019006] env[62070]: DEBUG nova.compute.manager [None req-0b0715c5-79e9-480e-83e7-b5809f9a7aae tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] [instance: 8d3cc6bf-5ae8-45d8-ba70-269e0d290ee4] Start spawning the instance on the hypervisor. 
{{(pid=62070) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1112.044357] env[62070]: DEBUG nova.virt.hardware [None req-0b0715c5-79e9-480e-83e7-b5809f9a7aae tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T09:21:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T09:21:17Z,direct_url=,disk_format='vmdk',id=43ea607c-7ece-4601-9b11-75c6a16aa7dd,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9d42cb2bbadf40d6b35f237f71234611',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T09:21:18Z,virtual_size=,visibility=), allow threads: False {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1112.044648] env[62070]: DEBUG nova.virt.hardware [None req-0b0715c5-79e9-480e-83e7-b5809f9a7aae tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Flavor limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1112.044837] env[62070]: DEBUG nova.virt.hardware [None req-0b0715c5-79e9-480e-83e7-b5809f9a7aae tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Image limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1112.045056] env[62070]: DEBUG nova.virt.hardware [None req-0b0715c5-79e9-480e-83e7-b5809f9a7aae tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Flavor pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1112.045226] env[62070]: DEBUG nova.virt.hardware [None req-0b0715c5-79e9-480e-83e7-b5809f9a7aae tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Image pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1112.045344] env[62070]: DEBUG nova.virt.hardware [None req-0b0715c5-79e9-480e-83e7-b5809f9a7aae tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1112.045592] env[62070]: DEBUG nova.virt.hardware [None req-0b0715c5-79e9-480e-83e7-b5809f9a7aae tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1112.045774] env[62070]: DEBUG nova.virt.hardware [None req-0b0715c5-79e9-480e-83e7-b5809f9a7aae tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1112.045963] env[62070]: DEBUG 
nova.virt.hardware [None req-0b0715c5-79e9-480e-83e7-b5809f9a7aae tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Got 1 possible topologies {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1112.046184] env[62070]: DEBUG nova.virt.hardware [None req-0b0715c5-79e9-480e-83e7-b5809f9a7aae tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1112.046409] env[62070]: DEBUG nova.virt.hardware [None req-0b0715c5-79e9-480e-83e7-b5809f9a7aae tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1112.047327] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cc57a33-ab87-4eed-a3d2-28c41d2024f7 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.055966] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4dc5d7e2-4534-4a3c-b9ad-5b1e571a246f {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.112790] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Releasing lock "refresh_cache-67e99ada-a8e6-4034-b19b-5b2cb883b735" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1112.113036] env[62070]: DEBUG nova.compute.manager [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] [instance: 67e99ada-a8e6-4034-b19b-5b2cb883b735] Updated the network info_cache for instance {{(pid=62070) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9999}} [ 1112.113281] env[62070]: DEBUG oslo_service.periodic_task [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=62070) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1112.114057] env[62070]: DEBUG nova.compute.manager [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Cleaning up deleted instances {{(pid=62070) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11228}} [ 1112.156317] env[62070]: INFO nova.compute.manager [None req-39e20de5-aa85-413d-8b4d-0862dcd05b83 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 38573462-18e5-4ba8-ad32-6ebc7bcf7c76] Took 0.55 seconds to detach 1 volumes for instance. 
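
The nova.virt.hardware entries above walk through CPU topology selection for the m1.nano flavor: with no limits from flavor or image (all 0:0:0), the maximum topology defaults to 65536 sockets, cores and threads, and a 1-vCPU instance admits exactly one factorisation, 1 socket x 1 core x 1 thread, which is why the only possible and desired topology is VirtCPUTopology(cores=1,sockets=1,threads=1). A simplified sketch of that enumeration, assuming the basic "factor the vCPU count under the limits" behaviour rather than reproducing the real hardware.py logic:

import itertools

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                        max_threads=65536):
    """Enumerate (sockets, cores, threads) triples whose product equals vcpus."""
    found = []
    for sockets, cores, threads in itertools.product(
            range(1, vcpus + 1), repeat=3):
        if (sockets * cores * threads == vcpus
                and sockets <= max_sockets
                and cores <= max_cores
                and threads <= max_threads):
            found.append((sockets, cores, threads))
    return found

print(possible_topologies(1))  # [(1, 1, 1)] -- the single topology logged above
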
[ 1112.158871] env[62070]: DEBUG nova.compute.manager [None req-39e20de5-aa85-413d-8b4d-0862dcd05b83 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 38573462-18e5-4ba8-ad32-6ebc7bcf7c76] Deleting volume: e82aa618-eed9-4876-a9ff-8289ed9697af {{(pid=62070) _cleanup_volumes /opt/stack/nova/nova/compute/manager.py:3247}} [ 1112.233054] env[62070]: DEBUG nova.compute.manager [req-ebc65897-8836-4440-8a61-1c97352d9b23 req-f29e7ecb-70ce-42fc-ad81-7091b6076a5d service nova] [instance: 8d3cc6bf-5ae8-45d8-ba70-269e0d290ee4] Received event network-vif-plugged-38ef183a-1894-4b0e-b9e6-d551818dc34c {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1112.234676] env[62070]: DEBUG oslo_concurrency.lockutils [req-ebc65897-8836-4440-8a61-1c97352d9b23 req-f29e7ecb-70ce-42fc-ad81-7091b6076a5d service nova] Acquiring lock "8d3cc6bf-5ae8-45d8-ba70-269e0d290ee4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1112.234676] env[62070]: DEBUG oslo_concurrency.lockutils [req-ebc65897-8836-4440-8a61-1c97352d9b23 req-f29e7ecb-70ce-42fc-ad81-7091b6076a5d service nova] Lock "8d3cc6bf-5ae8-45d8-ba70-269e0d290ee4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1112.234676] env[62070]: DEBUG oslo_concurrency.lockutils [req-ebc65897-8836-4440-8a61-1c97352d9b23 req-f29e7ecb-70ce-42fc-ad81-7091b6076a5d service nova] Lock "8d3cc6bf-5ae8-45d8-ba70-269e0d290ee4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1112.234676] env[62070]: DEBUG nova.compute.manager [req-ebc65897-8836-4440-8a61-1c97352d9b23 req-f29e7ecb-70ce-42fc-ad81-7091b6076a5d service nova] [instance: 8d3cc6bf-5ae8-45d8-ba70-269e0d290ee4] No waiting events found dispatching network-vif-plugged-38ef183a-1894-4b0e-b9e6-d551818dc34c {{(pid=62070) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1112.234676] env[62070]: WARNING nova.compute.manager [req-ebc65897-8836-4440-8a61-1c97352d9b23 req-f29e7ecb-70ce-42fc-ad81-7091b6076a5d service nova] [instance: 8d3cc6bf-5ae8-45d8-ba70-269e0d290ee4] Received unexpected event network-vif-plugged-38ef183a-1894-4b0e-b9e6-d551818dc34c for instance with vm_state building and task_state spawning. [ 1112.327411] env[62070]: DEBUG nova.network.neutron [None req-0b0715c5-79e9-480e-83e7-b5809f9a7aae tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] [instance: 8d3cc6bf-5ae8-45d8-ba70-269e0d290ee4] Successfully updated port: 38ef183a-1894-4b0e-b9e6-d551818dc34c {{(pid=62070) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1112.390716] env[62070]: DEBUG oslo_vmware.api [None req-895fa041-4a30-4097-9383-c3cb465b2e41 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Task: {'id': task-1122489, 'name': CloneVM_Task} progress is 95%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1112.456321] env[62070]: DEBUG nova.network.neutron [-] [instance: d8284a01-bbf6-4607-b2db-33bf2cd5457d] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1112.629428] env[62070]: DEBUG nova.compute.manager [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] There are 59 instances to clean {{(pid=62070) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11237}} [ 1112.629603] env[62070]: DEBUG nova.compute.manager [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] [instance: b1137be1-b66b-4eb2-bdbd-1db6173a1f93] Instance has had 0 of 5 cleanup attempts {{(pid=62070) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1112.663457] env[62070]: DEBUG nova.compute.manager [req-03675c18-48e1-4a49-a5a4-5cb266983f70 req-5d37e265-8788-4bd8-ba98-8e408f9d7e31 service nova] [instance: d8284a01-bbf6-4607-b2db-33bf2cd5457d] Received event network-vif-deleted-629028b1-5fa6-4d6e-ba82-8c3c52f44a32 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1112.698453] env[62070]: DEBUG nova.network.neutron [None req-03eb9b6d-eabb-4896-b7c7-cd2c759e74f7 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: fec23dd4-e956-42dd-b9a2-c8577f77cd81] Updating instance_info_cache with network_info: [{"id": "933a577b-8b0c-4c0d-ae12-372e4b70b7c9", "address": "fa:16:3e:34:74:6b", "network": {"id": "1ca75409-ae3f-4837-a2b6-ed2445253336", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1883034081-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.208", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f29ac48ab6544ec0bd1d210aec05dbc5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1559ce49-7345-443f-bf02-4bfeb88356ef", "external-id": "nsx-vlan-transportzone-670", "segmentation_id": 670, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap933a577b-8b", "ovs_interfaceid": "933a577b-8b0c-4c0d-ae12-372e4b70b7c9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1112.703280] env[62070]: DEBUG oslo_concurrency.lockutils [None req-39e20de5-aa85-413d-8b4d-0862dcd05b83 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1112.703544] env[62070]: DEBUG oslo_concurrency.lockutils [None req-39e20de5-aa85-413d-8b4d-0862dcd05b83 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Lock "compute_resources" acquired by 
"nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1112.703778] env[62070]: DEBUG nova.objects.instance [None req-39e20de5-aa85-413d-8b4d-0862dcd05b83 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Lazy-loading 'resources' on Instance uuid 38573462-18e5-4ba8-ad32-6ebc7bcf7c76 {{(pid=62070) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1112.832535] env[62070]: DEBUG oslo_concurrency.lockutils [None req-0b0715c5-79e9-480e-83e7-b5809f9a7aae tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Acquiring lock "refresh_cache-8d3cc6bf-5ae8-45d8-ba70-269e0d290ee4" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1112.832678] env[62070]: DEBUG oslo_concurrency.lockutils [None req-0b0715c5-79e9-480e-83e7-b5809f9a7aae tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Acquired lock "refresh_cache-8d3cc6bf-5ae8-45d8-ba70-269e0d290ee4" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1112.833151] env[62070]: DEBUG nova.network.neutron [None req-0b0715c5-79e9-480e-83e7-b5809f9a7aae tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] [instance: 8d3cc6bf-5ae8-45d8-ba70-269e0d290ee4] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1112.891468] env[62070]: DEBUG oslo_vmware.api [None req-895fa041-4a30-4097-9383-c3cb465b2e41 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Task: {'id': task-1122489, 'name': CloneVM_Task, 'duration_secs': 1.620287} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1112.891706] env[62070]: INFO nova.virt.vmwareapi.vmops [None req-895fa041-4a30-4097-9383-c3cb465b2e41 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] [instance: 20c4fabc-fc9b-49c7-ab28-fa092ad66038] Created linked-clone VM from snapshot [ 1112.892464] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2df2372e-1f91-44bc-9f8d-fa5fb57a4096 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.899485] env[62070]: DEBUG nova.virt.vmwareapi.images [None req-895fa041-4a30-4097-9383-c3cb465b2e41 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] [instance: 20c4fabc-fc9b-49c7-ab28-fa092ad66038] Uploading image 8a6c97c1-5195-4f75-914b-69d3dbca246d {{(pid=62070) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:442}} [ 1112.922917] env[62070]: DEBUG oslo_vmware.rw_handles [None req-895fa041-4a30-4097-9383-c3cb465b2e41 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1112.922917] env[62070]: value = "vm-245518" [ 1112.922917] env[62070]: _type = "VirtualMachine" [ 1112.922917] env[62070]: }. 
{{(pid=62070) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1112.923502] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-44c6e1c2-f252-4a3e-a110-35fcca76cd41 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.930970] env[62070]: DEBUG oslo_vmware.rw_handles [None req-895fa041-4a30-4097-9383-c3cb465b2e41 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Lease: (returnval){ [ 1112.930970] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]524944c3-c457-b728-cd72-1d865b7860b9" [ 1112.930970] env[62070]: _type = "HttpNfcLease" [ 1112.930970] env[62070]: } obtained for exporting VM: (result){ [ 1112.930970] env[62070]: value = "vm-245518" [ 1112.930970] env[62070]: _type = "VirtualMachine" [ 1112.930970] env[62070]: }. {{(pid=62070) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1112.931226] env[62070]: DEBUG oslo_vmware.api [None req-895fa041-4a30-4097-9383-c3cb465b2e41 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Waiting for the lease: (returnval){ [ 1112.931226] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]524944c3-c457-b728-cd72-1d865b7860b9" [ 1112.931226] env[62070]: _type = "HttpNfcLease" [ 1112.931226] env[62070]: } to be ready. {{(pid=62070) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1112.937523] env[62070]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1112.937523] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]524944c3-c457-b728-cd72-1d865b7860b9" [ 1112.937523] env[62070]: _type = "HttpNfcLease" [ 1112.937523] env[62070]: } is initializing. {{(pid=62070) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1112.959148] env[62070]: INFO nova.compute.manager [-] [instance: d8284a01-bbf6-4607-b2db-33bf2cd5457d] Took 1.27 seconds to deallocate network for instance. 
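
The rw_handles entries above set up a stream-optimized export of the freshly cloned VM: an HttpNfcLease is requested, then polled while it reports "initializing"; once it is ready, the lease info is read to find the VMDK URL that gets opened for reading a few entries further on. A stubbed sketch of that sequence follows, with illustrative names and objects rather than the oslo_vmware.rw_handles code.

class StubLease:
    """Toy lease that becomes ready on the second poll."""
    def __init__(self):
        self._polls = 0
        self.info = {"deviceUrl": [
            {"url": "https://esx-host.example/nfc/.../disk-0.vmdk"}]}
    def poll_state(self):
        self._polls += 1
        return "initializing" if self._polls < 2 else "ready"

def wait_for_lease_ready(lease):
    while True:
        state = lease.poll_state()
        print("Lease is %s." % state)  # mirrors the "is initializing/ready" lines
        if state == "ready":
            return
        if state == "error":
            raise RuntimeError("lease failed")

def vmdk_url(lease):
    # the _find_vmdk_url step seen later in the log is essentially this lookup
    return lease.info["deviceUrl"][0]["url"]

lease = StubLease()
wait_for_lease_ready(lease)
print("Opening URL: %s for reading." % vmdk_url(lease))
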
[ 1113.134936] env[62070]: DEBUG nova.compute.manager [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] [instance: 27d9b478-7ebb-4313-a314-679ca0292086] Instance has had 0 of 5 cleanup attempts {{(pid=62070) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1113.201470] env[62070]: DEBUG oslo_concurrency.lockutils [None req-03eb9b6d-eabb-4896-b7c7-cd2c759e74f7 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Releasing lock "refresh_cache-fec23dd4-e956-42dd-b9a2-c8577f77cd81" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1113.301209] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90f8d4b4-f363-4536-8973-8b6a8c3b0de5 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.308453] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc3c8241-4c8e-4acf-80ac-14ce680d1d0f {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.341017] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab60a7d2-9162-409a-ad31-def516b385a7 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.348396] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-874c0d72-3bd5-4c6b-98e5-feb36f6095c7 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.361701] env[62070]: DEBUG nova.compute.provider_tree [None req-39e20de5-aa85-413d-8b4d-0862dcd05b83 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1113.374795] env[62070]: DEBUG nova.network.neutron [None req-0b0715c5-79e9-480e-83e7-b5809f9a7aae tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] [instance: 8d3cc6bf-5ae8-45d8-ba70-269e0d290ee4] Instance cache missing network info. {{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1113.438602] env[62070]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1113.438602] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]524944c3-c457-b728-cd72-1d865b7860b9" [ 1113.438602] env[62070]: _type = "HttpNfcLease" [ 1113.438602] env[62070]: } is ready. {{(pid=62070) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1113.439098] env[62070]: DEBUG oslo_vmware.rw_handles [None req-895fa041-4a30-4097-9383-c3cb465b2e41 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1113.439098] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]524944c3-c457-b728-cd72-1d865b7860b9" [ 1113.439098] env[62070]: _type = "HttpNfcLease" [ 1113.439098] env[62070]: }. 
{{(pid=62070) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1113.439894] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c610102b-f88f-4c15-be16-e323084b2cee {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.447096] env[62070]: DEBUG oslo_vmware.rw_handles [None req-895fa041-4a30-4097-9383-c3cb465b2e41 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/521f1c93-508e-10c3-3656-363bcf371b57/disk-0.vmdk from lease info. {{(pid=62070) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1113.447278] env[62070]: DEBUG oslo_vmware.rw_handles [None req-895fa041-4a30-4097-9383-c3cb465b2e41 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/521f1c93-508e-10c3-3656-363bcf371b57/disk-0.vmdk for reading. {{(pid=62070) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1113.506367] env[62070]: DEBUG oslo_concurrency.lockutils [None req-ad53ce46-223f-41b7-b653-59855f256bfe tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1113.537934] env[62070]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-15c08820-8309-4846-bb9c-58f5f39edb36 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.545820] env[62070]: DEBUG nova.network.neutron [None req-0b0715c5-79e9-480e-83e7-b5809f9a7aae tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] [instance: 8d3cc6bf-5ae8-45d8-ba70-269e0d290ee4] Updating instance_info_cache with network_info: [{"id": "38ef183a-1894-4b0e-b9e6-d551818dc34c", "address": "fa:16:3e:cd:26:51", "network": {"id": "6ea9aade-1b40-4ce8-a502-14ff09a4ab40", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1617295069-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "735d24ccc5614660a5b34d77af648f94", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5b8af79a-31d5-4d78-93d7-3919aa1d9186", "external-id": "nsx-vlan-transportzone-324", "segmentation_id": 324, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap38ef183a-18", "ovs_interfaceid": "38ef183a-1894-4b0e-b9e6-d551818dc34c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info 
/opt/stack/nova/nova/network/neutron.py:116}} [ 1113.638480] env[62070]: DEBUG nova.compute.manager [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] [instance: 000a67eb-9535-4da6-816a-b61126f11509] Instance has had 0 of 5 cleanup attempts {{(pid=62070) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1113.724231] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b92e9140-a274-42b2-af20-6ac39efa5232 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.746014] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49b962fc-aab1-4b67-bd22-81a38ca7b6f4 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.753312] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-03eb9b6d-eabb-4896-b7c7-cd2c759e74f7 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: fec23dd4-e956-42dd-b9a2-c8577f77cd81] Updating instance 'fec23dd4-e956-42dd-b9a2-c8577f77cd81' progress to 83 {{(pid=62070) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 1113.864176] env[62070]: DEBUG nova.scheduler.client.report [None req-39e20de5-aa85-413d-8b4d-0862dcd05b83 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1114.048376] env[62070]: DEBUG oslo_concurrency.lockutils [None req-0b0715c5-79e9-480e-83e7-b5809f9a7aae tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Releasing lock "refresh_cache-8d3cc6bf-5ae8-45d8-ba70-269e0d290ee4" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1114.048829] env[62070]: DEBUG nova.compute.manager [None req-0b0715c5-79e9-480e-83e7-b5809f9a7aae tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] [instance: 8d3cc6bf-5ae8-45d8-ba70-269e0d290ee4] Instance network_info: |[{"id": "38ef183a-1894-4b0e-b9e6-d551818dc34c", "address": "fa:16:3e:cd:26:51", "network": {"id": "6ea9aade-1b40-4ce8-a502-14ff09a4ab40", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1617295069-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "735d24ccc5614660a5b34d77af648f94", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5b8af79a-31d5-4d78-93d7-3919aa1d9186", "external-id": 
"nsx-vlan-transportzone-324", "segmentation_id": 324, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap38ef183a-18", "ovs_interfaceid": "38ef183a-1894-4b0e-b9e6-d551818dc34c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1114.049390] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-0b0715c5-79e9-480e-83e7-b5809f9a7aae tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] [instance: 8d3cc6bf-5ae8-45d8-ba70-269e0d290ee4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:cd:26:51', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '5b8af79a-31d5-4d78-93d7-3919aa1d9186', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '38ef183a-1894-4b0e-b9e6-d551818dc34c', 'vif_model': 'vmxnet3'}] {{(pid=62070) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1114.058686] env[62070]: DEBUG oslo.service.loopingcall [None req-0b0715c5-79e9-480e-83e7-b5809f9a7aae tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1114.059403] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8d3cc6bf-5ae8-45d8-ba70-269e0d290ee4] Creating VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1114.059485] env[62070]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-874e78cc-032a-4840-b67a-45513fec027c {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.081939] env[62070]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1114.081939] env[62070]: value = "task-1122494" [ 1114.081939] env[62070]: _type = "Task" [ 1114.081939] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1114.091337] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1122494, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1114.142806] env[62070]: DEBUG nova.compute.manager [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] [instance: d65b4f74-7df5-4fb8-baa3-4f5b9b480cb2] Instance has had 0 of 5 cleanup attempts {{(pid=62070) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1114.259821] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-03eb9b6d-eabb-4896-b7c7-cd2c759e74f7 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: fec23dd4-e956-42dd-b9a2-c8577f77cd81] Powering on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1114.260448] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d85c8b16-bea1-41b0-b0b0-e4ae1563400b {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.267453] env[62070]: DEBUG oslo_vmware.api [None req-03eb9b6d-eabb-4896-b7c7-cd2c759e74f7 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Waiting for the task: (returnval){ [ 1114.267453] env[62070]: value = "task-1122495" [ 1114.267453] env[62070]: _type = "Task" [ 1114.267453] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1114.270030] env[62070]: DEBUG nova.compute.manager [req-e52382f5-e8e8-479b-8df0-7e7ace64b8a7 req-ce4a8412-b636-47c0-9b70-f24b4dbc86d6 service nova] [instance: 8d3cc6bf-5ae8-45d8-ba70-269e0d290ee4] Received event network-changed-38ef183a-1894-4b0e-b9e6-d551818dc34c {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1114.270337] env[62070]: DEBUG nova.compute.manager [req-e52382f5-e8e8-479b-8df0-7e7ace64b8a7 req-ce4a8412-b636-47c0-9b70-f24b4dbc86d6 service nova] [instance: 8d3cc6bf-5ae8-45d8-ba70-269e0d290ee4] Refreshing instance network info cache due to event network-changed-38ef183a-1894-4b0e-b9e6-d551818dc34c. {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1114.270617] env[62070]: DEBUG oslo_concurrency.lockutils [req-e52382f5-e8e8-479b-8df0-7e7ace64b8a7 req-ce4a8412-b636-47c0-9b70-f24b4dbc86d6 service nova] Acquiring lock "refresh_cache-8d3cc6bf-5ae8-45d8-ba70-269e0d290ee4" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1114.270828] env[62070]: DEBUG oslo_concurrency.lockutils [req-e52382f5-e8e8-479b-8df0-7e7ace64b8a7 req-ce4a8412-b636-47c0-9b70-f24b4dbc86d6 service nova] Acquired lock "refresh_cache-8d3cc6bf-5ae8-45d8-ba70-269e0d290ee4" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1114.271111] env[62070]: DEBUG nova.network.neutron [req-e52382f5-e8e8-479b-8df0-7e7ace64b8a7 req-ce4a8412-b636-47c0-9b70-f24b4dbc86d6 service nova] [instance: 8d3cc6bf-5ae8-45d8-ba70-269e0d290ee4] Refreshing network info cache for port 38ef183a-1894-4b0e-b9e6-d551818dc34c {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1114.281989] env[62070]: DEBUG oslo_vmware.api [None req-03eb9b6d-eabb-4896-b7c7-cd2c759e74f7 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Task: {'id': task-1122495, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1114.369850] env[62070]: DEBUG oslo_concurrency.lockutils [None req-39e20de5-aa85-413d-8b4d-0862dcd05b83 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.666s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1114.373260] env[62070]: DEBUG oslo_concurrency.lockutils [None req-ad53ce46-223f-41b7-b653-59855f256bfe tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.867s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1114.373645] env[62070]: DEBUG nova.objects.instance [None req-ad53ce46-223f-41b7-b653-59855f256bfe tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Lazy-loading 'resources' on Instance uuid d8284a01-bbf6-4607-b2db-33bf2cd5457d {{(pid=62070) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1114.394814] env[62070]: INFO nova.scheduler.client.report [None req-39e20de5-aa85-413d-8b4d-0862dcd05b83 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Deleted allocations for instance 38573462-18e5-4ba8-ad32-6ebc7bcf7c76 [ 1114.592395] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1122494, 'name': CreateVM_Task, 'duration_secs': 0.370587} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1114.592752] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8d3cc6bf-5ae8-45d8-ba70-269e0d290ee4] Created VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1114.593260] env[62070]: DEBUG oslo_concurrency.lockutils [None req-0b0715c5-79e9-480e-83e7-b5809f9a7aae tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1114.593497] env[62070]: DEBUG oslo_concurrency.lockutils [None req-0b0715c5-79e9-480e-83e7-b5809f9a7aae tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Acquired lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1114.594060] env[62070]: DEBUG oslo_concurrency.lockutils [None req-0b0715c5-79e9-480e-83e7-b5809f9a7aae tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1114.594370] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8747b9e1-2b52-4c0b-b81b-cdef03e6ea97 {{(pid=62070) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.599477] env[62070]: DEBUG oslo_vmware.api [None req-0b0715c5-79e9-480e-83e7-b5809f9a7aae tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Waiting for the task: (returnval){ [ 1114.599477] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]528e20f6-f9c9-0de3-b721-8257c3f090d8" [ 1114.599477] env[62070]: _type = "Task" [ 1114.599477] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1114.607632] env[62070]: DEBUG oslo_vmware.api [None req-0b0715c5-79e9-480e-83e7-b5809f9a7aae tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]528e20f6-f9c9-0de3-b721-8257c3f090d8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1114.647380] env[62070]: DEBUG nova.compute.manager [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] [instance: 6b6e22b9-71fb-4139-993a-7b9fcf89d8e0] Instance has had 0 of 5 cleanup attempts {{(pid=62070) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1114.782426] env[62070]: DEBUG oslo_vmware.api [None req-03eb9b6d-eabb-4896-b7c7-cd2c759e74f7 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Task: {'id': task-1122495, 'name': PowerOnVM_Task, 'duration_secs': 0.38228} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1114.785226] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-03eb9b6d-eabb-4896-b7c7-cd2c759e74f7 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: fec23dd4-e956-42dd-b9a2-c8577f77cd81] Powered on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1114.785226] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-03eb9b6d-eabb-4896-b7c7-cd2c759e74f7 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: fec23dd4-e956-42dd-b9a2-c8577f77cd81] Updating instance 'fec23dd4-e956-42dd-b9a2-c8577f77cd81' progress to 100 {{(pid=62070) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 1114.904180] env[62070]: DEBUG oslo_concurrency.lockutils [None req-39e20de5-aa85-413d-8b4d-0862dcd05b83 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Lock "38573462-18e5-4ba8-ad32-6ebc7bcf7c76" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.989s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1114.905168] env[62070]: DEBUG oslo_concurrency.lockutils [None req-b7313fb3-2290-4fbd-8792-ef8ba59c6633 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Lock "38573462-18e5-4ba8-ad32-6ebc7bcf7c76" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 5.665s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1114.905432] env[62070]: DEBUG oslo_concurrency.lockutils [None req-b7313fb3-2290-4fbd-8792-ef8ba59c6633 
tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Acquiring lock "38573462-18e5-4ba8-ad32-6ebc7bcf7c76-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1114.905594] env[62070]: DEBUG oslo_concurrency.lockutils [None req-b7313fb3-2290-4fbd-8792-ef8ba59c6633 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Lock "38573462-18e5-4ba8-ad32-6ebc7bcf7c76-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1114.905753] env[62070]: DEBUG oslo_concurrency.lockutils [None req-b7313fb3-2290-4fbd-8792-ef8ba59c6633 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Lock "38573462-18e5-4ba8-ad32-6ebc7bcf7c76-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1114.907684] env[62070]: INFO nova.compute.manager [None req-b7313fb3-2290-4fbd-8792-ef8ba59c6633 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 38573462-18e5-4ba8-ad32-6ebc7bcf7c76] Terminating instance [ 1114.909642] env[62070]: DEBUG nova.compute.manager [None req-b7313fb3-2290-4fbd-8792-ef8ba59c6633 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 38573462-18e5-4ba8-ad32-6ebc7bcf7c76] Start destroying the instance on the hypervisor. {{(pid=62070) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1114.913085] env[62070]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c5fb340d-9305-4f60-af51-e44a57a15bed {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.924374] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddbdb8a0-6b92-4714-887d-d2512db893ff {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.957066] env[62070]: WARNING nova.virt.vmwareapi.driver [None req-b7313fb3-2290-4fbd-8792-ef8ba59c6633 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 38573462-18e5-4ba8-ad32-6ebc7bcf7c76] Instance does not exists. Proceeding to delete instance properties on datastore: nova.exception.InstanceNotFound: Instance 38573462-18e5-4ba8-ad32-6ebc7bcf7c76 could not be found. 
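
The WARNING just above shows the vmwareapi driver tolerating an already-missing backend VM during terminate: the vCenter lookup raises InstanceNotFound, and destroy carries on so that datastore properties, ports and volumes are still cleaned up, which is why the very next entries report "Instance destroyed" and network deallocation. A condensed, hypothetical sketch of that control flow (not the exact Nova code):

class InstanceNotFound(Exception):
    pass

def lookup_vm(instance_uuid):
    # stand-in for the vCenter search that came back empty in the log
    raise InstanceNotFound("Instance %s could not be found." % instance_uuid)

def destroy(instance_uuid):
    try:
        vm_ref = lookup_vm(instance_uuid)
    except InstanceNotFound as err:
        # mirrors "Instance does not exist on backend ... Proceeding to delete"
        print("WARNING: %s Proceeding with cleanup only." % err)
        vm_ref = None
    if vm_ref is not None:
        pass  # powering off and unregistering the VM would happen here
    print("Instance destroyed")  # network deallocation follows in the manager

destroy("38573462-18e5-4ba8-ad32-6ebc7bcf7c76")
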
[ 1114.957370] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-b7313fb3-2290-4fbd-8792-ef8ba59c6633 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 38573462-18e5-4ba8-ad32-6ebc7bcf7c76] Destroying instance {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1114.960389] env[62070]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-af2d7229-3d24-46aa-948b-14db1b1cef11 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.968662] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9ec5ccd-9cca-44c0-b209-89277706ff6e {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.998495] env[62070]: WARNING nova.virt.vmwareapi.vmops [None req-b7313fb3-2290-4fbd-8792-ef8ba59c6633 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 38573462-18e5-4ba8-ad32-6ebc7bcf7c76] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 38573462-18e5-4ba8-ad32-6ebc7bcf7c76 could not be found. [ 1114.998847] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-b7313fb3-2290-4fbd-8792-ef8ba59c6633 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 38573462-18e5-4ba8-ad32-6ebc7bcf7c76] Instance destroyed {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1114.999100] env[62070]: INFO nova.compute.manager [None req-b7313fb3-2290-4fbd-8792-ef8ba59c6633 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 38573462-18e5-4ba8-ad32-6ebc7bcf7c76] Took 0.09 seconds to destroy the instance on the hypervisor. [ 1114.999617] env[62070]: DEBUG oslo.service.loopingcall [None req-b7313fb3-2290-4fbd-8792-ef8ba59c6633 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1115.003420] env[62070]: DEBUG nova.compute.manager [-] [instance: 38573462-18e5-4ba8-ad32-6ebc7bcf7c76] Deallocating network for instance {{(pid=62070) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1115.003551] env[62070]: DEBUG nova.network.neutron [-] [instance: 38573462-18e5-4ba8-ad32-6ebc7bcf7c76] deallocate_for_instance() {{(pid=62070) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1115.016520] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59280dc3-b78f-4403-bfef-df1a207fddf0 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.025741] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f6efae0-929a-4178-986c-a7fe10cd2093 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.029494] env[62070]: DEBUG nova.network.neutron [req-e52382f5-e8e8-479b-8df0-7e7ace64b8a7 req-ce4a8412-b636-47c0-9b70-f24b4dbc86d6 service nova] [instance: 8d3cc6bf-5ae8-45d8-ba70-269e0d290ee4] Updated VIF entry in instance network info cache for port 38ef183a-1894-4b0e-b9e6-d551818dc34c. {{(pid=62070) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1115.031178] env[62070]: DEBUG nova.network.neutron [req-e52382f5-e8e8-479b-8df0-7e7ace64b8a7 req-ce4a8412-b636-47c0-9b70-f24b4dbc86d6 service nova] [instance: 8d3cc6bf-5ae8-45d8-ba70-269e0d290ee4] Updating instance_info_cache with network_info: [{"id": "38ef183a-1894-4b0e-b9e6-d551818dc34c", "address": "fa:16:3e:cd:26:51", "network": {"id": "6ea9aade-1b40-4ce8-a502-14ff09a4ab40", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1617295069-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "735d24ccc5614660a5b34d77af648f94", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5b8af79a-31d5-4d78-93d7-3919aa1d9186", "external-id": "nsx-vlan-transportzone-324", "segmentation_id": 324, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap38ef183a-18", "ovs_interfaceid": "38ef183a-1894-4b0e-b9e6-d551818dc34c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1115.063220] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64bad6f6-9169-483c-b6f4-fa7a688ec32e {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.071918] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f61e592-68e6-48f7-b4cb-41a70cf5a99e {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
1115.086427] env[62070]: DEBUG nova.compute.provider_tree [None req-ad53ce46-223f-41b7-b653-59855f256bfe tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1115.109532] env[62070]: DEBUG oslo_vmware.api [None req-0b0715c5-79e9-480e-83e7-b5809f9a7aae tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]528e20f6-f9c9-0de3-b721-8257c3f090d8, 'name': SearchDatastore_Task, 'duration_secs': 0.013284} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1115.110283] env[62070]: DEBUG oslo_concurrency.lockutils [None req-0b0715c5-79e9-480e-83e7-b5809f9a7aae tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Releasing lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1115.110283] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-0b0715c5-79e9-480e-83e7-b5809f9a7aae tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] [instance: 8d3cc6bf-5ae8-45d8-ba70-269e0d290ee4] Processing image 43ea607c-7ece-4601-9b11-75c6a16aa7dd {{(pid=62070) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1115.110527] env[62070]: DEBUG oslo_concurrency.lockutils [None req-0b0715c5-79e9-480e-83e7-b5809f9a7aae tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1115.110680] env[62070]: DEBUG oslo_concurrency.lockutils [None req-0b0715c5-79e9-480e-83e7-b5809f9a7aae tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Acquired lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1115.110876] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-0b0715c5-79e9-480e-83e7-b5809f9a7aae tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1115.111197] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0a030d53-1be1-4f79-938d-010ed4a8b8a3 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.118898] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-0b0715c5-79e9-480e-83e7-b5809f9a7aae tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1115.119094] env[62070]: 
DEBUG nova.virt.vmwareapi.vmops [None req-0b0715c5-79e9-480e-83e7-b5809f9a7aae tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62070) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1115.119854] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d7a75480-63e9-431c-bbfc-1d6e9b061d6f {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.124658] env[62070]: DEBUG oslo_vmware.api [None req-0b0715c5-79e9-480e-83e7-b5809f9a7aae tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Waiting for the task: (returnval){ [ 1115.124658] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]52c5ceb9-3775-bb9a-b213-c583c3589bd1" [ 1115.124658] env[62070]: _type = "Task" [ 1115.124658] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1115.132195] env[62070]: DEBUG oslo_vmware.api [None req-0b0715c5-79e9-480e-83e7-b5809f9a7aae tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52c5ceb9-3775-bb9a-b213-c583c3589bd1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1115.150820] env[62070]: DEBUG nova.compute.manager [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] [instance: 1ababba6-838c-4ba6-bd83-e2b15aaf4b97] Instance has had 0 of 5 cleanup attempts {{(pid=62070) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1115.526042] env[62070]: DEBUG nova.network.neutron [-] [instance: 38573462-18e5-4ba8-ad32-6ebc7bcf7c76] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1115.534517] env[62070]: DEBUG oslo_concurrency.lockutils [req-e52382f5-e8e8-479b-8df0-7e7ace64b8a7 req-ce4a8412-b636-47c0-9b70-f24b4dbc86d6 service nova] Releasing lock "refresh_cache-8d3cc6bf-5ae8-45d8-ba70-269e0d290ee4" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1115.589517] env[62070]: DEBUG nova.scheduler.client.report [None req-ad53ce46-223f-41b7-b653-59855f256bfe tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1115.635509] env[62070]: DEBUG oslo_vmware.api [None req-0b0715c5-79e9-480e-83e7-b5809f9a7aae tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52c5ceb9-3775-bb9a-b213-c583c3589bd1, 'name': SearchDatastore_Task, 'duration_secs': 
0.012095} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1115.636565] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-70f375d0-1a5e-4524-b243-0a85b3c44221 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.641761] env[62070]: DEBUG oslo_vmware.api [None req-0b0715c5-79e9-480e-83e7-b5809f9a7aae tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Waiting for the task: (returnval){ [ 1115.641761] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]522297c0-76c5-18c9-abe6-8c9a3e2a9543" [ 1115.641761] env[62070]: _type = "Task" [ 1115.641761] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1115.648923] env[62070]: DEBUG oslo_vmware.api [None req-0b0715c5-79e9-480e-83e7-b5809f9a7aae tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]522297c0-76c5-18c9-abe6-8c9a3e2a9543, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1115.653483] env[62070]: DEBUG nova.compute.manager [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] [instance: 7bfda953-ac95-4dce-b7a7-c570eae35582] Instance has had 0 of 5 cleanup attempts {{(pid=62070) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1116.028948] env[62070]: INFO nova.compute.manager [-] [instance: 38573462-18e5-4ba8-ad32-6ebc7bcf7c76] Took 1.03 seconds to deallocate network for instance. [ 1116.043430] env[62070]: WARNING nova.volume.cinder [None req-b7313fb3-2290-4fbd-8792-ef8ba59c6633 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Attachment 95fba64c-4387-4519-9f89-3b213139bcba does not exist. Ignoring.: cinderclient.exceptions.NotFound: Volume attachment could not be found with filter: attachment_id = 95fba64c-4387-4519-9f89-3b213139bcba. (HTTP 404) (Request-ID: req-521537e8-e71e-496a-8de1-6b6e77364bc5) [ 1116.043721] env[62070]: INFO nova.compute.manager [None req-b7313fb3-2290-4fbd-8792-ef8ba59c6633 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 38573462-18e5-4ba8-ad32-6ebc7bcf7c76] Took 0.01 seconds to detach 1 volumes for instance. [ 1116.045839] env[62070]: DEBUG nova.compute.manager [None req-b7313fb3-2290-4fbd-8792-ef8ba59c6633 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 38573462-18e5-4ba8-ad32-6ebc7bcf7c76] Deleting volume: e82aa618-eed9-4876-a9ff-8289ed9697af {{(pid=62070) _cleanup_volumes /opt/stack/nova/nova/compute/manager.py:3247}} [ 1116.061691] env[62070]: WARNING nova.compute.manager [None req-b7313fb3-2290-4fbd-8792-ef8ba59c6633 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Failed to delete volume: e82aa618-eed9-4876-a9ff-8289ed9697af due to Volume e82aa618-eed9-4876-a9ff-8289ed9697af could not be found.: nova.exception.VolumeNotFound: Volume e82aa618-eed9-4876-a9ff-8289ed9697af could not be found. 
[ 1116.094481] env[62070]: DEBUG oslo_concurrency.lockutils [None req-ad53ce46-223f-41b7-b653-59855f256bfe tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.721s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1116.117725] env[62070]: INFO nova.scheduler.client.report [None req-ad53ce46-223f-41b7-b653-59855f256bfe tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Deleted allocations for instance d8284a01-bbf6-4607-b2db-33bf2cd5457d [ 1116.154611] env[62070]: DEBUG oslo_vmware.api [None req-0b0715c5-79e9-480e-83e7-b5809f9a7aae tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]522297c0-76c5-18c9-abe6-8c9a3e2a9543, 'name': SearchDatastore_Task, 'duration_secs': 0.012802} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1116.154838] env[62070]: DEBUG oslo_concurrency.lockutils [None req-0b0715c5-79e9-480e-83e7-b5809f9a7aae tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Releasing lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1116.155121] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-0b0715c5-79e9-480e-83e7-b5809f9a7aae tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore1] 8d3cc6bf-5ae8-45d8-ba70-269e0d290ee4/8d3cc6bf-5ae8-45d8-ba70-269e0d290ee4.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1116.155393] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-cda767b1-420b-4775-a70f-de69fe3ccc0a {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.157506] env[62070]: DEBUG nova.compute.manager [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] [instance: 33d04e59-da01-4ba3-ac42-ab93372a332d] Instance has had 0 of 5 cleanup attempts {{(pid=62070) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1116.164523] env[62070]: DEBUG oslo_vmware.api [None req-0b0715c5-79e9-480e-83e7-b5809f9a7aae tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Waiting for the task: (returnval){ [ 1116.164523] env[62070]: value = "task-1122496" [ 1116.164523] env[62070]: _type = "Task" [ 1116.164523] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1116.172835] env[62070]: DEBUG oslo_vmware.api [None req-0b0715c5-79e9-480e-83e7-b5809f9a7aae tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Task: {'id': task-1122496, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1116.565733] env[62070]: INFO nova.compute.manager [None req-b7313fb3-2290-4fbd-8792-ef8ba59c6633 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 38573462-18e5-4ba8-ad32-6ebc7bcf7c76] Instance disappeared during terminate [ 1116.566690] env[62070]: DEBUG oslo_concurrency.lockutils [None req-b7313fb3-2290-4fbd-8792-ef8ba59c6633 tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Lock "38573462-18e5-4ba8-ad32-6ebc7bcf7c76" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 1.661s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1116.626712] env[62070]: DEBUG oslo_concurrency.lockutils [None req-ad53ce46-223f-41b7-b653-59855f256bfe tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Lock "d8284a01-bbf6-4607-b2db-33bf2cd5457d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.082s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1116.660542] env[62070]: DEBUG nova.compute.manager [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] [instance: 5cccd79d-d243-49db-8581-718dd594f3b3] Instance has had 0 of 5 cleanup attempts {{(pid=62070) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1116.675520] env[62070]: DEBUG oslo_vmware.api [None req-0b0715c5-79e9-480e-83e7-b5809f9a7aae tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Task: {'id': task-1122496, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1117.164780] env[62070]: DEBUG nova.compute.manager [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] [instance: b9e5a798-9ce9-4e36-8c32-bc32b0dc1eae] Instance has had 0 of 5 cleanup attempts {{(pid=62070) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1117.176769] env[62070]: DEBUG oslo_vmware.api [None req-0b0715c5-79e9-480e-83e7-b5809f9a7aae tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Task: {'id': task-1122496, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.533304} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1117.176769] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-0b0715c5-79e9-480e-83e7-b5809f9a7aae tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore1] 8d3cc6bf-5ae8-45d8-ba70-269e0d290ee4/8d3cc6bf-5ae8-45d8-ba70-269e0d290ee4.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 1117.177211] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-0b0715c5-79e9-480e-83e7-b5809f9a7aae tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] [instance: 8d3cc6bf-5ae8-45d8-ba70-269e0d290ee4] Extending root virtual disk to 1048576 {{(pid=62070) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1117.177211] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9fcabed2-ead7-49d3-b05e-0fea1c9f8b59 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.185412] env[62070]: DEBUG oslo_vmware.api [None req-0b0715c5-79e9-480e-83e7-b5809f9a7aae tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Waiting for the task: (returnval){ [ 1117.185412] env[62070]: value = "task-1122498" [ 1117.185412] env[62070]: _type = "Task" [ 1117.185412] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1117.192224] env[62070]: DEBUG oslo_vmware.api [None req-0b0715c5-79e9-480e-83e7-b5809f9a7aae tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Task: {'id': task-1122498, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1117.667813] env[62070]: DEBUG nova.compute.manager [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] [instance: f810eab8-1c8c-4f7a-8acd-f46ac5e6d31f] Instance has had 0 of 5 cleanup attempts {{(pid=62070) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1117.695097] env[62070]: DEBUG oslo_vmware.api [None req-0b0715c5-79e9-480e-83e7-b5809f9a7aae tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Task: {'id': task-1122498, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.145463} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1117.695965] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-0b0715c5-79e9-480e-83e7-b5809f9a7aae tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] [instance: 8d3cc6bf-5ae8-45d8-ba70-269e0d290ee4] Extended root virtual disk {{(pid=62070) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1117.696807] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b5c8163-6b6c-4beb-b5c9-8842411248b4 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.720796] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-0b0715c5-79e9-480e-83e7-b5809f9a7aae tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] [instance: 8d3cc6bf-5ae8-45d8-ba70-269e0d290ee4] Reconfiguring VM instance instance-0000006c to attach disk [datastore1] 8d3cc6bf-5ae8-45d8-ba70-269e0d290ee4/8d3cc6bf-5ae8-45d8-ba70-269e0d290ee4.vmdk or device None with type sparse {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1117.721444] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3a2581ab-062b-4e0d-b46d-2a6fdc6c3efe {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.735877] env[62070]: DEBUG nova.network.neutron [None req-1ffa7e81-5d7a-4857-b524-004d7d3fe3fc tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: fec23dd4-e956-42dd-b9a2-c8577f77cd81] Port 933a577b-8b0c-4c0d-ae12-372e4b70b7c9 binding to destination host cpu-1 is already ACTIVE {{(pid=62070) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3171}} [ 1117.736166] env[62070]: DEBUG oslo_concurrency.lockutils [None req-1ffa7e81-5d7a-4857-b524-004d7d3fe3fc tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Acquiring lock "refresh_cache-fec23dd4-e956-42dd-b9a2-c8577f77cd81" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1117.736322] env[62070]: DEBUG oslo_concurrency.lockutils [None req-1ffa7e81-5d7a-4857-b524-004d7d3fe3fc tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Acquired lock "refresh_cache-fec23dd4-e956-42dd-b9a2-c8577f77cd81" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1117.736489] env[62070]: DEBUG nova.network.neutron [None req-1ffa7e81-5d7a-4857-b524-004d7d3fe3fc tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: fec23dd4-e956-42dd-b9a2-c8577f77cd81] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1117.744782] env[62070]: DEBUG oslo_vmware.api [None req-0b0715c5-79e9-480e-83e7-b5809f9a7aae tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Waiting for the task: (returnval){ [ 1117.744782] env[62070]: value = "task-1122499" [ 1117.744782] env[62070]: _type = "Task" [ 1117.744782] env[62070]: } to complete. 
{{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1117.753686] env[62070]: DEBUG oslo_vmware.api [None req-0b0715c5-79e9-480e-83e7-b5809f9a7aae tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Task: {'id': task-1122499, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1118.173767] env[62070]: DEBUG nova.compute.manager [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] [instance: 6cba961f-f9f9-4d3c-853a-049a014c9dbb] Instance has had 0 of 5 cleanup attempts {{(pid=62070) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1118.253940] env[62070]: DEBUG oslo_vmware.api [None req-0b0715c5-79e9-480e-83e7-b5809f9a7aae tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Task: {'id': task-1122499, 'name': ReconfigVM_Task, 'duration_secs': 0.484762} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1118.254393] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-0b0715c5-79e9-480e-83e7-b5809f9a7aae tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] [instance: 8d3cc6bf-5ae8-45d8-ba70-269e0d290ee4] Reconfigured VM instance instance-0000006c to attach disk [datastore1] 8d3cc6bf-5ae8-45d8-ba70-269e0d290ee4/8d3cc6bf-5ae8-45d8-ba70-269e0d290ee4.vmdk or device None with type sparse {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1118.255073] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0f3eb951-480e-4fe6-9343-d00a8299c9cd {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.261411] env[62070]: DEBUG oslo_vmware.api [None req-0b0715c5-79e9-480e-83e7-b5809f9a7aae tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Waiting for the task: (returnval){ [ 1118.261411] env[62070]: value = "task-1122500" [ 1118.261411] env[62070]: _type = "Task" [ 1118.261411] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1118.269690] env[62070]: DEBUG oslo_vmware.api [None req-0b0715c5-79e9-480e-83e7-b5809f9a7aae tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Task: {'id': task-1122500, 'name': Rename_Task} progress is 5%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1118.510077] env[62070]: DEBUG nova.network.neutron [None req-1ffa7e81-5d7a-4857-b524-004d7d3fe3fc tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: fec23dd4-e956-42dd-b9a2-c8577f77cd81] Updating instance_info_cache with network_info: [{"id": "933a577b-8b0c-4c0d-ae12-372e4b70b7c9", "address": "fa:16:3e:34:74:6b", "network": {"id": "1ca75409-ae3f-4837-a2b6-ed2445253336", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1883034081-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.208", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f29ac48ab6544ec0bd1d210aec05dbc5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1559ce49-7345-443f-bf02-4bfeb88356ef", "external-id": "nsx-vlan-transportzone-670", "segmentation_id": 670, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap933a577b-8b", "ovs_interfaceid": "933a577b-8b0c-4c0d-ae12-372e4b70b7c9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1118.677298] env[62070]: DEBUG nova.compute.manager [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] [instance: e4cf42ff-8440-42bc-b629-4b712fd94e99] Instance has had 0 of 5 cleanup attempts {{(pid=62070) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1118.771929] env[62070]: DEBUG oslo_vmware.api [None req-0b0715c5-79e9-480e-83e7-b5809f9a7aae tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Task: {'id': task-1122500, 'name': Rename_Task, 'duration_secs': 0.147319} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1118.772318] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-0b0715c5-79e9-480e-83e7-b5809f9a7aae tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] [instance: 8d3cc6bf-5ae8-45d8-ba70-269e0d290ee4] Powering on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1118.772583] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3ff3760b-ab14-4022-b963-1b64d55f78f9 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.778602] env[62070]: DEBUG oslo_vmware.api [None req-0b0715c5-79e9-480e-83e7-b5809f9a7aae tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Waiting for the task: (returnval){ [ 1118.778602] env[62070]: value = "task-1122501" [ 1118.778602] env[62070]: _type = "Task" [ 1118.778602] env[62070]: } to complete. 
{{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1118.786208] env[62070]: DEBUG oslo_vmware.api [None req-0b0715c5-79e9-480e-83e7-b5809f9a7aae tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Task: {'id': task-1122501, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1119.013087] env[62070]: DEBUG oslo_concurrency.lockutils [None req-1ffa7e81-5d7a-4857-b524-004d7d3fe3fc tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Releasing lock "refresh_cache-fec23dd4-e956-42dd-b9a2-c8577f77cd81" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1119.181338] env[62070]: DEBUG nova.compute.manager [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] [instance: b101c79a-abfd-4104-aaed-096995fb2337] Instance has had 0 of 5 cleanup attempts {{(pid=62070) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1119.288684] env[62070]: DEBUG oslo_vmware.api [None req-0b0715c5-79e9-480e-83e7-b5809f9a7aae tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Task: {'id': task-1122501, 'name': PowerOnVM_Task, 'duration_secs': 0.470568} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1119.289041] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-0b0715c5-79e9-480e-83e7-b5809f9a7aae tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] [instance: 8d3cc6bf-5ae8-45d8-ba70-269e0d290ee4] Powered on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1119.289234] env[62070]: INFO nova.compute.manager [None req-0b0715c5-79e9-480e-83e7-b5809f9a7aae tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] [instance: 8d3cc6bf-5ae8-45d8-ba70-269e0d290ee4] Took 7.27 seconds to spawn the instance on the hypervisor. 
[ 1119.289420] env[62070]: DEBUG nova.compute.manager [None req-0b0715c5-79e9-480e-83e7-b5809f9a7aae tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] [instance: 8d3cc6bf-5ae8-45d8-ba70-269e0d290ee4] Checking state {{(pid=62070) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1119.290217] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87c4dfd9-cc55-41d7-9835-72ec8192efb1 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.516592] env[62070]: DEBUG nova.compute.manager [None req-1ffa7e81-5d7a-4857-b524-004d7d3fe3fc tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: fec23dd4-e956-42dd-b9a2-c8577f77cd81] Hypervisor driver does not support instance shared storage check, assuming it's not on shared storage {{(pid=62070) _is_instance_storage_shared /opt/stack/nova/nova/compute/manager.py:897}} [ 1119.516944] env[62070]: DEBUG oslo_concurrency.lockutils [None req-1ffa7e81-5d7a-4857-b524-004d7d3fe3fc tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1119.517220] env[62070]: DEBUG oslo_concurrency.lockutils [None req-1ffa7e81-5d7a-4857-b524-004d7d3fe3fc tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1119.685604] env[62070]: DEBUG nova.compute.manager [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] [instance: 1d595bc8-ab51-4443-bf32-079078f3133b] Instance has had 0 of 5 cleanup attempts {{(pid=62070) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1119.809214] env[62070]: INFO nova.compute.manager [None req-0b0715c5-79e9-480e-83e7-b5809f9a7aae tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] [instance: 8d3cc6bf-5ae8-45d8-ba70-269e0d290ee4] Took 13.32 seconds to build instance. 
[ 1120.021061] env[62070]: DEBUG nova.objects.instance [None req-1ffa7e81-5d7a-4857-b524-004d7d3fe3fc tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Lazy-loading 'migration_context' on Instance uuid fec23dd4-e956-42dd-b9a2-c8577f77cd81 {{(pid=62070) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1120.189442] env[62070]: DEBUG nova.compute.manager [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] [instance: a5cba512-9b50-4ca3-93eb-345be12dc588] Instance has had 0 of 5 cleanup attempts {{(pid=62070) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1120.221305] env[62070]: DEBUG oslo_concurrency.lockutils [None req-ee9e3d70-0806-4938-92f1-4057f8cea07a tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Acquiring lock "67e99ada-a8e6-4034-b19b-5b2cb883b735" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1120.221720] env[62070]: DEBUG oslo_concurrency.lockutils [None req-ee9e3d70-0806-4938-92f1-4057f8cea07a tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Lock "67e99ada-a8e6-4034-b19b-5b2cb883b735" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1120.222060] env[62070]: DEBUG oslo_concurrency.lockutils [None req-ee9e3d70-0806-4938-92f1-4057f8cea07a tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Acquiring lock "67e99ada-a8e6-4034-b19b-5b2cb883b735-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1120.222341] env[62070]: DEBUG oslo_concurrency.lockutils [None req-ee9e3d70-0806-4938-92f1-4057f8cea07a tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Lock "67e99ada-a8e6-4034-b19b-5b2cb883b735-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1120.222608] env[62070]: DEBUG oslo_concurrency.lockutils [None req-ee9e3d70-0806-4938-92f1-4057f8cea07a tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Lock "67e99ada-a8e6-4034-b19b-5b2cb883b735-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1120.225115] env[62070]: INFO nova.compute.manager [None req-ee9e3d70-0806-4938-92f1-4057f8cea07a tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 67e99ada-a8e6-4034-b19b-5b2cb883b735] Terminating instance [ 1120.227241] env[62070]: DEBUG nova.compute.manager [None req-ee9e3d70-0806-4938-92f1-4057f8cea07a tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 67e99ada-a8e6-4034-b19b-5b2cb883b735] Start destroying the instance on the hypervisor. 
{{(pid=62070) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1120.227443] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-ee9e3d70-0806-4938-92f1-4057f8cea07a tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 67e99ada-a8e6-4034-b19b-5b2cb883b735] Destroying instance {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1120.228291] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1640d35-d0d4-4aea-b86a-aba995753407 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.235894] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-ee9e3d70-0806-4938-92f1-4057f8cea07a tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 67e99ada-a8e6-4034-b19b-5b2cb883b735] Powering off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1120.236171] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-aa287065-fe00-45db-86b3-d5a0a9364933 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.243364] env[62070]: DEBUG oslo_vmware.api [None req-ee9e3d70-0806-4938-92f1-4057f8cea07a tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Waiting for the task: (returnval){ [ 1120.243364] env[62070]: value = "task-1122502" [ 1120.243364] env[62070]: _type = "Task" [ 1120.243364] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1120.251567] env[62070]: DEBUG oslo_vmware.api [None req-ee9e3d70-0806-4938-92f1-4057f8cea07a tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': task-1122502, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1120.311750] env[62070]: DEBUG oslo_concurrency.lockutils [None req-0b0715c5-79e9-480e-83e7-b5809f9a7aae tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Lock "8d3cc6bf-5ae8-45d8-ba70-269e0d290ee4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.828s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1120.693184] env[62070]: DEBUG nova.compute.manager [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] [instance: 53a1791d-38fd-4721-b82c-2f0922348300] Instance has had 0 of 5 cleanup attempts {{(pid=62070) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1120.696757] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34c21f6f-c8af-4f39-a0b8-e52d05839904 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.704719] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9ea54fe-7abc-49c9-96d2-71c7db28359e {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.735063] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2211c2ea-a065-45c8-8faf-67ea67cf204e {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.743128] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-242824b4-7cd7-43b5-b790-9b803da3ddb6 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.760865] env[62070]: DEBUG nova.compute.provider_tree [None req-1ffa7e81-5d7a-4857-b524-004d7d3fe3fc tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1120.764730] env[62070]: DEBUG oslo_vmware.api [None req-ee9e3d70-0806-4938-92f1-4057f8cea07a tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': task-1122502, 'name': PowerOffVM_Task, 'duration_secs': 0.295566} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1120.765209] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-ee9e3d70-0806-4938-92f1-4057f8cea07a tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 67e99ada-a8e6-4034-b19b-5b2cb883b735] Powered off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1120.765380] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-ee9e3d70-0806-4938-92f1-4057f8cea07a tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 67e99ada-a8e6-4034-b19b-5b2cb883b735] Unregistering the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1120.765632] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-00f6f4bb-a8c9-4c96-a872-63e347c39b46 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.839732] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-ee9e3d70-0806-4938-92f1-4057f8cea07a tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 67e99ada-a8e6-4034-b19b-5b2cb883b735] Unregistered the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1120.839946] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-ee9e3d70-0806-4938-92f1-4057f8cea07a tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 67e99ada-a8e6-4034-b19b-5b2cb883b735] Deleting contents of the VM from datastore datastore1 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1120.840146] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-ee9e3d70-0806-4938-92f1-4057f8cea07a tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Deleting the datastore file [datastore1] 67e99ada-a8e6-4034-b19b-5b2cb883b735 {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1120.840410] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b1c6af28-1bdb-433b-aad3-fc6046e6553e {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.847758] env[62070]: DEBUG oslo_vmware.api [None req-ee9e3d70-0806-4938-92f1-4057f8cea07a tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Waiting for the task: (returnval){ [ 1120.847758] env[62070]: value = "task-1122504" [ 1120.847758] env[62070]: _type = "Task" [ 1120.847758] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1120.857020] env[62070]: DEBUG oslo_vmware.api [None req-ee9e3d70-0806-4938-92f1-4057f8cea07a tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': task-1122504, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1121.200488] env[62070]: DEBUG nova.compute.manager [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] [instance: 519cad6a-ebe0-42db-a19e-27249b83436e] Instance has had 0 of 5 cleanup attempts {{(pid=62070) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1121.266892] env[62070]: DEBUG nova.scheduler.client.report [None req-1ffa7e81-5d7a-4857-b524-004d7d3fe3fc tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1121.279695] env[62070]: DEBUG nova.compute.manager [req-c7cf5dac-0306-4f90-8483-4cf9e2ed44d9 req-63544d00-b4b8-408a-91c4-517797ecf6d5 service nova] [instance: 8d3cc6bf-5ae8-45d8-ba70-269e0d290ee4] Received event network-changed-38ef183a-1894-4b0e-b9e6-d551818dc34c {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1121.279695] env[62070]: DEBUG nova.compute.manager [req-c7cf5dac-0306-4f90-8483-4cf9e2ed44d9 req-63544d00-b4b8-408a-91c4-517797ecf6d5 service nova] [instance: 8d3cc6bf-5ae8-45d8-ba70-269e0d290ee4] Refreshing instance network info cache due to event network-changed-38ef183a-1894-4b0e-b9e6-d551818dc34c. {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1121.279904] env[62070]: DEBUG oslo_concurrency.lockutils [req-c7cf5dac-0306-4f90-8483-4cf9e2ed44d9 req-63544d00-b4b8-408a-91c4-517797ecf6d5 service nova] Acquiring lock "refresh_cache-8d3cc6bf-5ae8-45d8-ba70-269e0d290ee4" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1121.280387] env[62070]: DEBUG oslo_concurrency.lockutils [req-c7cf5dac-0306-4f90-8483-4cf9e2ed44d9 req-63544d00-b4b8-408a-91c4-517797ecf6d5 service nova] Acquired lock "refresh_cache-8d3cc6bf-5ae8-45d8-ba70-269e0d290ee4" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1121.280602] env[62070]: DEBUG nova.network.neutron [req-c7cf5dac-0306-4f90-8483-4cf9e2ed44d9 req-63544d00-b4b8-408a-91c4-517797ecf6d5 service nova] [instance: 8d3cc6bf-5ae8-45d8-ba70-269e0d290ee4] Refreshing network info cache for port 38ef183a-1894-4b0e-b9e6-d551818dc34c {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1121.359749] env[62070]: DEBUG oslo_vmware.api [None req-ee9e3d70-0806-4938-92f1-4057f8cea07a tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Task: {'id': task-1122504, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.256633} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1121.360017] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-ee9e3d70-0806-4938-92f1-4057f8cea07a tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Deleted the datastore file {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1121.360223] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-ee9e3d70-0806-4938-92f1-4057f8cea07a tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 67e99ada-a8e6-4034-b19b-5b2cb883b735] Deleted contents of the VM from datastore datastore1 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1121.360410] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-ee9e3d70-0806-4938-92f1-4057f8cea07a tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 67e99ada-a8e6-4034-b19b-5b2cb883b735] Instance destroyed {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1121.360592] env[62070]: INFO nova.compute.manager [None req-ee9e3d70-0806-4938-92f1-4057f8cea07a tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] [instance: 67e99ada-a8e6-4034-b19b-5b2cb883b735] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1121.360839] env[62070]: DEBUG oslo.service.loopingcall [None req-ee9e3d70-0806-4938-92f1-4057f8cea07a tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1121.361055] env[62070]: DEBUG nova.compute.manager [-] [instance: 67e99ada-a8e6-4034-b19b-5b2cb883b735] Deallocating network for instance {{(pid=62070) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1121.361181] env[62070]: DEBUG nova.network.neutron [-] [instance: 67e99ada-a8e6-4034-b19b-5b2cb883b735] deallocate_for_instance() {{(pid=62070) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1121.703598] env[62070]: DEBUG nova.compute.manager [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] [instance: 2c58db1d-405f-4489-85db-c74723be4a8d] Instance has had 0 of 5 cleanup attempts {{(pid=62070) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1122.103147] env[62070]: DEBUG nova.network.neutron [req-c7cf5dac-0306-4f90-8483-4cf9e2ed44d9 req-63544d00-b4b8-408a-91c4-517797ecf6d5 service nova] [instance: 8d3cc6bf-5ae8-45d8-ba70-269e0d290ee4] Updated VIF entry in instance network info cache for port 38ef183a-1894-4b0e-b9e6-d551818dc34c. 
{{(pid=62070) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1122.103806] env[62070]: DEBUG nova.network.neutron [req-c7cf5dac-0306-4f90-8483-4cf9e2ed44d9 req-63544d00-b4b8-408a-91c4-517797ecf6d5 service nova] [instance: 8d3cc6bf-5ae8-45d8-ba70-269e0d290ee4] Updating instance_info_cache with network_info: [{"id": "38ef183a-1894-4b0e-b9e6-d551818dc34c", "address": "fa:16:3e:cd:26:51", "network": {"id": "6ea9aade-1b40-4ce8-a502-14ff09a4ab40", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1617295069-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.225", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "735d24ccc5614660a5b34d77af648f94", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5b8af79a-31d5-4d78-93d7-3919aa1d9186", "external-id": "nsx-vlan-transportzone-324", "segmentation_id": 324, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap38ef183a-18", "ovs_interfaceid": "38ef183a-1894-4b0e-b9e6-d551818dc34c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1122.127431] env[62070]: DEBUG nova.network.neutron [-] [instance: 67e99ada-a8e6-4034-b19b-5b2cb883b735] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1122.160252] env[62070]: DEBUG oslo_vmware.rw_handles [None req-895fa041-4a30-4097-9383-c3cb465b2e41 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/521f1c93-508e-10c3-3656-363bcf371b57/disk-0.vmdk. {{(pid=62070) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1122.161313] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7bce7fe-744e-4a83-9774-851c1da3d8fa {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.168213] env[62070]: DEBUG oslo_vmware.rw_handles [None req-895fa041-4a30-4097-9383-c3cb465b2e41 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/521f1c93-508e-10c3-3656-363bcf371b57/disk-0.vmdk is in state: ready. {{(pid=62070) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1122.168413] env[62070]: ERROR oslo_vmware.rw_handles [None req-895fa041-4a30-4097-9383-c3cb465b2e41 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/521f1c93-508e-10c3-3656-363bcf371b57/disk-0.vmdk due to incomplete transfer. 
[ 1122.168628] env[62070]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-287d6708-cc24-4383-9d5e-fdb4e9d392d3 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.175802] env[62070]: DEBUG oslo_vmware.rw_handles [None req-895fa041-4a30-4097-9383-c3cb465b2e41 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/521f1c93-508e-10c3-3656-363bcf371b57/disk-0.vmdk. {{(pid=62070) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1122.176010] env[62070]: DEBUG nova.virt.vmwareapi.images [None req-895fa041-4a30-4097-9383-c3cb465b2e41 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] [instance: 20c4fabc-fc9b-49c7-ab28-fa092ad66038] Uploaded image 8a6c97c1-5195-4f75-914b-69d3dbca246d to the Glance image server {{(pid=62070) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:473}} [ 1122.178244] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-895fa041-4a30-4097-9383-c3cb465b2e41 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] [instance: 20c4fabc-fc9b-49c7-ab28-fa092ad66038] Destroying the VM {{(pid=62070) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1358}} [ 1122.178506] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-1c2a1dec-9d78-47bb-af22-8dcc4fc3df8f {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.184281] env[62070]: DEBUG oslo_vmware.api [None req-895fa041-4a30-4097-9383-c3cb465b2e41 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Waiting for the task: (returnval){ [ 1122.184281] env[62070]: value = "task-1122505" [ 1122.184281] env[62070]: _type = "Task" [ 1122.184281] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1122.191978] env[62070]: DEBUG oslo_vmware.api [None req-895fa041-4a30-4097-9383-c3cb465b2e41 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Task: {'id': task-1122505, 'name': Destroy_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1122.206504] env[62070]: DEBUG nova.compute.manager [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] [instance: e850734f-c49c-46d7-87ab-b0d6bed89d9b] Instance has had 0 of 5 cleanup attempts {{(pid=62070) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1122.278070] env[62070]: DEBUG oslo_concurrency.lockutils [None req-1ffa7e81-5d7a-4857-b524-004d7d3fe3fc tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: held 2.761s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1122.606548] env[62070]: DEBUG oslo_concurrency.lockutils [req-c7cf5dac-0306-4f90-8483-4cf9e2ed44d9 req-63544d00-b4b8-408a-91c4-517797ecf6d5 service nova] Releasing lock "refresh_cache-8d3cc6bf-5ae8-45d8-ba70-269e0d290ee4" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1122.629338] env[62070]: INFO nova.compute.manager [-] [instance: 67e99ada-a8e6-4034-b19b-5b2cb883b735] Took 1.27 seconds to deallocate network for instance. [ 1122.693614] env[62070]: DEBUG oslo_vmware.api [None req-895fa041-4a30-4097-9383-c3cb465b2e41 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Task: {'id': task-1122505, 'name': Destroy_Task, 'duration_secs': 0.395496} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1122.693930] env[62070]: INFO nova.virt.vmwareapi.vm_util [None req-895fa041-4a30-4097-9383-c3cb465b2e41 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] [instance: 20c4fabc-fc9b-49c7-ab28-fa092ad66038] Destroyed the VM [ 1122.694204] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-895fa041-4a30-4097-9383-c3cb465b2e41 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] [instance: 20c4fabc-fc9b-49c7-ab28-fa092ad66038] Deleting Snapshot of the VM instance {{(pid=62070) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1122.694446] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-c8b69da0-3477-4989-a965-ec7abbfea53b {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.701127] env[62070]: DEBUG oslo_vmware.api [None req-895fa041-4a30-4097-9383-c3cb465b2e41 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Waiting for the task: (returnval){ [ 1122.701127] env[62070]: value = "task-1122506" [ 1122.701127] env[62070]: _type = "Task" [ 1122.701127] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1122.708246] env[62070]: DEBUG oslo_vmware.api [None req-895fa041-4a30-4097-9383-c3cb465b2e41 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Task: {'id': task-1122506, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1122.709709] env[62070]: DEBUG nova.compute.manager [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] [instance: 84c00e4a-20d3-4739-8535-e27076d85a89] Instance has had 0 of 5 cleanup attempts {{(pid=62070) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1123.136218] env[62070]: DEBUG oslo_concurrency.lockutils [None req-ee9e3d70-0806-4938-92f1-4057f8cea07a tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1123.136541] env[62070]: DEBUG oslo_concurrency.lockutils [None req-ee9e3d70-0806-4938-92f1-4057f8cea07a tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1123.136860] env[62070]: DEBUG nova.objects.instance [None req-ee9e3d70-0806-4938-92f1-4057f8cea07a tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Lazy-loading 'resources' on Instance uuid 67e99ada-a8e6-4034-b19b-5b2cb883b735 {{(pid=62070) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1123.211315] env[62070]: DEBUG oslo_vmware.api [None req-895fa041-4a30-4097-9383-c3cb465b2e41 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Task: {'id': task-1122506, 'name': RemoveSnapshot_Task, 'duration_secs': 0.381117} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1123.211603] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-895fa041-4a30-4097-9383-c3cb465b2e41 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] [instance: 20c4fabc-fc9b-49c7-ab28-fa092ad66038] Deleted Snapshot of the VM instance {{(pid=62070) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1123.211893] env[62070]: DEBUG nova.compute.manager [None req-895fa041-4a30-4097-9383-c3cb465b2e41 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] [instance: 20c4fabc-fc9b-49c7-ab28-fa092ad66038] Checking state {{(pid=62070) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1123.212311] env[62070]: DEBUG nova.compute.manager [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] [instance: 1440361b-d3b4-4c1c-995c-fe7ff99ee297] Instance has had 0 of 5 cleanup attempts {{(pid=62070) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1123.214411] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39ba2f0d-f127-4f4a-af7d-bf4e819416cc {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.305323] env[62070]: DEBUG nova.compute.manager [req-468b4956-2286-4b54-b49e-d64c259f09e6 req-40144225-f0d9-4f5c-9186-6f1a5ea35ca3 service nova] [instance: 67e99ada-a8e6-4034-b19b-5b2cb883b735] Received event network-vif-deleted-e01eb485-1347-4afb-b881-62797a5b84af {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1123.718107] env[62070]: DEBUG nova.compute.manager [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] [instance: cf52cee8-874e-44e8-a36e-49ac20f3e312] Instance has had 0 of 5 cleanup attempts {{(pid=62070) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1123.721150] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fe415e6-db1d-4c00-959e-4a4e70a0075d {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.726652] env[62070]: INFO nova.compute.manager [None req-895fa041-4a30-4097-9383-c3cb465b2e41 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] [instance: 20c4fabc-fc9b-49c7-ab28-fa092ad66038] Shelve offloading [ 1123.728135] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-895fa041-4a30-4097-9383-c3cb465b2e41 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] [instance: 20c4fabc-fc9b-49c7-ab28-fa092ad66038] Powering off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1123.728364] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-dd0e0c7a-714b-46a2-b365-b66a56da5ed7 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.732432] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e62e3b7-eb79-46f9-b551-5f0adc9e78e4 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.736613] env[62070]: DEBUG oslo_vmware.api [None 
req-895fa041-4a30-4097-9383-c3cb465b2e41 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Waiting for the task: (returnval){ [ 1123.736613] env[62070]: value = "task-1122507" [ 1123.736613] env[62070]: _type = "Task" [ 1123.736613] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1123.765823] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ae8416b-1de1-4752-8ae3-0e3afc0296f3 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.772103] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-895fa041-4a30-4097-9383-c3cb465b2e41 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] [instance: 20c4fabc-fc9b-49c7-ab28-fa092ad66038] VM already powered off {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1509}} [ 1123.772334] env[62070]: DEBUG nova.compute.manager [None req-895fa041-4a30-4097-9383-c3cb465b2e41 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] [instance: 20c4fabc-fc9b-49c7-ab28-fa092ad66038] Checking state {{(pid=62070) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1123.773036] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd066402-c8de-4d7b-8f8a-7f1b80e32363 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.778523] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-865e8d74-4cd0-468f-9201-1402ee6349d3 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.783529] env[62070]: DEBUG oslo_concurrency.lockutils [None req-895fa041-4a30-4097-9383-c3cb465b2e41 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Acquiring lock "refresh_cache-20c4fabc-fc9b-49c7-ab28-fa092ad66038" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1123.783700] env[62070]: DEBUG oslo_concurrency.lockutils [None req-895fa041-4a30-4097-9383-c3cb465b2e41 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Acquired lock "refresh_cache-20c4fabc-fc9b-49c7-ab28-fa092ad66038" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1123.783873] env[62070]: DEBUG nova.network.neutron [None req-895fa041-4a30-4097-9383-c3cb465b2e41 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] [instance: 20c4fabc-fc9b-49c7-ab28-fa092ad66038] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1123.794483] env[62070]: DEBUG nova.compute.provider_tree [None req-ee9e3d70-0806-4938-92f1-4057f8cea07a tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1123.813116] env[62070]: INFO 
nova.compute.manager [None req-1ffa7e81-5d7a-4857-b524-004d7d3fe3fc tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: fec23dd4-e956-42dd-b9a2-c8577f77cd81] Swapping old allocation on dict_keys(['21c7c111-1b69-4468-b2c4-5dd96014fbd6']) held by migration 9881befb-bca4-4f20-8cfa-c50e03abd0bf for instance [ 1123.832994] env[62070]: DEBUG nova.scheduler.client.report [None req-1ffa7e81-5d7a-4857-b524-004d7d3fe3fc tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Overwriting current allocation {'allocations': {'21c7c111-1b69-4468-b2c4-5dd96014fbd6': {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}, 'generation': 159}}, 'project_id': 'f29ac48ab6544ec0bd1d210aec05dbc5', 'user_id': '0ab707a4862f42199fc2a91733563cde', 'consumer_generation': 1} on consumer fec23dd4-e956-42dd-b9a2-c8577f77cd81 {{(pid=62070) move_allocations /opt/stack/nova/nova/scheduler/client/report.py:2032}} [ 1123.908331] env[62070]: DEBUG oslo_concurrency.lockutils [None req-1ffa7e81-5d7a-4857-b524-004d7d3fe3fc tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Acquiring lock "refresh_cache-fec23dd4-e956-42dd-b9a2-c8577f77cd81" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1123.908529] env[62070]: DEBUG oslo_concurrency.lockutils [None req-1ffa7e81-5d7a-4857-b524-004d7d3fe3fc tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Acquired lock "refresh_cache-fec23dd4-e956-42dd-b9a2-c8577f77cd81" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1123.908710] env[62070]: DEBUG nova.network.neutron [None req-1ffa7e81-5d7a-4857-b524-004d7d3fe3fc tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: fec23dd4-e956-42dd-b9a2-c8577f77cd81] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1124.224662] env[62070]: DEBUG nova.compute.manager [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] [instance: 3d699ce5-4d21-48f3-8f17-0cd49aebf109] Instance has had 0 of 5 cleanup attempts {{(pid=62070) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1124.297581] env[62070]: DEBUG nova.scheduler.client.report [None req-ee9e3d70-0806-4938-92f1-4057f8cea07a tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1124.536439] env[62070]: DEBUG nova.network.neutron [None req-895fa041-4a30-4097-9383-c3cb465b2e41 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] [instance: 20c4fabc-fc9b-49c7-ab28-fa092ad66038] Updating instance_info_cache with network_info: [{"id": "0eb3c7d4-224c-4d94-9450-0623a1e1b162", "address": "fa:16:3e:d9:a2:6e", "network": {"id": 
"b9ef8f6c-bbd6-409d-a591-ad584e5e028f", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-599171324-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.143", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ca25fba006b740f2a86fe10e4abe9400", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa8c2f93-f287-41b3-adb6-4942a7ea2a0b", "external-id": "nsx-vlan-transportzone-363", "segmentation_id": 363, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0eb3c7d4-22", "ovs_interfaceid": "0eb3c7d4-224c-4d94-9450-0623a1e1b162", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1124.604758] env[62070]: DEBUG nova.network.neutron [None req-1ffa7e81-5d7a-4857-b524-004d7d3fe3fc tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: fec23dd4-e956-42dd-b9a2-c8577f77cd81] Updating instance_info_cache with network_info: [{"id": "933a577b-8b0c-4c0d-ae12-372e4b70b7c9", "address": "fa:16:3e:34:74:6b", "network": {"id": "1ca75409-ae3f-4837-a2b6-ed2445253336", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1883034081-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.208", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f29ac48ab6544ec0bd1d210aec05dbc5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1559ce49-7345-443f-bf02-4bfeb88356ef", "external-id": "nsx-vlan-transportzone-670", "segmentation_id": 670, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap933a577b-8b", "ovs_interfaceid": "933a577b-8b0c-4c0d-ae12-372e4b70b7c9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1124.728957] env[62070]: DEBUG nova.compute.manager [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] [instance: e51d0146-502a-4ace-856e-b0dbcc11edea] Instance has had 0 of 5 cleanup attempts {{(pid=62070) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1124.802795] env[62070]: DEBUG oslo_concurrency.lockutils [None req-ee9e3d70-0806-4938-92f1-4057f8cea07a tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.666s {{(pid=62070) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1124.818535] env[62070]: INFO nova.scheduler.client.report [None req-ee9e3d70-0806-4938-92f1-4057f8cea07a tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Deleted allocations for instance 67e99ada-a8e6-4034-b19b-5b2cb883b735 [ 1125.039348] env[62070]: DEBUG oslo_concurrency.lockutils [None req-895fa041-4a30-4097-9383-c3cb465b2e41 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Releasing lock "refresh_cache-20c4fabc-fc9b-49c7-ab28-fa092ad66038" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1125.107641] env[62070]: DEBUG oslo_concurrency.lockutils [None req-1ffa7e81-5d7a-4857-b524-004d7d3fe3fc tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Releasing lock "refresh_cache-fec23dd4-e956-42dd-b9a2-c8577f77cd81" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1125.108128] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-1ffa7e81-5d7a-4857-b524-004d7d3fe3fc tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: fec23dd4-e956-42dd-b9a2-c8577f77cd81] Powering off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1125.108408] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0b16965e-57d0-4a6f-8c2a-51368fef9c8c {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.114891] env[62070]: DEBUG oslo_vmware.api [None req-1ffa7e81-5d7a-4857-b524-004d7d3fe3fc tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Waiting for the task: (returnval){ [ 1125.114891] env[62070]: value = "task-1122508" [ 1125.114891] env[62070]: _type = "Task" [ 1125.114891] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1125.124297] env[62070]: DEBUG oslo_vmware.api [None req-1ffa7e81-5d7a-4857-b524-004d7d3fe3fc tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Task: {'id': task-1122508, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1125.232393] env[62070]: DEBUG nova.compute.manager [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] [instance: 7dc27fe6-495f-498d-88fe-a99ddc19a21c] Instance has had 0 of 5 cleanup attempts {{(pid=62070) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1125.270462] env[62070]: DEBUG nova.compute.manager [req-44002bb4-dff7-4644-bdc4-82e1e629d702 req-a55e7c62-8e0e-48ec-b201-192843e55e66 service nova] [instance: 20c4fabc-fc9b-49c7-ab28-fa092ad66038] Received event network-vif-unplugged-0eb3c7d4-224c-4d94-9450-0623a1e1b162 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1125.270744] env[62070]: DEBUG oslo_concurrency.lockutils [req-44002bb4-dff7-4644-bdc4-82e1e629d702 req-a55e7c62-8e0e-48ec-b201-192843e55e66 service nova] Acquiring lock "20c4fabc-fc9b-49c7-ab28-fa092ad66038-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1125.271045] env[62070]: DEBUG oslo_concurrency.lockutils [req-44002bb4-dff7-4644-bdc4-82e1e629d702 req-a55e7c62-8e0e-48ec-b201-192843e55e66 service nova] Lock "20c4fabc-fc9b-49c7-ab28-fa092ad66038-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1125.271294] env[62070]: DEBUG oslo_concurrency.lockutils [req-44002bb4-dff7-4644-bdc4-82e1e629d702 req-a55e7c62-8e0e-48ec-b201-192843e55e66 service nova] Lock "20c4fabc-fc9b-49c7-ab28-fa092ad66038-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1125.271528] env[62070]: DEBUG nova.compute.manager [req-44002bb4-dff7-4644-bdc4-82e1e629d702 req-a55e7c62-8e0e-48ec-b201-192843e55e66 service nova] [instance: 20c4fabc-fc9b-49c7-ab28-fa092ad66038] No waiting events found dispatching network-vif-unplugged-0eb3c7d4-224c-4d94-9450-0623a1e1b162 {{(pid=62070) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1125.271768] env[62070]: WARNING nova.compute.manager [req-44002bb4-dff7-4644-bdc4-82e1e629d702 req-a55e7c62-8e0e-48ec-b201-192843e55e66 service nova] [instance: 20c4fabc-fc9b-49c7-ab28-fa092ad66038] Received unexpected event network-vif-unplugged-0eb3c7d4-224c-4d94-9450-0623a1e1b162 for instance with vm_state shelved and task_state shelving_offloading. 
[ 1125.325670] env[62070]: DEBUG oslo_concurrency.lockutils [None req-ee9e3d70-0806-4938-92f1-4057f8cea07a tempest-ServerActionsTestOtherA-1868654732 tempest-ServerActionsTestOtherA-1868654732-project-member] Lock "67e99ada-a8e6-4034-b19b-5b2cb883b735" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 5.104s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1125.352934] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-895fa041-4a30-4097-9383-c3cb465b2e41 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] [instance: 20c4fabc-fc9b-49c7-ab28-fa092ad66038] Destroying instance {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1125.353826] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-baba8e28-4c18-4097-bb8a-ce0bdc890d41 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.361546] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-895fa041-4a30-4097-9383-c3cb465b2e41 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] [instance: 20c4fabc-fc9b-49c7-ab28-fa092ad66038] Unregistering the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1125.361810] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a3864259-7ef8-45dc-a113-d18c1e7e2fb8 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.422137] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-895fa041-4a30-4097-9383-c3cb465b2e41 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] [instance: 20c4fabc-fc9b-49c7-ab28-fa092ad66038] Unregistered the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1125.422378] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-895fa041-4a30-4097-9383-c3cb465b2e41 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] [instance: 20c4fabc-fc9b-49c7-ab28-fa092ad66038] Deleting contents of the VM from datastore datastore2 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1125.422634] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-895fa041-4a30-4097-9383-c3cb465b2e41 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Deleting the datastore file [datastore2] 20c4fabc-fc9b-49c7-ab28-fa092ad66038 {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1125.422834] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d2ab04f3-4679-4e7c-a691-3db7f468dce3 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.429529] env[62070]: DEBUG oslo_vmware.api [None req-895fa041-4a30-4097-9383-c3cb465b2e41 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Waiting for the task: (returnval){ [ 1125.429529] env[62070]: value = "task-1122510" [ 1125.429529] env[62070]: _type = "Task" [ 1125.429529] env[62070]: } to complete. 
{{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1125.437852] env[62070]: DEBUG oslo_vmware.api [None req-895fa041-4a30-4097-9383-c3cb465b2e41 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Task: {'id': task-1122510, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1125.624694] env[62070]: DEBUG oslo_vmware.api [None req-1ffa7e81-5d7a-4857-b524-004d7d3fe3fc tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Task: {'id': task-1122508, 'name': PowerOffVM_Task, 'duration_secs': 0.170232} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1125.624972] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-1ffa7e81-5d7a-4857-b524-004d7d3fe3fc tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: fec23dd4-e956-42dd-b9a2-c8577f77cd81] Powered off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1125.625664] env[62070]: DEBUG nova.virt.hardware [None req-1ffa7e81-5d7a-4857-b524-004d7d3fe3fc tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T09:21:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=<?>,container_format='bare',created_at=<?>,direct_url=<?>,disk_format='vmdk',id=43ea607c-7ece-4601-9b11-75c6a16aa7dd,min_disk=1,min_ram=0,name=<?>,owner=<?>,properties=ImageMetaProps,protected=<?>,size=<?>,status=<?>,tags=<?>,updated_at=<?>,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1125.625890] env[62070]: DEBUG nova.virt.hardware [None req-1ffa7e81-5d7a-4857-b524-004d7d3fe3fc tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Flavor limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1125.626068] env[62070]: DEBUG nova.virt.hardware [None req-1ffa7e81-5d7a-4857-b524-004d7d3fe3fc tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Image limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1125.626267] env[62070]: DEBUG nova.virt.hardware [None req-1ffa7e81-5d7a-4857-b524-004d7d3fe3fc tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Flavor pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1125.626417] env[62070]: DEBUG nova.virt.hardware [None req-1ffa7e81-5d7a-4857-b524-004d7d3fe3fc tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Image pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1125.626568] env[62070]: DEBUG nova.virt.hardware [None req-1ffa7e81-5d7a-4857-b524-004d7d3fe3fc tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Chose sockets=0, 
cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1125.626772] env[62070]: DEBUG nova.virt.hardware [None req-1ffa7e81-5d7a-4857-b524-004d7d3fe3fc tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1125.626935] env[62070]: DEBUG nova.virt.hardware [None req-1ffa7e81-5d7a-4857-b524-004d7d3fe3fc tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1125.627141] env[62070]: DEBUG nova.virt.hardware [None req-1ffa7e81-5d7a-4857-b524-004d7d3fe3fc tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Got 1 possible topologies {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1125.627282] env[62070]: DEBUG nova.virt.hardware [None req-1ffa7e81-5d7a-4857-b524-004d7d3fe3fc tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1125.627460] env[62070]: DEBUG nova.virt.hardware [None req-1ffa7e81-5d7a-4857-b524-004d7d3fe3fc tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1125.632463] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-855aca3b-3a9a-4227-8f79-43b841d96a0c {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.646840] env[62070]: DEBUG oslo_vmware.api [None req-1ffa7e81-5d7a-4857-b524-004d7d3fe3fc tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Waiting for the task: (returnval){ [ 1125.646840] env[62070]: value = "task-1122511" [ 1125.646840] env[62070]: _type = "Task" [ 1125.646840] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1125.654261] env[62070]: DEBUG oslo_vmware.api [None req-1ffa7e81-5d7a-4857-b524-004d7d3fe3fc tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Task: {'id': task-1122511, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1125.736024] env[62070]: DEBUG nova.compute.manager [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] [instance: d2cfcfac-4f15-4b16-9046-76722ee2e39b] Instance has had 0 of 5 cleanup attempts {{(pid=62070) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1125.939394] env[62070]: DEBUG oslo_vmware.api [None req-895fa041-4a30-4097-9383-c3cb465b2e41 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Task: {'id': task-1122510, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.137166} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1125.939651] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-895fa041-4a30-4097-9383-c3cb465b2e41 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Deleted the datastore file {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1125.939844] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-895fa041-4a30-4097-9383-c3cb465b2e41 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] [instance: 20c4fabc-fc9b-49c7-ab28-fa092ad66038] Deleted contents of the VM from datastore datastore2 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1125.940103] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-895fa041-4a30-4097-9383-c3cb465b2e41 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] [instance: 20c4fabc-fc9b-49c7-ab28-fa092ad66038] Instance destroyed {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1125.964882] env[62070]: INFO nova.scheduler.client.report [None req-895fa041-4a30-4097-9383-c3cb465b2e41 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Deleted allocations for instance 20c4fabc-fc9b-49c7-ab28-fa092ad66038 [ 1126.156163] env[62070]: DEBUG oslo_vmware.api [None req-1ffa7e81-5d7a-4857-b524-004d7d3fe3fc tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Task: {'id': task-1122511, 'name': ReconfigVM_Task, 'duration_secs': 0.12793} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1126.156981] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9a46397-a4ff-4314-bfc6-5f474aa0f929 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.174667] env[62070]: DEBUG nova.virt.hardware [None req-1ffa7e81-5d7a-4857-b524-004d7d3fe3fc tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T09:21:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=<?>,container_format='bare',created_at=<?>,direct_url=<?>,disk_format='vmdk',id=43ea607c-7ece-4601-9b11-75c6a16aa7dd,min_disk=1,min_ram=0,name=<?>,owner=<?>,properties=ImageMetaProps,protected=<?>,size=<?>,status=<?>,tags=<?>,updated_at=<?>,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1126.174914] env[62070]: DEBUG nova.virt.hardware [None req-1ffa7e81-5d7a-4857-b524-004d7d3fe3fc tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Flavor limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1126.175134] env[62070]: DEBUG nova.virt.hardware [None req-1ffa7e81-5d7a-4857-b524-004d7d3fe3fc tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Image limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1126.175393] env[62070]: DEBUG nova.virt.hardware [None req-1ffa7e81-5d7a-4857-b524-004d7d3fe3fc tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Flavor pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1126.175556] env[62070]: DEBUG nova.virt.hardware [None req-1ffa7e81-5d7a-4857-b524-004d7d3fe3fc tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Image pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1126.175711] env[62070]: DEBUG nova.virt.hardware [None req-1ffa7e81-5d7a-4857-b524-004d7d3fe3fc tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1126.175916] env[62070]: DEBUG nova.virt.hardware [None req-1ffa7e81-5d7a-4857-b524-004d7d3fe3fc tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1126.176094] env[62070]: DEBUG nova.virt.hardware [None req-1ffa7e81-5d7a-4857-b524-004d7d3fe3fc tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62070) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 1126.176267] env[62070]: DEBUG nova.virt.hardware [None req-1ffa7e81-5d7a-4857-b524-004d7d3fe3fc tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Got 1 possible topologies {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1126.176433] env[62070]: DEBUG nova.virt.hardware [None req-1ffa7e81-5d7a-4857-b524-004d7d3fe3fc tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1126.176609] env[62070]: DEBUG nova.virt.hardware [None req-1ffa7e81-5d7a-4857-b524-004d7d3fe3fc tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1126.177377] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3e62f3e1-0451-462d-bef5-849ee2a7bd19 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.185172] env[62070]: DEBUG oslo_vmware.api [None req-1ffa7e81-5d7a-4857-b524-004d7d3fe3fc tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Waiting for the task: (returnval){ [ 1126.185172] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]521342ed-bdee-a793-9309-0fd1799f01a6" [ 1126.185172] env[62070]: _type = "Task" [ 1126.185172] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1126.198216] env[62070]: DEBUG oslo_vmware.api [None req-1ffa7e81-5d7a-4857-b524-004d7d3fe3fc tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]521342ed-bdee-a793-9309-0fd1799f01a6, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1126.239461] env[62070]: DEBUG nova.compute.manager [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] [instance: 559eee5b-0834-4dcf-a436-5e58644c7a3b] Instance has had 0 of 5 cleanup attempts {{(pid=62070) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1126.469634] env[62070]: DEBUG oslo_concurrency.lockutils [None req-895fa041-4a30-4097-9383-c3cb465b2e41 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1126.469934] env[62070]: DEBUG oslo_concurrency.lockutils [None req-895fa041-4a30-4097-9383-c3cb465b2e41 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1126.470193] env[62070]: DEBUG nova.objects.instance [None req-895fa041-4a30-4097-9383-c3cb465b2e41 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Lazy-loading 'resources' on Instance uuid 20c4fabc-fc9b-49c7-ab28-fa092ad66038 {{(pid=62070) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1126.695585] env[62070]: DEBUG oslo_vmware.api [None req-1ffa7e81-5d7a-4857-b524-004d7d3fe3fc tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]521342ed-bdee-a793-9309-0fd1799f01a6, 'name': SearchDatastore_Task, 'duration_secs': 0.00759} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1126.700900] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-1ffa7e81-5d7a-4857-b524-004d7d3fe3fc tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: fec23dd4-e956-42dd-b9a2-c8577f77cd81] Reconfiguring VM instance instance-00000067 to detach disk 2000 {{(pid=62070) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1126.701296] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-95146bfa-d6ae-4682-bde3-f7c3a6a04e35 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.721860] env[62070]: DEBUG oslo_vmware.api [None req-1ffa7e81-5d7a-4857-b524-004d7d3fe3fc tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Waiting for the task: (returnval){ [ 1126.721860] env[62070]: value = "task-1122512" [ 1126.721860] env[62070]: _type = "Task" [ 1126.721860] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1126.730243] env[62070]: DEBUG oslo_vmware.api [None req-1ffa7e81-5d7a-4857-b524-004d7d3fe3fc tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Task: {'id': task-1122512, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1126.744095] env[62070]: DEBUG nova.compute.manager [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] [instance: 21bcb1a6-833b-48f3-8ee2-0e49c64a104f] Instance has had 0 of 5 cleanup attempts {{(pid=62070) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1126.973946] env[62070]: DEBUG nova.objects.instance [None req-895fa041-4a30-4097-9383-c3cb465b2e41 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Lazy-loading 'numa_topology' on Instance uuid 20c4fabc-fc9b-49c7-ab28-fa092ad66038 {{(pid=62070) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1127.232491] env[62070]: DEBUG oslo_vmware.api [None req-1ffa7e81-5d7a-4857-b524-004d7d3fe3fc tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Task: {'id': task-1122512, 'name': ReconfigVM_Task, 'duration_secs': 0.268937} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1127.232864] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-1ffa7e81-5d7a-4857-b524-004d7d3fe3fc tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: fec23dd4-e956-42dd-b9a2-c8577f77cd81] Reconfigured VM instance instance-00000067 to detach disk 2000 {{(pid=62070) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1127.233719] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdd98286-7ced-442c-9097-74fd21b21ae1 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.250231] env[62070]: DEBUG nova.compute.manager [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] [instance: 71c98ac8-4149-448b-bf0c-3bfdcc8f50ef] Instance has had 0 of 5 cleanup attempts {{(pid=62070) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1127.260282] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-1ffa7e81-5d7a-4857-b524-004d7d3fe3fc tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: fec23dd4-e956-42dd-b9a2-c8577f77cd81] Reconfiguring VM instance instance-00000067 to attach disk [datastore1] fec23dd4-e956-42dd-b9a2-c8577f77cd81/fec23dd4-e956-42dd-b9a2-c8577f77cd81.vmdk or device None with type thin {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1127.260947] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5b7b2d90-959b-43c0-8559-6b2094110f1b {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.281840] env[62070]: DEBUG oslo_vmware.api [None req-1ffa7e81-5d7a-4857-b524-004d7d3fe3fc tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Waiting for the task: (returnval){ [ 1127.281840] env[62070]: value = "task-1122513" [ 1127.281840] env[62070]: _type = "Task" [ 1127.281840] env[62070]: } to complete. 
{{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1127.291983] env[62070]: DEBUG oslo_vmware.api [None req-1ffa7e81-5d7a-4857-b524-004d7d3fe3fc tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Task: {'id': task-1122513, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1127.295923] env[62070]: DEBUG nova.compute.manager [req-606e5893-7ca9-43a8-9d5b-553d37c406e0 req-10d4e0e7-0f91-4703-a7b7-8b26ee5c9e73 service nova] [instance: 20c4fabc-fc9b-49c7-ab28-fa092ad66038] Received event network-changed-0eb3c7d4-224c-4d94-9450-0623a1e1b162 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1127.296229] env[62070]: DEBUG nova.compute.manager [req-606e5893-7ca9-43a8-9d5b-553d37c406e0 req-10d4e0e7-0f91-4703-a7b7-8b26ee5c9e73 service nova] [instance: 20c4fabc-fc9b-49c7-ab28-fa092ad66038] Refreshing instance network info cache due to event network-changed-0eb3c7d4-224c-4d94-9450-0623a1e1b162. {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1127.296426] env[62070]: DEBUG oslo_concurrency.lockutils [req-606e5893-7ca9-43a8-9d5b-553d37c406e0 req-10d4e0e7-0f91-4703-a7b7-8b26ee5c9e73 service nova] Acquiring lock "refresh_cache-20c4fabc-fc9b-49c7-ab28-fa092ad66038" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1127.296634] env[62070]: DEBUG oslo_concurrency.lockutils [req-606e5893-7ca9-43a8-9d5b-553d37c406e0 req-10d4e0e7-0f91-4703-a7b7-8b26ee5c9e73 service nova] Acquired lock "refresh_cache-20c4fabc-fc9b-49c7-ab28-fa092ad66038" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1127.296814] env[62070]: DEBUG nova.network.neutron [req-606e5893-7ca9-43a8-9d5b-553d37c406e0 req-10d4e0e7-0f91-4703-a7b7-8b26ee5c9e73 service nova] [instance: 20c4fabc-fc9b-49c7-ab28-fa092ad66038] Refreshing network info cache for port 0eb3c7d4-224c-4d94-9450-0623a1e1b162 {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1127.477315] env[62070]: DEBUG nova.objects.base [None req-895fa041-4a30-4097-9383-c3cb465b2e41 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Object Instance<20c4fabc-fc9b-49c7-ab28-fa092ad66038> lazy-loaded attributes: resources,numa_topology {{(pid=62070) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 1127.525987] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1095e9df-112e-4fe6-b1f2-6643ad6bfcb1 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.534154] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81e78d58-8935-40eb-a1c0-06e34ef97ee0 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.567954] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c0cdbd7-0c36-436a-bf9b-84bd68326e0e {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.575525] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-2b07d23c-d256-4f89-a907-c1678705574a {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.588148] env[62070]: DEBUG nova.compute.provider_tree [None req-895fa041-4a30-4097-9383-c3cb465b2e41 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1127.761822] env[62070]: DEBUG nova.compute.manager [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] [instance: 58146b84-7589-4f21-bdab-605cee535e55] Instance has had 0 of 5 cleanup attempts {{(pid=62070) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1127.793983] env[62070]: DEBUG oslo_vmware.api [None req-1ffa7e81-5d7a-4857-b524-004d7d3fe3fc tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Task: {'id': task-1122513, 'name': ReconfigVM_Task, 'duration_secs': 0.412545} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1127.794407] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-1ffa7e81-5d7a-4857-b524-004d7d3fe3fc tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: fec23dd4-e956-42dd-b9a2-c8577f77cd81] Reconfigured VM instance instance-00000067 to attach disk [datastore1] fec23dd4-e956-42dd-b9a2-c8577f77cd81/fec23dd4-e956-42dd-b9a2-c8577f77cd81.vmdk or device None with type thin {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1127.795143] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bebec3b3-3331-431f-a0e7-50575d9e5a74 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.814518] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1f2d683-3f86-4c68-83f0-b7657d51c2f5 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.833368] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-815c07c3-85f9-48b7-94df-08aa9cbcbbe0 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.850232] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edfa0bf2-abf0-4ce3-9ee8-3d632a267cbc {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.856324] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-1ffa7e81-5d7a-4857-b524-004d7d3fe3fc tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: fec23dd4-e956-42dd-b9a2-c8577f77cd81] Powering on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1127.858534] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-44072204-a078-4cd5-ad41-51f67c9bafef {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.865256] env[62070]: DEBUG oslo_vmware.api [None 
req-1ffa7e81-5d7a-4857-b524-004d7d3fe3fc tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Waiting for the task: (returnval){ [ 1127.865256] env[62070]: value = "task-1122514" [ 1127.865256] env[62070]: _type = "Task" [ 1127.865256] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1127.872123] env[62070]: DEBUG oslo_vmware.api [None req-1ffa7e81-5d7a-4857-b524-004d7d3fe3fc tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Task: {'id': task-1122514, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1128.067774] env[62070]: DEBUG nova.network.neutron [req-606e5893-7ca9-43a8-9d5b-553d37c406e0 req-10d4e0e7-0f91-4703-a7b7-8b26ee5c9e73 service nova] [instance: 20c4fabc-fc9b-49c7-ab28-fa092ad66038] Updated VIF entry in instance network info cache for port 0eb3c7d4-224c-4d94-9450-0623a1e1b162. {{(pid=62070) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1128.068166] env[62070]: DEBUG nova.network.neutron [req-606e5893-7ca9-43a8-9d5b-553d37c406e0 req-10d4e0e7-0f91-4703-a7b7-8b26ee5c9e73 service nova] [instance: 20c4fabc-fc9b-49c7-ab28-fa092ad66038] Updating instance_info_cache with network_info: [{"id": "0eb3c7d4-224c-4d94-9450-0623a1e1b162", "address": "fa:16:3e:d9:a2:6e", "network": {"id": "b9ef8f6c-bbd6-409d-a591-ad584e5e028f", "bridge": null, "label": "tempest-AttachVolumeShelveTestJSON-599171324-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.143", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ca25fba006b740f2a86fe10e4abe9400", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tap0eb3c7d4-22", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1128.091252] env[62070]: DEBUG nova.scheduler.client.report [None req-895fa041-4a30-4097-9383-c3cb465b2e41 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1128.137826] env[62070]: DEBUG oslo_concurrency.lockutils [None req-94d1c2d2-1965-464f-a9bf-5ad1d56ca955 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Acquiring lock "20c4fabc-fc9b-49c7-ab28-fa092ad66038" by 
"nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1128.267386] env[62070]: DEBUG nova.compute.manager [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] [instance: fb054a32-c1aa-4884-a087-da5ad34cf3c4] Instance has had 0 of 5 cleanup attempts {{(pid=62070) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1128.374804] env[62070]: DEBUG oslo_vmware.api [None req-1ffa7e81-5d7a-4857-b524-004d7d3fe3fc tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Task: {'id': task-1122514, 'name': PowerOnVM_Task, 'duration_secs': 0.370887} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1128.376024] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-1ffa7e81-5d7a-4857-b524-004d7d3fe3fc tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: fec23dd4-e956-42dd-b9a2-c8577f77cd81] Powered on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1128.571846] env[62070]: DEBUG oslo_concurrency.lockutils [req-606e5893-7ca9-43a8-9d5b-553d37c406e0 req-10d4e0e7-0f91-4703-a7b7-8b26ee5c9e73 service nova] Releasing lock "refresh_cache-20c4fabc-fc9b-49c7-ab28-fa092ad66038" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1128.598321] env[62070]: DEBUG oslo_concurrency.lockutils [None req-895fa041-4a30-4097-9383-c3cb465b2e41 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.126s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1128.770880] env[62070]: DEBUG nova.compute.manager [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] [instance: 4bba7448-69f7-4764-9ae6-eb6585f71515] Instance has had 0 of 5 cleanup attempts {{(pid=62070) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1129.107224] env[62070]: DEBUG oslo_concurrency.lockutils [None req-895fa041-4a30-4097-9383-c3cb465b2e41 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Lock "20c4fabc-fc9b-49c7-ab28-fa092ad66038" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 21.361s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1129.108879] env[62070]: DEBUG oslo_concurrency.lockutils [None req-94d1c2d2-1965-464f-a9bf-5ad1d56ca955 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Lock "20c4fabc-fc9b-49c7-ab28-fa092ad66038" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 0.971s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1129.109519] env[62070]: INFO nova.compute.manager [None req-94d1c2d2-1965-464f-a9bf-5ad1d56ca955 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] [instance: 20c4fabc-fc9b-49c7-ab28-fa092ad66038] Unshelving [ 1129.275585] env[62070]: DEBUG nova.compute.manager [None 
req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] [instance: 61ab347d-1342-4f59-8955-10d575993b77] Instance has had 0 of 5 cleanup attempts {{(pid=62070) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1129.386539] env[62070]: INFO nova.compute.manager [None req-1ffa7e81-5d7a-4857-b524-004d7d3fe3fc tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: fec23dd4-e956-42dd-b9a2-c8577f77cd81] Updating instance to original state: 'active' [ 1129.779464] env[62070]: DEBUG nova.compute.manager [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] [instance: c16d175c-0b23-4f72-bdb0-844c6f80fd32] Instance has had 0 of 5 cleanup attempts {{(pid=62070) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1130.123306] env[62070]: DEBUG nova.compute.utils [None req-94d1c2d2-1965-464f-a9bf-5ad1d56ca955 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Using /dev/sd instead of None {{(pid=62070) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1130.286121] env[62070]: DEBUG nova.compute.manager [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] [instance: e5deccf6-f967-4e3c-bee0-2e1ad0bb4560] Instance has had 0 of 5 cleanup attempts {{(pid=62070) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1130.628028] env[62070]: INFO nova.virt.block_device [None req-94d1c2d2-1965-464f-a9bf-5ad1d56ca955 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] [instance: 20c4fabc-fc9b-49c7-ab28-fa092ad66038] Booting with volume a9b04c95-bddc-41c3-99a5-c067625df8d2 at /dev/sdb [ 1130.659810] env[62070]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a2ffc34d-c820-46e7-9b48-d44eea6cacc3 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.669705] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab9cedd4-c58c-478e-b500-223be5d782e3 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.693622] env[62070]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-43d178cf-b656-41d6-bc61-4959acca7dea {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.701644] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7832896e-c945-4e7a-b8a8-aa1a10634406 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.724882] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0dd4e754-4a4b-47ba-849d-02dc2390cf25 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.731052] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ec69e85-00aa-4ba3-b9a2-caf6ffb2940a {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.743539] env[62070]: DEBUG nova.virt.block_device [None req-94d1c2d2-1965-464f-a9bf-5ad1d56ca955 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] 
[instance: 20c4fabc-fc9b-49c7-ab28-fa092ad66038] Updating existing volume attachment record: 5b24c746-e43b-478b-a770-80edc9d4c61f {{(pid=62070) _volume_attach /opt/stack/nova/nova/virt/block_device.py:679}} [ 1130.789473] env[62070]: DEBUG nova.compute.manager [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] [instance: 27987ff6-77c9-4876-8b39-dcc20ce4158a] Instance has had 0 of 5 cleanup attempts {{(pid=62070) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1130.904348] env[62070]: DEBUG oslo_concurrency.lockutils [None req-49b921aa-4164-4da6-9059-c4e83445f39b tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Acquiring lock "fec23dd4-e956-42dd-b9a2-c8577f77cd81" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1130.904666] env[62070]: DEBUG oslo_concurrency.lockutils [None req-49b921aa-4164-4da6-9059-c4e83445f39b tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Lock "fec23dd4-e956-42dd-b9a2-c8577f77cd81" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1130.904942] env[62070]: DEBUG oslo_concurrency.lockutils [None req-49b921aa-4164-4da6-9059-c4e83445f39b tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Acquiring lock "fec23dd4-e956-42dd-b9a2-c8577f77cd81-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1130.905191] env[62070]: DEBUG oslo_concurrency.lockutils [None req-49b921aa-4164-4da6-9059-c4e83445f39b tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Lock "fec23dd4-e956-42dd-b9a2-c8577f77cd81-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1130.905411] env[62070]: DEBUG oslo_concurrency.lockutils [None req-49b921aa-4164-4da6-9059-c4e83445f39b tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Lock "fec23dd4-e956-42dd-b9a2-c8577f77cd81-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1130.907920] env[62070]: INFO nova.compute.manager [None req-49b921aa-4164-4da6-9059-c4e83445f39b tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: fec23dd4-e956-42dd-b9a2-c8577f77cd81] Terminating instance [ 1130.909957] env[62070]: DEBUG nova.compute.manager [None req-49b921aa-4164-4da6-9059-c4e83445f39b tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: fec23dd4-e956-42dd-b9a2-c8577f77cd81] Start destroying the instance on the hypervisor. 
{{(pid=62070) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1130.910198] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-49b921aa-4164-4da6-9059-c4e83445f39b tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: fec23dd4-e956-42dd-b9a2-c8577f77cd81] Destroying instance {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1130.911141] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c16acab3-053f-4a93-869b-68ce85b999c7 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.920881] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-49b921aa-4164-4da6-9059-c4e83445f39b tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: fec23dd4-e956-42dd-b9a2-c8577f77cd81] Powering off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1130.921146] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-99d0005c-5369-4330-85d4-ace9d07258a7 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.929372] env[62070]: DEBUG oslo_vmware.api [None req-49b921aa-4164-4da6-9059-c4e83445f39b tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Waiting for the task: (returnval){ [ 1130.929372] env[62070]: value = "task-1122519" [ 1130.929372] env[62070]: _type = "Task" [ 1130.929372] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1130.937552] env[62070]: DEBUG oslo_vmware.api [None req-49b921aa-4164-4da6-9059-c4e83445f39b tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Task: {'id': task-1122519, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1131.292831] env[62070]: DEBUG nova.compute.manager [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] [instance: 65fe3720-95cb-4620-b1c7-eae9e3bc3943] Instance has had 0 of 5 cleanup attempts {{(pid=62070) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1131.439813] env[62070]: DEBUG oslo_vmware.api [None req-49b921aa-4164-4da6-9059-c4e83445f39b tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Task: {'id': task-1122519, 'name': PowerOffVM_Task, 'duration_secs': 0.204193} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1131.440099] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-49b921aa-4164-4da6-9059-c4e83445f39b tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: fec23dd4-e956-42dd-b9a2-c8577f77cd81] Powered off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1131.440277] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-49b921aa-4164-4da6-9059-c4e83445f39b tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: fec23dd4-e956-42dd-b9a2-c8577f77cd81] Unregistering the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1131.440531] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e9c9843d-ee7a-49dc-b739-f367fc0a66a1 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.505619] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-49b921aa-4164-4da6-9059-c4e83445f39b tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: fec23dd4-e956-42dd-b9a2-c8577f77cd81] Unregistered the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1131.505854] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-49b921aa-4164-4da6-9059-c4e83445f39b tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: fec23dd4-e956-42dd-b9a2-c8577f77cd81] Deleting contents of the VM from datastore datastore1 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1131.506120] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-49b921aa-4164-4da6-9059-c4e83445f39b tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Deleting the datastore file [datastore1] fec23dd4-e956-42dd-b9a2-c8577f77cd81 {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1131.506449] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-cf72a4f4-f486-4bd8-86ce-937fa708afa5 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.513119] env[62070]: DEBUG oslo_vmware.api [None req-49b921aa-4164-4da6-9059-c4e83445f39b tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Waiting for the task: (returnval){ [ 1131.513119] env[62070]: value = "task-1122523" [ 1131.513119] env[62070]: _type = "Task" [ 1131.513119] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1131.520906] env[62070]: DEBUG oslo_vmware.api [None req-49b921aa-4164-4da6-9059-c4e83445f39b tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Task: {'id': task-1122523, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1131.796179] env[62070]: DEBUG nova.compute.manager [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] [instance: e74fd58c-cfa8-45c4-8f02-96234b4a9192] Instance has had 0 of 5 cleanup attempts {{(pid=62070) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1132.022776] env[62070]: DEBUG oslo_vmware.api [None req-49b921aa-4164-4da6-9059-c4e83445f39b tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Task: {'id': task-1122523, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.161735} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1132.023058] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-49b921aa-4164-4da6-9059-c4e83445f39b tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Deleted the datastore file {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1132.023294] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-49b921aa-4164-4da6-9059-c4e83445f39b tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: fec23dd4-e956-42dd-b9a2-c8577f77cd81] Deleted contents of the VM from datastore datastore1 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1132.023540] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-49b921aa-4164-4da6-9059-c4e83445f39b tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: fec23dd4-e956-42dd-b9a2-c8577f77cd81] Instance destroyed {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1132.023825] env[62070]: INFO nova.compute.manager [None req-49b921aa-4164-4da6-9059-c4e83445f39b tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: fec23dd4-e956-42dd-b9a2-c8577f77cd81] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1132.024151] env[62070]: DEBUG oslo.service.loopingcall [None req-49b921aa-4164-4da6-9059-c4e83445f39b tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1132.024365] env[62070]: DEBUG nova.compute.manager [-] [instance: fec23dd4-e956-42dd-b9a2-c8577f77cd81] Deallocating network for instance {{(pid=62070) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1132.024462] env[62070]: DEBUG nova.network.neutron [-] [instance: fec23dd4-e956-42dd-b9a2-c8577f77cd81] deallocate_for_instance() {{(pid=62070) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1132.300011] env[62070]: DEBUG nova.compute.manager [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] [instance: 4a5f644a-1670-4c6b-a762-f87f1ee4cce5] Instance has had 0 of 5 cleanup attempts {{(pid=62070) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1132.464279] env[62070]: DEBUG nova.compute.manager [req-957c4f63-9a6d-43a4-85f2-ce532a187219 req-dcdba23e-8baa-4e97-a9c3-14851f0a2090 service nova] [instance: fec23dd4-e956-42dd-b9a2-c8577f77cd81] Received event network-vif-deleted-933a577b-8b0c-4c0d-ae12-372e4b70b7c9 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1132.464484] env[62070]: INFO nova.compute.manager [req-957c4f63-9a6d-43a4-85f2-ce532a187219 req-dcdba23e-8baa-4e97-a9c3-14851f0a2090 service nova] [instance: fec23dd4-e956-42dd-b9a2-c8577f77cd81] Neutron deleted interface 933a577b-8b0c-4c0d-ae12-372e4b70b7c9; detaching it from the instance and deleting it from the info cache [ 1132.464668] env[62070]: DEBUG nova.network.neutron [req-957c4f63-9a6d-43a4-85f2-ce532a187219 req-dcdba23e-8baa-4e97-a9c3-14851f0a2090 service nova] [instance: fec23dd4-e956-42dd-b9a2-c8577f77cd81] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1132.803258] env[62070]: DEBUG nova.compute.manager [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] [instance: 5ec9074b-1237-4404-b13c-a7ca0dbe1d43] Instance has had 0 of 5 cleanup attempts {{(pid=62070) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1132.942809] env[62070]: DEBUG nova.network.neutron [-] [instance: fec23dd4-e956-42dd-b9a2-c8577f77cd81] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1132.967323] env[62070]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a051922e-cf66-46b8-a529-1c76b9200b06 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.978531] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48a1640f-6e0e-49a1-b7d9-a983506f1bff {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.007569] env[62070]: DEBUG nova.compute.manager [req-957c4f63-9a6d-43a4-85f2-ce532a187219 req-dcdba23e-8baa-4e97-a9c3-14851f0a2090 service nova] [instance: fec23dd4-e956-42dd-b9a2-c8577f77cd81] Detach interface failed, port_id=933a577b-8b0c-4c0d-ae12-372e4b70b7c9, reason: Instance fec23dd4-e956-42dd-b9a2-c8577f77cd81 could not be found. 
{{(pid=62070) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1133.307345] env[62070]: DEBUG nova.compute.manager [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] [instance: 3d22f50a-e1b7-48f9-a044-df64d01dfeb4] Instance has had 0 of 5 cleanup attempts {{(pid=62070) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1133.445922] env[62070]: INFO nova.compute.manager [-] [instance: fec23dd4-e956-42dd-b9a2-c8577f77cd81] Took 1.42 seconds to deallocate network for instance. [ 1133.812489] env[62070]: DEBUG nova.compute.manager [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] [instance: 328fbc92-8162-4e12-a02d-6e9cafe0c365] Instance has had 0 of 5 cleanup attempts {{(pid=62070) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1133.953718] env[62070]: DEBUG oslo_concurrency.lockutils [None req-49b921aa-4164-4da6-9059-c4e83445f39b tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1133.953718] env[62070]: DEBUG oslo_concurrency.lockutils [None req-49b921aa-4164-4da6-9059-c4e83445f39b tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1133.953896] env[62070]: DEBUG oslo_concurrency.lockutils [None req-49b921aa-4164-4da6-9059-c4e83445f39b tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1133.976313] env[62070]: INFO nova.scheduler.client.report [None req-49b921aa-4164-4da6-9059-c4e83445f39b tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Deleted allocations for instance fec23dd4-e956-42dd-b9a2-c8577f77cd81 [ 1134.316010] env[62070]: DEBUG nova.compute.manager [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] [instance: efef4aac-5b74-4a41-9f74-3d4cb4f80cdb] Instance has had 0 of 5 cleanup attempts {{(pid=62070) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1134.484120] env[62070]: DEBUG oslo_concurrency.lockutils [None req-49b921aa-4164-4da6-9059-c4e83445f39b tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Lock "fec23dd4-e956-42dd-b9a2-c8577f77cd81" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 3.579s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1134.819572] env[62070]: DEBUG nova.compute.manager [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] [instance: c3c6e93c-80be-4e71-87fb-2ff8db8d30fe] Instance has had 0 of 5 cleanup attempts {{(pid=62070) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1135.322675] env[62070]: DEBUG nova.compute.manager [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] [instance: f75ed36e-16c8-4a6b-bd39-eb4057ef0691] Instance has 
had 0 of 5 cleanup attempts {{(pid=62070) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1135.780869] env[62070]: DEBUG oslo_concurrency.lockutils [None req-59c2ba03-a8ac-43ac-99d7-177e6a741fc2 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Acquiring lock "9a37cf1a-fd25-48b9-923d-75a95857101b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1135.780869] env[62070]: DEBUG oslo_concurrency.lockutils [None req-59c2ba03-a8ac-43ac-99d7-177e6a741fc2 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Lock "9a37cf1a-fd25-48b9-923d-75a95857101b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1135.825263] env[62070]: DEBUG nova.compute.manager [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] [instance: 2368b649-f931-454c-92cc-971df4155d90] Instance has had 0 of 5 cleanup attempts {{(pid=62070) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1136.283217] env[62070]: DEBUG nova.compute.manager [None req-59c2ba03-a8ac-43ac-99d7-177e6a741fc2 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 9a37cf1a-fd25-48b9-923d-75a95857101b] Starting instance... {{(pid=62070) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1136.327841] env[62070]: DEBUG nova.compute.manager [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] [instance: 10672096-00ba-4481-8ab3-085a185076db] Instance has had 0 of 5 cleanup attempts {{(pid=62070) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1136.340175] env[62070]: DEBUG oslo_concurrency.lockutils [None req-94d1c2d2-1965-464f-a9bf-5ad1d56ca955 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1136.340516] env[62070]: DEBUG oslo_concurrency.lockutils [None req-94d1c2d2-1965-464f-a9bf-5ad1d56ca955 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1136.340852] env[62070]: DEBUG nova.objects.instance [None req-94d1c2d2-1965-464f-a9bf-5ad1d56ca955 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Lazy-loading 'pci_requests' on Instance uuid 20c4fabc-fc9b-49c7-ab28-fa092ad66038 {{(pid=62070) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1136.801453] env[62070]: DEBUG oslo_concurrency.lockutils [None req-59c2ba03-a8ac-43ac-99d7-177e6a741fc2 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62070) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1136.831650] env[62070]: DEBUG nova.compute.manager [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] [instance: b7fdf23e-1e39-4745-ae84-38b7fa89aa5d] Instance has had 0 of 5 cleanup attempts {{(pid=62070) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1136.845411] env[62070]: DEBUG nova.objects.instance [None req-94d1c2d2-1965-464f-a9bf-5ad1d56ca955 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Lazy-loading 'numa_topology' on Instance uuid 20c4fabc-fc9b-49c7-ab28-fa092ad66038 {{(pid=62070) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1137.335790] env[62070]: DEBUG nova.compute.manager [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] [instance: 1ce155c8-9a10-4eff-b428-31889aa8f638] Instance has had 0 of 5 cleanup attempts {{(pid=62070) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1137.349557] env[62070]: INFO nova.compute.claims [None req-94d1c2d2-1965-464f-a9bf-5ad1d56ca955 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] [instance: 20c4fabc-fc9b-49c7-ab28-fa092ad66038] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1137.838843] env[62070]: DEBUG nova.compute.manager [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] [instance: 242cf24f-f9e0-49ca-9b3e-a2b88b3cdcdc] Instance has had 0 of 5 cleanup attempts {{(pid=62070) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1138.342126] env[62070]: DEBUG nova.compute.manager [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] [instance: 359ae9f2-a907-459e-99b9-3e043d5d015f] Instance has had 0 of 5 cleanup attempts {{(pid=62070) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1138.406450] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82bd0195-0bfd-4aa2-b2ee-d40f6ab01914 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.414431] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99ab30b4-589f-4736-83ba-5668537bf1f1 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.443915] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b4fbec2-aaf0-469e-adbd-3458b87abcf2 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.451201] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30b4a39d-6253-4539-ac88-9949749c0674 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.464104] env[62070]: DEBUG nova.compute.provider_tree [None req-94d1c2d2-1965-464f-a9bf-5ad1d56ca955 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1138.845913] env[62070]: DEBUG nova.compute.manager [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] [instance: 
748c94c7-1233-44f4-a71a-176b26518399] Instance has had 0 of 5 cleanup attempts {{(pid=62070) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1138.966639] env[62070]: DEBUG nova.scheduler.client.report [None req-94d1c2d2-1965-464f-a9bf-5ad1d56ca955 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1139.352628] env[62070]: DEBUG nova.compute.manager [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] [instance: dd5d90e8-964a-4e1c-a98a-bcba37a1d79e] Instance has had 0 of 5 cleanup attempts {{(pid=62070) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1139.471446] env[62070]: DEBUG oslo_concurrency.lockutils [None req-94d1c2d2-1965-464f-a9bf-5ad1d56ca955 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.131s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1139.474084] env[62070]: DEBUG oslo_concurrency.lockutils [None req-59c2ba03-a8ac-43ac-99d7-177e6a741fc2 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.673s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1139.475638] env[62070]: INFO nova.compute.claims [None req-59c2ba03-a8ac-43ac-99d7-177e6a741fc2 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 9a37cf1a-fd25-48b9-923d-75a95857101b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1139.504803] env[62070]: INFO nova.network.neutron [None req-94d1c2d2-1965-464f-a9bf-5ad1d56ca955 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] [instance: 20c4fabc-fc9b-49c7-ab28-fa092ad66038] Updating port 0eb3c7d4-224c-4d94-9450-0623a1e1b162 with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 1139.856458] env[62070]: DEBUG nova.compute.manager [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] [instance: a3c42653-9a4b-42d3-bc38-8d46d95c8f64] Instance has had 0 of 5 cleanup attempts {{(pid=62070) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1140.360589] env[62070]: DEBUG nova.compute.manager [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] [instance: 963feecc-ff58-4cbb-8d6f-3f9035337087] Instance has had 0 of 5 cleanup attempts {{(pid=62070) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1140.542815] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bde51be-cf5b-4e0b-affd-5cb8e2eacfbf {{(pid=62070) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.550958] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a91ed6ef-e8a6-46ba-8192-070216bc2a22 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.580816] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39b95ecc-6a23-4369-86fa-a71bf9383f06 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.588520] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0eb3c2fb-6a38-40c3-8c9d-2d46a151c22f {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.602668] env[62070]: DEBUG nova.compute.provider_tree [None req-59c2ba03-a8ac-43ac-99d7-177e6a741fc2 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1140.864026] env[62070]: DEBUG nova.compute.manager [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] [instance: d0914f90-200c-4715-aaab-54beacf339b9] Instance has had 0 of 5 cleanup attempts {{(pid=62070) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1141.108208] env[62070]: DEBUG nova.scheduler.client.report [None req-59c2ba03-a8ac-43ac-99d7-177e6a741fc2 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1141.132134] env[62070]: DEBUG nova.compute.manager [req-b0ee7818-a4fb-4c5a-a2b0-65c49a166306 req-b2970cab-f732-42d2-aa78-36897418d5e8 service nova] [instance: 20c4fabc-fc9b-49c7-ab28-fa092ad66038] Received event network-vif-plugged-0eb3c7d4-224c-4d94-9450-0623a1e1b162 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1141.132134] env[62070]: DEBUG oslo_concurrency.lockutils [req-b0ee7818-a4fb-4c5a-a2b0-65c49a166306 req-b2970cab-f732-42d2-aa78-36897418d5e8 service nova] Acquiring lock "20c4fabc-fc9b-49c7-ab28-fa092ad66038-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1141.132134] env[62070]: DEBUG oslo_concurrency.lockutils [req-b0ee7818-a4fb-4c5a-a2b0-65c49a166306 req-b2970cab-f732-42d2-aa78-36897418d5e8 service nova] Lock "20c4fabc-fc9b-49c7-ab28-fa092ad66038-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1141.132134] env[62070]: DEBUG oslo_concurrency.lockutils 
[req-b0ee7818-a4fb-4c5a-a2b0-65c49a166306 req-b2970cab-f732-42d2-aa78-36897418d5e8 service nova] Lock "20c4fabc-fc9b-49c7-ab28-fa092ad66038-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1141.132134] env[62070]: DEBUG nova.compute.manager [req-b0ee7818-a4fb-4c5a-a2b0-65c49a166306 req-b2970cab-f732-42d2-aa78-36897418d5e8 service nova] [instance: 20c4fabc-fc9b-49c7-ab28-fa092ad66038] No waiting events found dispatching network-vif-plugged-0eb3c7d4-224c-4d94-9450-0623a1e1b162 {{(pid=62070) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1141.132134] env[62070]: WARNING nova.compute.manager [req-b0ee7818-a4fb-4c5a-a2b0-65c49a166306 req-b2970cab-f732-42d2-aa78-36897418d5e8 service nova] [instance: 20c4fabc-fc9b-49c7-ab28-fa092ad66038] Received unexpected event network-vif-plugged-0eb3c7d4-224c-4d94-9450-0623a1e1b162 for instance with vm_state shelved_offloaded and task_state spawning. [ 1141.226250] env[62070]: DEBUG oslo_concurrency.lockutils [None req-94d1c2d2-1965-464f-a9bf-5ad1d56ca955 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Acquiring lock "refresh_cache-20c4fabc-fc9b-49c7-ab28-fa092ad66038" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1141.226451] env[62070]: DEBUG oslo_concurrency.lockutils [None req-94d1c2d2-1965-464f-a9bf-5ad1d56ca955 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Acquired lock "refresh_cache-20c4fabc-fc9b-49c7-ab28-fa092ad66038" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1141.226634] env[62070]: DEBUG nova.network.neutron [None req-94d1c2d2-1965-464f-a9bf-5ad1d56ca955 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] [instance: 20c4fabc-fc9b-49c7-ab28-fa092ad66038] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1141.367434] env[62070]: DEBUG nova.compute.manager [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] [instance: 71aead12-a182-40a7-b5a9-91c01271b800] Instance has had 0 of 5 cleanup attempts {{(pid=62070) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1141.611942] env[62070]: DEBUG oslo_concurrency.lockutils [None req-59c2ba03-a8ac-43ac-99d7-177e6a741fc2 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.138s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1141.612515] env[62070]: DEBUG nova.compute.manager [None req-59c2ba03-a8ac-43ac-99d7-177e6a741fc2 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 9a37cf1a-fd25-48b9-923d-75a95857101b] Start building networks asynchronously for instance. 
{{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1141.871439] env[62070]: DEBUG nova.compute.manager [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] [instance: 0ac963b1-120a-464b-8228-3393135dd182] Instance has had 0 of 5 cleanup attempts {{(pid=62070) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1141.932214] env[62070]: DEBUG nova.network.neutron [None req-94d1c2d2-1965-464f-a9bf-5ad1d56ca955 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] [instance: 20c4fabc-fc9b-49c7-ab28-fa092ad66038] Updating instance_info_cache with network_info: [{"id": "0eb3c7d4-224c-4d94-9450-0623a1e1b162", "address": "fa:16:3e:d9:a2:6e", "network": {"id": "b9ef8f6c-bbd6-409d-a591-ad584e5e028f", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-599171324-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.143", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ca25fba006b740f2a86fe10e4abe9400", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa8c2f93-f287-41b3-adb6-4942a7ea2a0b", "external-id": "nsx-vlan-transportzone-363", "segmentation_id": 363, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0eb3c7d4-22", "ovs_interfaceid": "0eb3c7d4-224c-4d94-9450-0623a1e1b162", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1142.117793] env[62070]: DEBUG nova.compute.utils [None req-59c2ba03-a8ac-43ac-99d7-177e6a741fc2 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Using /dev/sd instead of None {{(pid=62070) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1142.119291] env[62070]: DEBUG nova.compute.manager [None req-59c2ba03-a8ac-43ac-99d7-177e6a741fc2 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 9a37cf1a-fd25-48b9-923d-75a95857101b] Allocating IP information in the background. 
{{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1142.119463] env[62070]: DEBUG nova.network.neutron [None req-59c2ba03-a8ac-43ac-99d7-177e6a741fc2 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 9a37cf1a-fd25-48b9-923d-75a95857101b] allocate_for_instance() {{(pid=62070) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1142.158086] env[62070]: DEBUG nova.policy [None req-59c2ba03-a8ac-43ac-99d7-177e6a741fc2 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0ab707a4862f42199fc2a91733563cde', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f29ac48ab6544ec0bd1d210aec05dbc5', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62070) authorize /opt/stack/nova/nova/policy.py:201}} [ 1142.374338] env[62070]: DEBUG oslo_service.periodic_task [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=62070) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1142.422459] env[62070]: DEBUG nova.network.neutron [None req-59c2ba03-a8ac-43ac-99d7-177e6a741fc2 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 9a37cf1a-fd25-48b9-923d-75a95857101b] Successfully created port: fe7b579a-99c3-40b9-a975-95ddca559b4d {{(pid=62070) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1142.434381] env[62070]: DEBUG oslo_concurrency.lockutils [None req-94d1c2d2-1965-464f-a9bf-5ad1d56ca955 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Releasing lock "refresh_cache-20c4fabc-fc9b-49c7-ab28-fa092ad66038" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1142.463364] env[62070]: DEBUG nova.virt.hardware [None req-94d1c2d2-1965-464f-a9bf-5ad1d56ca955 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T09:21:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='27549ba01bc56b5527b178a423cb072b',container_format='bare',created_at=2024-10-03T09:32:35Z,direct_url=,disk_format='vmdk',id=8a6c97c1-5195-4f75-914b-69d3dbca246d,min_disk=1,min_ram=0,name='tempest-AttachVolumeShelveTestJSON-server-407047470-shelved',owner='ca25fba006b740f2a86fe10e4abe9400',properties=ImageMetaProps,protected=,size=31669248,status='active',tags=,updated_at=2024-10-03T09:32:50Z,virtual_size=,visibility=), allow threads: False {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1142.463624] env[62070]: DEBUG nova.virt.hardware [None req-94d1c2d2-1965-464f-a9bf-5ad1d56ca955 tempest-AttachVolumeShelveTestJSON-1287321211 
tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Flavor limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1142.463868] env[62070]: DEBUG nova.virt.hardware [None req-94d1c2d2-1965-464f-a9bf-5ad1d56ca955 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Image limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1142.464489] env[62070]: DEBUG nova.virt.hardware [None req-94d1c2d2-1965-464f-a9bf-5ad1d56ca955 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Flavor pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1142.464722] env[62070]: DEBUG nova.virt.hardware [None req-94d1c2d2-1965-464f-a9bf-5ad1d56ca955 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Image pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1142.464934] env[62070]: DEBUG nova.virt.hardware [None req-94d1c2d2-1965-464f-a9bf-5ad1d56ca955 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1142.465226] env[62070]: DEBUG nova.virt.hardware [None req-94d1c2d2-1965-464f-a9bf-5ad1d56ca955 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1142.465448] env[62070]: DEBUG nova.virt.hardware [None req-94d1c2d2-1965-464f-a9bf-5ad1d56ca955 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1142.465676] env[62070]: DEBUG nova.virt.hardware [None req-94d1c2d2-1965-464f-a9bf-5ad1d56ca955 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Got 1 possible topologies {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1142.465895] env[62070]: DEBUG nova.virt.hardware [None req-94d1c2d2-1965-464f-a9bf-5ad1d56ca955 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1142.466140] env[62070]: DEBUG nova.virt.hardware [None req-94d1c2d2-1965-464f-a9bf-5ad1d56ca955 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1142.467072] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3012fa7a-dcac-44a6-a1eb-4f3e8ac0d763 {{(pid=62070) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.476421] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77c112b8-67f5-48c8-9916-7cb7273ecd7d {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.491309] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-94d1c2d2-1965-464f-a9bf-5ad1d56ca955 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] [instance: 20c4fabc-fc9b-49c7-ab28-fa092ad66038] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d9:a2:6e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'aa8c2f93-f287-41b3-adb6-4942a7ea2a0b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0eb3c7d4-224c-4d94-9450-0623a1e1b162', 'vif_model': 'vmxnet3'}] {{(pid=62070) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1142.498924] env[62070]: DEBUG oslo.service.loopingcall [None req-94d1c2d2-1965-464f-a9bf-5ad1d56ca955 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1142.499257] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 20c4fabc-fc9b-49c7-ab28-fa092ad66038] Creating VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1142.499567] env[62070]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a9e6cdf2-7bdc-46db-90a4-811d871bfafb {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.525715] env[62070]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1142.525715] env[62070]: value = "task-1122530" [ 1142.525715] env[62070]: _type = "Task" [ 1142.525715] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1142.536697] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1122530, 'name': CreateVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1142.623897] env[62070]: DEBUG nova.compute.manager [None req-59c2ba03-a8ac-43ac-99d7-177e6a741fc2 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 9a37cf1a-fd25-48b9-923d-75a95857101b] Start building block device mappings for instance. {{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1143.035646] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1122530, 'name': CreateVM_Task, 'duration_secs': 0.31288} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1143.035941] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 20c4fabc-fc9b-49c7-ab28-fa092ad66038] Created VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1143.036868] env[62070]: DEBUG oslo_concurrency.lockutils [None req-94d1c2d2-1965-464f-a9bf-5ad1d56ca955 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/8a6c97c1-5195-4f75-914b-69d3dbca246d" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1143.036868] env[62070]: DEBUG oslo_concurrency.lockutils [None req-94d1c2d2-1965-464f-a9bf-5ad1d56ca955 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Acquired lock "[datastore1] devstack-image-cache_base/8a6c97c1-5195-4f75-914b-69d3dbca246d" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1143.037068] env[62070]: DEBUG oslo_concurrency.lockutils [None req-94d1c2d2-1965-464f-a9bf-5ad1d56ca955 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/8a6c97c1-5195-4f75-914b-69d3dbca246d" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1143.037327] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fef682c2-d69a-4c13-b1a5-0d44b31aa913 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.041556] env[62070]: DEBUG oslo_vmware.api [None req-94d1c2d2-1965-464f-a9bf-5ad1d56ca955 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Waiting for the task: (returnval){ [ 1143.041556] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]52b70c8c-58fe-f878-1a90-39465ba1a5c5" [ 1143.041556] env[62070]: _type = "Task" [ 1143.041556] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1143.048770] env[62070]: DEBUG oslo_vmware.api [None req-94d1c2d2-1965-464f-a9bf-5ad1d56ca955 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52b70c8c-58fe-f878-1a90-39465ba1a5c5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1143.156247] env[62070]: DEBUG nova.compute.manager [req-1b3b941d-3a77-49e9-8fc9-2093f22d7dc2 req-889efb98-c240-4813-abb3-7b1f4d68e666 service nova] [instance: 20c4fabc-fc9b-49c7-ab28-fa092ad66038] Received event network-changed-0eb3c7d4-224c-4d94-9450-0623a1e1b162 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1143.157037] env[62070]: DEBUG nova.compute.manager [req-1b3b941d-3a77-49e9-8fc9-2093f22d7dc2 req-889efb98-c240-4813-abb3-7b1f4d68e666 service nova] [instance: 20c4fabc-fc9b-49c7-ab28-fa092ad66038] Refreshing instance network info cache due to event network-changed-0eb3c7d4-224c-4d94-9450-0623a1e1b162. 
{{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1143.157037] env[62070]: DEBUG oslo_concurrency.lockutils [req-1b3b941d-3a77-49e9-8fc9-2093f22d7dc2 req-889efb98-c240-4813-abb3-7b1f4d68e666 service nova] Acquiring lock "refresh_cache-20c4fabc-fc9b-49c7-ab28-fa092ad66038" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1143.157214] env[62070]: DEBUG oslo_concurrency.lockutils [req-1b3b941d-3a77-49e9-8fc9-2093f22d7dc2 req-889efb98-c240-4813-abb3-7b1f4d68e666 service nova] Acquired lock "refresh_cache-20c4fabc-fc9b-49c7-ab28-fa092ad66038" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1143.157259] env[62070]: DEBUG nova.network.neutron [req-1b3b941d-3a77-49e9-8fc9-2093f22d7dc2 req-889efb98-c240-4813-abb3-7b1f4d68e666 service nova] [instance: 20c4fabc-fc9b-49c7-ab28-fa092ad66038] Refreshing network info cache for port 0eb3c7d4-224c-4d94-9450-0623a1e1b162 {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1143.551615] env[62070]: DEBUG oslo_concurrency.lockutils [None req-94d1c2d2-1965-464f-a9bf-5ad1d56ca955 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Releasing lock "[datastore1] devstack-image-cache_base/8a6c97c1-5195-4f75-914b-69d3dbca246d" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1143.551868] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-94d1c2d2-1965-464f-a9bf-5ad1d56ca955 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] [instance: 20c4fabc-fc9b-49c7-ab28-fa092ad66038] Processing image 8a6c97c1-5195-4f75-914b-69d3dbca246d {{(pid=62070) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1143.552169] env[62070]: DEBUG oslo_concurrency.lockutils [None req-94d1c2d2-1965-464f-a9bf-5ad1d56ca955 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/8a6c97c1-5195-4f75-914b-69d3dbca246d/8a6c97c1-5195-4f75-914b-69d3dbca246d.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1143.552336] env[62070]: DEBUG oslo_concurrency.lockutils [None req-94d1c2d2-1965-464f-a9bf-5ad1d56ca955 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Acquired lock "[datastore1] devstack-image-cache_base/8a6c97c1-5195-4f75-914b-69d3dbca246d/8a6c97c1-5195-4f75-914b-69d3dbca246d.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1143.552520] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-94d1c2d2-1965-464f-a9bf-5ad1d56ca955 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1143.552759] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5156ad48-06c0-4e3a-ab0d-a77cd034bf29 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.561464] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-94d1c2d2-1965-464f-a9bf-5ad1d56ca955 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1143.561666] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-94d1c2d2-1965-464f-a9bf-5ad1d56ca955 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62070) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1143.562404] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fb020299-177c-40c4-b1ff-c9e9195886e3 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.568620] env[62070]: DEBUG oslo_vmware.api [None req-94d1c2d2-1965-464f-a9bf-5ad1d56ca955 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Waiting for the task: (returnval){ [ 1143.568620] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]527a6cdf-0cf0-5840-7ffd-b9f81a860e23" [ 1143.568620] env[62070]: _type = "Task" [ 1143.568620] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1143.575714] env[62070]: DEBUG oslo_vmware.api [None req-94d1c2d2-1965-464f-a9bf-5ad1d56ca955 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]527a6cdf-0cf0-5840-7ffd-b9f81a860e23, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1143.634143] env[62070]: DEBUG nova.compute.manager [None req-59c2ba03-a8ac-43ac-99d7-177e6a741fc2 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 9a37cf1a-fd25-48b9-923d-75a95857101b] Start spawning the instance on the hypervisor. 
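
The lock/SearchDatastore/MakeDirectory sequence above for [datastore1] devstack-image-cache_base/8a6c97c1-5195-4f75-914b-69d3dbca246d is a double-checked image cache: serialize on the cached image path, check whether the VMDK already exists in the cache folder, and only fetch it (and later move it into place) if it does not. A simplified local-filesystem analogue of that flow, with a hypothetical fetch_image callable doing the actual transfer, is sketched below; it illustrates the pattern, not Nova's implementation.

    import os
    import threading
    from collections import defaultdict

    _cache_locks = defaultdict(threading.Lock)


    def ensure_cached_image(cache_dir, image_id, fetch_image):
        """Return the path of a cached image, fetching it once if missing.

        fetch_image(image_id, destination_path) is a placeholder for the
        real transfer (in the log: an ImportVApp download followed by a
        MoveVirtualDisk into the cache directory).
        """
        target = os.path.join(cache_dir, image_id, image_id + ".vmdk")
        with _cache_locks[target]:              # serialize per cached image
            if os.path.exists(target):          # SearchDatastore_Task analogue
                return target
            os.makedirs(os.path.dirname(target), exist_ok=True)  # MakeDirectory
            tmp = target + ".part"
            fetch_image(image_id, tmp)          # expensive fetch happens once
            os.rename(tmp, target)              # atomic "move into cache"
        return target
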
{{(pid=62070) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1143.654395] env[62070]: DEBUG nova.virt.hardware [None req-59c2ba03-a8ac-43ac-99d7-177e6a741fc2 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T09:21:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T09:21:17Z,direct_url=,disk_format='vmdk',id=43ea607c-7ece-4601-9b11-75c6a16aa7dd,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9d42cb2bbadf40d6b35f237f71234611',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T09:21:18Z,virtual_size=,visibility=), allow threads: False {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1143.654687] env[62070]: DEBUG nova.virt.hardware [None req-59c2ba03-a8ac-43ac-99d7-177e6a741fc2 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Flavor limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1143.654854] env[62070]: DEBUG nova.virt.hardware [None req-59c2ba03-a8ac-43ac-99d7-177e6a741fc2 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Image limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1143.655057] env[62070]: DEBUG nova.virt.hardware [None req-59c2ba03-a8ac-43ac-99d7-177e6a741fc2 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Flavor pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1143.655219] env[62070]: DEBUG nova.virt.hardware [None req-59c2ba03-a8ac-43ac-99d7-177e6a741fc2 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Image pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1143.655374] env[62070]: DEBUG nova.virt.hardware [None req-59c2ba03-a8ac-43ac-99d7-177e6a741fc2 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1143.655592] env[62070]: DEBUG nova.virt.hardware [None req-59c2ba03-a8ac-43ac-99d7-177e6a741fc2 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1143.655758] env[62070]: DEBUG nova.virt.hardware [None req-59c2ba03-a8ac-43ac-99d7-177e6a741fc2 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1143.655931] env[62070]: DEBUG nova.virt.hardware [None 
req-59c2ba03-a8ac-43ac-99d7-177e6a741fc2 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Got 1 possible topologies {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1143.656111] env[62070]: DEBUG nova.virt.hardware [None req-59c2ba03-a8ac-43ac-99d7-177e6a741fc2 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1143.656339] env[62070]: DEBUG nova.virt.hardware [None req-59c2ba03-a8ac-43ac-99d7-177e6a741fc2 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1143.657208] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1964632c-6941-4df5-ae7f-ec8ade0b0927 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.666598] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54e86f80-7063-4c1a-9eac-b6172d46d015 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.902224] env[62070]: DEBUG nova.network.neutron [None req-59c2ba03-a8ac-43ac-99d7-177e6a741fc2 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 9a37cf1a-fd25-48b9-923d-75a95857101b] Successfully updated port: fe7b579a-99c3-40b9-a975-95ddca559b4d {{(pid=62070) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1144.084647] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-94d1c2d2-1965-464f-a9bf-5ad1d56ca955 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] [instance: 20c4fabc-fc9b-49c7-ab28-fa092ad66038] Preparing fetch location {{(pid=62070) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1144.085007] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-94d1c2d2-1965-464f-a9bf-5ad1d56ca955 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] [instance: 20c4fabc-fc9b-49c7-ab28-fa092ad66038] Fetch image to [datastore1] OSTACK_IMG_7af8dd2e-7dd7-41c1-8707-28825ef8ee37/OSTACK_IMG_7af8dd2e-7dd7-41c1-8707-28825ef8ee37.vmdk {{(pid=62070) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1144.085469] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-94d1c2d2-1965-464f-a9bf-5ad1d56ca955 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] [instance: 20c4fabc-fc9b-49c7-ab28-fa092ad66038] Downloading stream optimized image 8a6c97c1-5195-4f75-914b-69d3dbca246d to [datastore1] OSTACK_IMG_7af8dd2e-7dd7-41c1-8707-28825ef8ee37/OSTACK_IMG_7af8dd2e-7dd7-41c1-8707-28825ef8ee37.vmdk on the data store datastore1 as vApp {{(pid=62070) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 1144.085734] env[62070]: DEBUG nova.virt.vmwareapi.images [None req-94d1c2d2-1965-464f-a9bf-5ad1d56ca955 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] [instance: 
20c4fabc-fc9b-49c7-ab28-fa092ad66038] Downloading image file data 8a6c97c1-5195-4f75-914b-69d3dbca246d to the ESX as VM named 'OSTACK_IMG_7af8dd2e-7dd7-41c1-8707-28825ef8ee37' {{(pid=62070) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 1144.135867] env[62070]: DEBUG nova.network.neutron [req-1b3b941d-3a77-49e9-8fc9-2093f22d7dc2 req-889efb98-c240-4813-abb3-7b1f4d68e666 service nova] [instance: 20c4fabc-fc9b-49c7-ab28-fa092ad66038] Updated VIF entry in instance network info cache for port 0eb3c7d4-224c-4d94-9450-0623a1e1b162. {{(pid=62070) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1144.136314] env[62070]: DEBUG nova.network.neutron [req-1b3b941d-3a77-49e9-8fc9-2093f22d7dc2 req-889efb98-c240-4813-abb3-7b1f4d68e666 service nova] [instance: 20c4fabc-fc9b-49c7-ab28-fa092ad66038] Updating instance_info_cache with network_info: [{"id": "0eb3c7d4-224c-4d94-9450-0623a1e1b162", "address": "fa:16:3e:d9:a2:6e", "network": {"id": "b9ef8f6c-bbd6-409d-a591-ad584e5e028f", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-599171324-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.143", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ca25fba006b740f2a86fe10e4abe9400", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa8c2f93-f287-41b3-adb6-4942a7ea2a0b", "external-id": "nsx-vlan-transportzone-363", "segmentation_id": 363, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0eb3c7d4-22", "ovs_interfaceid": "0eb3c7d4-224c-4d94-9450-0623a1e1b162", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1144.163016] env[62070]: DEBUG oslo_vmware.rw_handles [None req-94d1c2d2-1965-464f-a9bf-5ad1d56ca955 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 1144.163016] env[62070]: value = "resgroup-9" [ 1144.163016] env[62070]: _type = "ResourcePool" [ 1144.163016] env[62070]: }. 
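
Stepping back to the nova.virt.hardware entries a few records above: the CPU-topology negotiation for the m1.nano flavor finds no limits or preferences on either the flavor or the image (0:0:0), so the effective maxima default to 65536 sockets/cores/threads, and the only topology whose product equals 1 vCPU is 1 socket x 1 core x 1 thread. The arithmetic is small enough to sketch directly; this is an illustration of the selection rule, not Nova's actual implementation.

    import itertools


    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                            max_threads=65536):
        """Yield (sockets, cores, threads) triples whose product == vcpus."""
        for s, c, t in itertools.product(
                range(1, min(vcpus, max_sockets) + 1),
                range(1, min(vcpus, max_cores) + 1),
                range(1, min(vcpus, max_threads) + 1)):
            if s * c * t == vcpus:
                yield (s, c, t)


    # For the 1-vCPU m1.nano case in the log there is exactly one candidate:
    print(list(possible_topologies(1)))   # [(1, 1, 1)]
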
{{(pid=62070) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 1144.163321] env[62070]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-b35e01f2-828e-414a-855b-602b969d936b {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.184976] env[62070]: DEBUG oslo_vmware.rw_handles [None req-94d1c2d2-1965-464f-a9bf-5ad1d56ca955 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Lease: (returnval){ [ 1144.184976] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]5209193b-bccd-2729-2668-04f98ab59fff" [ 1144.184976] env[62070]: _type = "HttpNfcLease" [ 1144.184976] env[62070]: } obtained for vApp import into resource pool (val){ [ 1144.184976] env[62070]: value = "resgroup-9" [ 1144.184976] env[62070]: _type = "ResourcePool" [ 1144.184976] env[62070]: }. {{(pid=62070) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 1144.185432] env[62070]: DEBUG oslo_vmware.api [None req-94d1c2d2-1965-464f-a9bf-5ad1d56ca955 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Waiting for the lease: (returnval){ [ 1144.185432] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]5209193b-bccd-2729-2668-04f98ab59fff" [ 1144.185432] env[62070]: _type = "HttpNfcLease" [ 1144.185432] env[62070]: } to be ready. {{(pid=62070) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1144.192821] env[62070]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1144.192821] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]5209193b-bccd-2729-2668-04f98ab59fff" [ 1144.192821] env[62070]: _type = "HttpNfcLease" [ 1144.192821] env[62070]: } is initializing. 
{{(pid=62070) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1144.403778] env[62070]: DEBUG oslo_concurrency.lockutils [None req-59c2ba03-a8ac-43ac-99d7-177e6a741fc2 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Acquiring lock "refresh_cache-9a37cf1a-fd25-48b9-923d-75a95857101b" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1144.404038] env[62070]: DEBUG oslo_concurrency.lockutils [None req-59c2ba03-a8ac-43ac-99d7-177e6a741fc2 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Acquired lock "refresh_cache-9a37cf1a-fd25-48b9-923d-75a95857101b" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1144.404117] env[62070]: DEBUG nova.network.neutron [None req-59c2ba03-a8ac-43ac-99d7-177e6a741fc2 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 9a37cf1a-fd25-48b9-923d-75a95857101b] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1144.639552] env[62070]: DEBUG oslo_concurrency.lockutils [req-1b3b941d-3a77-49e9-8fc9-2093f22d7dc2 req-889efb98-c240-4813-abb3-7b1f4d68e666 service nova] Releasing lock "refresh_cache-20c4fabc-fc9b-49c7-ab28-fa092ad66038" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1144.694065] env[62070]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1144.694065] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]5209193b-bccd-2729-2668-04f98ab59fff" [ 1144.694065] env[62070]: _type = "HttpNfcLease" [ 1144.694065] env[62070]: } is ready. {{(pid=62070) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1144.694349] env[62070]: DEBUG oslo_vmware.rw_handles [None req-94d1c2d2-1965-464f-a9bf-5ad1d56ca955 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1144.694349] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]5209193b-bccd-2729-2668-04f98ab59fff" [ 1144.694349] env[62070]: _type = "HttpNfcLease" [ 1144.694349] env[62070]: }. {{(pid=62070) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 1144.695042] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ba2adda-fd6b-437b-b86a-1f837d997cfc {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.701635] env[62070]: DEBUG oslo_vmware.rw_handles [None req-94d1c2d2-1965-464f-a9bf-5ad1d56ca955 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/527d4647-9953-cc27-9cff-857c72d53f43/disk-0.vmdk from lease info. 
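
The stream-optimized download above goes through an import lease: request an HttpNfcLease for a vApp import into the resource pool, poll it until it leaves the "initializing" state, read the device URL for disk-0.vmdk from the lease info, and then stream the image bytes to that URL over HTTPS while the lease is held (the write connection shows up in the next entry). A compressed sketch of that sequence follows; the three callables are placeholders for the corresponding API reads and the HTTP upload, not oslo.vmware functions.

    import time


    def import_vmdk(create_lease, read_lease, upload, image_bytes_iter,
                    poll_interval=0.5):
        """Drive a vApp-import style lease through ready -> upload.

        create_lease()        -> opaque lease handle
        read_lease(lease)     -> {'state': 'initializing'|'ready'|'error',
                                  'vmdk_url': '...'}   (placeholder shape)
        upload(url, chunks)   -> streams the image chunks to the device URL
        """
        lease = create_lease()
        while True:
            info = read_lease(lease)
            if info['state'] == 'ready':
                break
            if info['state'] == 'error':
                raise RuntimeError('lease failed before becoming ready')
            time.sleep(poll_interval)       # the "is initializing" phase above
        upload(info['vmdk_url'], image_bytes_iter)
        return lease                        # caller still has to complete it
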
{{(pid=62070) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1144.701812] env[62070]: DEBUG oslo_vmware.rw_handles [None req-94d1c2d2-1965-464f-a9bf-5ad1d56ca955 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Creating HTTP connection to write to file with size = 31669248 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/527d4647-9953-cc27-9cff-857c72d53f43/disk-0.vmdk. {{(pid=62070) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1144.765076] env[62070]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-4c169b29-eb40-4613-b5d4-aca9df2ed058 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.938173] env[62070]: DEBUG nova.network.neutron [None req-59c2ba03-a8ac-43ac-99d7-177e6a741fc2 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 9a37cf1a-fd25-48b9-923d-75a95857101b] Instance cache missing network info. {{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1145.080923] env[62070]: DEBUG nova.network.neutron [None req-59c2ba03-a8ac-43ac-99d7-177e6a741fc2 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 9a37cf1a-fd25-48b9-923d-75a95857101b] Updating instance_info_cache with network_info: [{"id": "fe7b579a-99c3-40b9-a975-95ddca559b4d", "address": "fa:16:3e:72:7f:c3", "network": {"id": "1ca75409-ae3f-4837-a2b6-ed2445253336", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1883034081-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f29ac48ab6544ec0bd1d210aec05dbc5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1559ce49-7345-443f-bf02-4bfeb88356ef", "external-id": "nsx-vlan-transportzone-670", "segmentation_id": 670, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfe7b579a-99", "ovs_interfaceid": "fe7b579a-99c3-40b9-a975-95ddca559b4d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1145.257623] env[62070]: DEBUG nova.compute.manager [req-3f7a33c8-4cca-4ec4-bfb5-2d1cd7d7b13e req-5af7dcfe-4506-450e-8659-fe9c115f3fca service nova] [instance: 9a37cf1a-fd25-48b9-923d-75a95857101b] Received event network-vif-plugged-fe7b579a-99c3-40b9-a975-95ddca559b4d {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1145.257867] env[62070]: DEBUG oslo_concurrency.lockutils [req-3f7a33c8-4cca-4ec4-bfb5-2d1cd7d7b13e req-5af7dcfe-4506-450e-8659-fe9c115f3fca service nova] Acquiring lock "9a37cf1a-fd25-48b9-923d-75a95857101b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62070) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1145.258162] env[62070]: DEBUG oslo_concurrency.lockutils [req-3f7a33c8-4cca-4ec4-bfb5-2d1cd7d7b13e req-5af7dcfe-4506-450e-8659-fe9c115f3fca service nova] Lock "9a37cf1a-fd25-48b9-923d-75a95857101b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1145.258401] env[62070]: DEBUG oslo_concurrency.lockutils [req-3f7a33c8-4cca-4ec4-bfb5-2d1cd7d7b13e req-5af7dcfe-4506-450e-8659-fe9c115f3fca service nova] Lock "9a37cf1a-fd25-48b9-923d-75a95857101b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1145.258707] env[62070]: DEBUG nova.compute.manager [req-3f7a33c8-4cca-4ec4-bfb5-2d1cd7d7b13e req-5af7dcfe-4506-450e-8659-fe9c115f3fca service nova] [instance: 9a37cf1a-fd25-48b9-923d-75a95857101b] No waiting events found dispatching network-vif-plugged-fe7b579a-99c3-40b9-a975-95ddca559b4d {{(pid=62070) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1145.258931] env[62070]: WARNING nova.compute.manager [req-3f7a33c8-4cca-4ec4-bfb5-2d1cd7d7b13e req-5af7dcfe-4506-450e-8659-fe9c115f3fca service nova] [instance: 9a37cf1a-fd25-48b9-923d-75a95857101b] Received unexpected event network-vif-plugged-fe7b579a-99c3-40b9-a975-95ddca559b4d for instance with vm_state building and task_state spawning. [ 1145.259202] env[62070]: DEBUG nova.compute.manager [req-3f7a33c8-4cca-4ec4-bfb5-2d1cd7d7b13e req-5af7dcfe-4506-450e-8659-fe9c115f3fca service nova] [instance: 9a37cf1a-fd25-48b9-923d-75a95857101b] Received event network-changed-fe7b579a-99c3-40b9-a975-95ddca559b4d {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1145.259380] env[62070]: DEBUG nova.compute.manager [req-3f7a33c8-4cca-4ec4-bfb5-2d1cd7d7b13e req-5af7dcfe-4506-450e-8659-fe9c115f3fca service nova] [instance: 9a37cf1a-fd25-48b9-923d-75a95857101b] Refreshing instance network info cache due to event network-changed-fe7b579a-99c3-40b9-a975-95ddca559b4d. 
{{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1145.259604] env[62070]: DEBUG oslo_concurrency.lockutils [req-3f7a33c8-4cca-4ec4-bfb5-2d1cd7d7b13e req-5af7dcfe-4506-450e-8659-fe9c115f3fca service nova] Acquiring lock "refresh_cache-9a37cf1a-fd25-48b9-923d-75a95857101b" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1145.584022] env[62070]: DEBUG oslo_concurrency.lockutils [None req-59c2ba03-a8ac-43ac-99d7-177e6a741fc2 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Releasing lock "refresh_cache-9a37cf1a-fd25-48b9-923d-75a95857101b" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1145.584390] env[62070]: DEBUG nova.compute.manager [None req-59c2ba03-a8ac-43ac-99d7-177e6a741fc2 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 9a37cf1a-fd25-48b9-923d-75a95857101b] Instance network_info: |[{"id": "fe7b579a-99c3-40b9-a975-95ddca559b4d", "address": "fa:16:3e:72:7f:c3", "network": {"id": "1ca75409-ae3f-4837-a2b6-ed2445253336", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1883034081-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f29ac48ab6544ec0bd1d210aec05dbc5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1559ce49-7345-443f-bf02-4bfeb88356ef", "external-id": "nsx-vlan-transportzone-670", "segmentation_id": 670, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfe7b579a-99", "ovs_interfaceid": "fe7b579a-99c3-40b9-a975-95ddca559b4d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1145.585954] env[62070]: DEBUG oslo_concurrency.lockutils [req-3f7a33c8-4cca-4ec4-bfb5-2d1cd7d7b13e req-5af7dcfe-4506-450e-8659-fe9c115f3fca service nova] Acquired lock "refresh_cache-9a37cf1a-fd25-48b9-923d-75a95857101b" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1145.586194] env[62070]: DEBUG nova.network.neutron [req-3f7a33c8-4cca-4ec4-bfb5-2d1cd7d7b13e req-5af7dcfe-4506-450e-8659-fe9c115f3fca service nova] [instance: 9a37cf1a-fd25-48b9-923d-75a95857101b] Refreshing network info cache for port fe7b579a-99c3-40b9-a975-95ddca559b4d {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1145.587969] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-59c2ba03-a8ac-43ac-99d7-177e6a741fc2 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 9a37cf1a-fd25-48b9-923d-75a95857101b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:72:7f:c3', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1559ce49-7345-443f-bf02-4bfeb88356ef', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'fe7b579a-99c3-40b9-a975-95ddca559b4d', 
'vif_model': 'vmxnet3'}] {{(pid=62070) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1145.595779] env[62070]: DEBUG oslo.service.loopingcall [None req-59c2ba03-a8ac-43ac-99d7-177e6a741fc2 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1145.599231] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9a37cf1a-fd25-48b9-923d-75a95857101b] Creating VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1145.599486] env[62070]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-115c5d41-dc0d-4f28-a194-bbf5e3fd9667 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.625666] env[62070]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1145.625666] env[62070]: value = "task-1122533" [ 1145.625666] env[62070]: _type = "Task" [ 1145.625666] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1145.635499] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1122533, 'name': CreateVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1145.880011] env[62070]: DEBUG oslo_vmware.rw_handles [None req-94d1c2d2-1965-464f-a9bf-5ad1d56ca955 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Completed reading data from the image iterator. {{(pid=62070) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1145.880295] env[62070]: DEBUG oslo_vmware.rw_handles [None req-94d1c2d2-1965-464f-a9bf-5ad1d56ca955 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/527d4647-9953-cc27-9cff-857c72d53f43/disk-0.vmdk. {{(pid=62070) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1145.881578] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-421563b5-2ba7-4478-82d7-3b3ef95446f3 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.888298] env[62070]: DEBUG oslo_vmware.rw_handles [None req-94d1c2d2-1965-464f-a9bf-5ad1d56ca955 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/527d4647-9953-cc27-9cff-857c72d53f43/disk-0.vmdk is in state: ready. {{(pid=62070) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1145.888518] env[62070]: DEBUG oslo_vmware.rw_handles [None req-94d1c2d2-1965-464f-a9bf-5ad1d56ca955 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Releasing lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/527d4647-9953-cc27-9cff-857c72d53f43/disk-0.vmdk. 
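
Once the image iterator is exhausted, the writer above reports final progress on the lease, checks that the lease is still in the "ready" state, and releases it via HttpNfcLeaseComplete; the entries that follow then unregister the temporary OSTACK_IMG_* VM and move its disk into the image cache. A rough end-of-transfer sketch, again with placeholder callables standing in for the individual API calls, could read:

    def finish_import(lease, report_progress, read_lease, complete_lease,
                      unregister_temp_vm, move_disk, src_vmdk, cache_vmdk):
        """Close out a finished stream-optimized import.

        All arguments except src_vmdk/cache_vmdk are placeholder callables
        standing in for the HttpNfcLeaseProgress / HttpNfcLeaseComplete /
        UnregisterVM / MoveVirtualDisk_Task calls seen in the log.
        """
        report_progress(lease, 100)                 # final lease progress update
        if read_lease(lease)['state'] != 'ready':   # lease must still be valid
            raise RuntimeError('lease expired before completion')
        complete_lease(lease)                       # HttpNfcLeaseComplete
        unregister_temp_vm()                        # drop the OSTACK_IMG_* shell
        move_disk(src_vmdk, cache_vmdk)             # MoveVirtualDisk into cache
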
{{(pid=62070) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 1145.888773] env[62070]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-3bc590d1-494a-4bf3-bf44-da852e71c016 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.080412] env[62070]: DEBUG oslo_vmware.rw_handles [None req-94d1c2d2-1965-464f-a9bf-5ad1d56ca955 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Closed VMDK write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/527d4647-9953-cc27-9cff-857c72d53f43/disk-0.vmdk. {{(pid=62070) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 1146.080689] env[62070]: INFO nova.virt.vmwareapi.images [None req-94d1c2d2-1965-464f-a9bf-5ad1d56ca955 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] [instance: 20c4fabc-fc9b-49c7-ab28-fa092ad66038] Downloaded image file data 8a6c97c1-5195-4f75-914b-69d3dbca246d [ 1146.081571] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77a941f1-75c0-4a1d-812a-e9221f7f5691 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.098846] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-cd6eb8ff-19e4-4ffe-9025-d38f8842c4f7 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.119322] env[62070]: INFO nova.virt.vmwareapi.images [None req-94d1c2d2-1965-464f-a9bf-5ad1d56ca955 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] [instance: 20c4fabc-fc9b-49c7-ab28-fa092ad66038] The imported VM was unregistered [ 1146.121694] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-94d1c2d2-1965-464f-a9bf-5ad1d56ca955 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] [instance: 20c4fabc-fc9b-49c7-ab28-fa092ad66038] Caching image {{(pid=62070) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1146.121936] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-94d1c2d2-1965-464f-a9bf-5ad1d56ca955 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Creating directory with path [datastore1] devstack-image-cache_base/8a6c97c1-5195-4f75-914b-69d3dbca246d {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1146.122224] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c280049b-0039-46c6-86c9-d4be7eed1dc6 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.133921] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-94d1c2d2-1965-464f-a9bf-5ad1d56ca955 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Created directory with path [datastore1] devstack-image-cache_base/8a6c97c1-5195-4f75-914b-69d3dbca246d {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1146.134135] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-94d1c2d2-1965-464f-a9bf-5ad1d56ca955 tempest-AttachVolumeShelveTestJSON-1287321211 
tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Moving virtual disk from [datastore1] OSTACK_IMG_7af8dd2e-7dd7-41c1-8707-28825ef8ee37/OSTACK_IMG_7af8dd2e-7dd7-41c1-8707-28825ef8ee37.vmdk to [datastore1] devstack-image-cache_base/8a6c97c1-5195-4f75-914b-69d3dbca246d/8a6c97c1-5195-4f75-914b-69d3dbca246d.vmdk. {{(pid=62070) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 1146.134682] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-1b1ab99a-05a9-40b5-bb7c-86e61dfc0afd {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.139939] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1122533, 'name': CreateVM_Task, 'duration_secs': 0.362438} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1146.140373] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9a37cf1a-fd25-48b9-923d-75a95857101b] Created VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1146.140966] env[62070]: DEBUG oslo_concurrency.lockutils [None req-59c2ba03-a8ac-43ac-99d7-177e6a741fc2 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1146.141186] env[62070]: DEBUG oslo_concurrency.lockutils [None req-59c2ba03-a8ac-43ac-99d7-177e6a741fc2 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Acquired lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1146.141510] env[62070]: DEBUG oslo_concurrency.lockutils [None req-59c2ba03-a8ac-43ac-99d7-177e6a741fc2 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1146.141739] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c9142a6f-6a9f-41e4-9889-67d9057436a2 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.144321] env[62070]: DEBUG oslo_vmware.api [None req-94d1c2d2-1965-464f-a9bf-5ad1d56ca955 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Waiting for the task: (returnval){ [ 1146.144321] env[62070]: value = "task-1122536" [ 1146.144321] env[62070]: _type = "Task" [ 1146.144321] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1146.147877] env[62070]: DEBUG oslo_vmware.api [None req-59c2ba03-a8ac-43ac-99d7-177e6a741fc2 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Waiting for the task: (returnval){ [ 1146.147877] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]5243fdca-8029-6c03-d98c-d32a2a371ebf" [ 1146.147877] env[62070]: _type = "Task" [ 1146.147877] env[62070]: } to complete. 
{{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1146.153905] env[62070]: DEBUG oslo_vmware.api [None req-94d1c2d2-1965-464f-a9bf-5ad1d56ca955 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Task: {'id': task-1122536, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1146.158323] env[62070]: DEBUG oslo_vmware.api [None req-59c2ba03-a8ac-43ac-99d7-177e6a741fc2 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]5243fdca-8029-6c03-d98c-d32a2a371ebf, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1146.329929] env[62070]: DEBUG nova.network.neutron [req-3f7a33c8-4cca-4ec4-bfb5-2d1cd7d7b13e req-5af7dcfe-4506-450e-8659-fe9c115f3fca service nova] [instance: 9a37cf1a-fd25-48b9-923d-75a95857101b] Updated VIF entry in instance network info cache for port fe7b579a-99c3-40b9-a975-95ddca559b4d. {{(pid=62070) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1146.330377] env[62070]: DEBUG nova.network.neutron [req-3f7a33c8-4cca-4ec4-bfb5-2d1cd7d7b13e req-5af7dcfe-4506-450e-8659-fe9c115f3fca service nova] [instance: 9a37cf1a-fd25-48b9-923d-75a95857101b] Updating instance_info_cache with network_info: [{"id": "fe7b579a-99c3-40b9-a975-95ddca559b4d", "address": "fa:16:3e:72:7f:c3", "network": {"id": "1ca75409-ae3f-4837-a2b6-ed2445253336", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1883034081-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f29ac48ab6544ec0bd1d210aec05dbc5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1559ce49-7345-443f-bf02-4bfeb88356ef", "external-id": "nsx-vlan-transportzone-670", "segmentation_id": 670, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfe7b579a-99", "ovs_interfaceid": "fe7b579a-99c3-40b9-a975-95ddca559b4d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1146.660450] env[62070]: DEBUG oslo_vmware.api [None req-59c2ba03-a8ac-43ac-99d7-177e6a741fc2 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]5243fdca-8029-6c03-d98c-d32a2a371ebf, 'name': SearchDatastore_Task, 'duration_secs': 0.013319} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1146.664040] env[62070]: DEBUG oslo_concurrency.lockutils [None req-59c2ba03-a8ac-43ac-99d7-177e6a741fc2 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Releasing lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1146.664380] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-59c2ba03-a8ac-43ac-99d7-177e6a741fc2 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 9a37cf1a-fd25-48b9-923d-75a95857101b] Processing image 43ea607c-7ece-4601-9b11-75c6a16aa7dd {{(pid=62070) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1146.664675] env[62070]: DEBUG oslo_concurrency.lockutils [None req-59c2ba03-a8ac-43ac-99d7-177e6a741fc2 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1146.664877] env[62070]: DEBUG oslo_concurrency.lockutils [None req-59c2ba03-a8ac-43ac-99d7-177e6a741fc2 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Acquired lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1146.665122] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-59c2ba03-a8ac-43ac-99d7-177e6a741fc2 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1146.665473] env[62070]: DEBUG oslo_vmware.api [None req-94d1c2d2-1965-464f-a9bf-5ad1d56ca955 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Task: {'id': task-1122536, 'name': MoveVirtualDisk_Task} progress is 24%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1146.665738] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a3c0236c-0318-43e7-aa77-307f3094f9fd {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.684050] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-59c2ba03-a8ac-43ac-99d7-177e6a741fc2 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1146.684369] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-59c2ba03-a8ac-43ac-99d7-177e6a741fc2 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62070) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1146.685261] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-246d2c4c-4a7e-45be-8e12-eca8fa82ad39 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.692708] env[62070]: DEBUG oslo_vmware.api [None req-59c2ba03-a8ac-43ac-99d7-177e6a741fc2 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Waiting for the task: (returnval){ [ 1146.692708] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]5279178c-c526-b273-94de-83882e1b128a" [ 1146.692708] env[62070]: _type = "Task" [ 1146.692708] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1146.702350] env[62070]: DEBUG oslo_vmware.api [None req-59c2ba03-a8ac-43ac-99d7-177e6a741fc2 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]5279178c-c526-b273-94de-83882e1b128a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1146.833616] env[62070]: DEBUG oslo_concurrency.lockutils [req-3f7a33c8-4cca-4ec4-bfb5-2d1cd7d7b13e req-5af7dcfe-4506-450e-8659-fe9c115f3fca service nova] Releasing lock "refresh_cache-9a37cf1a-fd25-48b9-923d-75a95857101b" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1147.157027] env[62070]: DEBUG oslo_vmware.api [None req-94d1c2d2-1965-464f-a9bf-5ad1d56ca955 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Task: {'id': task-1122536, 'name': MoveVirtualDisk_Task} progress is 46%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1147.205428] env[62070]: DEBUG oslo_vmware.api [None req-59c2ba03-a8ac-43ac-99d7-177e6a741fc2 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]5279178c-c526-b273-94de-83882e1b128a, 'name': SearchDatastore_Task, 'duration_secs': 0.071118} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1147.206305] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-705af3ef-bdf5-428f-9410-67cec152ae47 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.213070] env[62070]: DEBUG oslo_vmware.api [None req-59c2ba03-a8ac-43ac-99d7-177e6a741fc2 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Waiting for the task: (returnval){ [ 1147.213070] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]5223162e-5d81-22f5-1cbd-8d3c7fb595bf" [ 1147.213070] env[62070]: _type = "Task" [ 1147.213070] env[62070]: } to complete. 
{{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1147.222450] env[62070]: DEBUG oslo_vmware.api [None req-59c2ba03-a8ac-43ac-99d7-177e6a741fc2 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]5223162e-5d81-22f5-1cbd-8d3c7fb595bf, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1147.659484] env[62070]: DEBUG oslo_vmware.api [None req-94d1c2d2-1965-464f-a9bf-5ad1d56ca955 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Task: {'id': task-1122536, 'name': MoveVirtualDisk_Task} progress is 69%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1147.725154] env[62070]: DEBUG oslo_vmware.api [None req-59c2ba03-a8ac-43ac-99d7-177e6a741fc2 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]5223162e-5d81-22f5-1cbd-8d3c7fb595bf, 'name': SearchDatastore_Task, 'duration_secs': 0.065053} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1147.725457] env[62070]: DEBUG oslo_concurrency.lockutils [None req-59c2ba03-a8ac-43ac-99d7-177e6a741fc2 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Releasing lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1147.725740] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-59c2ba03-a8ac-43ac-99d7-177e6a741fc2 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore1] 9a37cf1a-fd25-48b9-923d-75a95857101b/9a37cf1a-fd25-48b9-923d-75a95857101b.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1147.726031] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-68b0b413-6672-4431-afca-a59a4fa8c15e {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.740604] env[62070]: DEBUG oslo_vmware.api [None req-59c2ba03-a8ac-43ac-99d7-177e6a741fc2 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Waiting for the task: (returnval){ [ 1147.740604] env[62070]: value = "task-1122537" [ 1147.740604] env[62070]: _type = "Task" [ 1147.740604] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1147.751248] env[62070]: DEBUG oslo_vmware.api [None req-59c2ba03-a8ac-43ac-99d7-177e6a741fc2 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Task: {'id': task-1122537, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1148.159608] env[62070]: DEBUG oslo_vmware.api [None req-94d1c2d2-1965-464f-a9bf-5ad1d56ca955 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Task: {'id': task-1122536, 'name': MoveVirtualDisk_Task} progress is 91%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1148.251611] env[62070]: DEBUG oslo_vmware.api [None req-59c2ba03-a8ac-43ac-99d7-177e6a741fc2 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Task: {'id': task-1122537, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1148.661371] env[62070]: DEBUG oslo_vmware.api [None req-94d1c2d2-1965-464f-a9bf-5ad1d56ca955 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Task: {'id': task-1122536, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.254254} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1148.661751] env[62070]: INFO nova.virt.vmwareapi.ds_util [None req-94d1c2d2-1965-464f-a9bf-5ad1d56ca955 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Moved virtual disk from [datastore1] OSTACK_IMG_7af8dd2e-7dd7-41c1-8707-28825ef8ee37/OSTACK_IMG_7af8dd2e-7dd7-41c1-8707-28825ef8ee37.vmdk to [datastore1] devstack-image-cache_base/8a6c97c1-5195-4f75-914b-69d3dbca246d/8a6c97c1-5195-4f75-914b-69d3dbca246d.vmdk. [ 1148.662013] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-94d1c2d2-1965-464f-a9bf-5ad1d56ca955 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] [instance: 20c4fabc-fc9b-49c7-ab28-fa092ad66038] Cleaning up location [datastore1] OSTACK_IMG_7af8dd2e-7dd7-41c1-8707-28825ef8ee37 {{(pid=62070) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1148.662254] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-94d1c2d2-1965-464f-a9bf-5ad1d56ca955 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Deleting the datastore file [datastore1] OSTACK_IMG_7af8dd2e-7dd7-41c1-8707-28825ef8ee37 {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1148.662576] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d84da65f-c37b-429e-b773-232678008204 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.670647] env[62070]: DEBUG oslo_vmware.api [None req-94d1c2d2-1965-464f-a9bf-5ad1d56ca955 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Waiting for the task: (returnval){ [ 1148.670647] env[62070]: value = "task-1122538" [ 1148.670647] env[62070]: _type = "Task" [ 1148.670647] env[62070]: } to complete. 
{{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1148.680934] env[62070]: DEBUG oslo_vmware.api [None req-94d1c2d2-1965-464f-a9bf-5ad1d56ca955 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Task: {'id': task-1122538, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1148.750399] env[62070]: DEBUG oslo_vmware.api [None req-59c2ba03-a8ac-43ac-99d7-177e6a741fc2 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Task: {'id': task-1122537, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.010152} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1148.750668] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-59c2ba03-a8ac-43ac-99d7-177e6a741fc2 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore1] 9a37cf1a-fd25-48b9-923d-75a95857101b/9a37cf1a-fd25-48b9-923d-75a95857101b.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 1148.750890] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-59c2ba03-a8ac-43ac-99d7-177e6a741fc2 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 9a37cf1a-fd25-48b9-923d-75a95857101b] Extending root virtual disk to 1048576 {{(pid=62070) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1148.751160] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ab03d57b-1ca8-4bfa-b182-4ab7c5d51dd2 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.757565] env[62070]: DEBUG oslo_vmware.api [None req-59c2ba03-a8ac-43ac-99d7-177e6a741fc2 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Waiting for the task: (returnval){ [ 1148.757565] env[62070]: value = "task-1122539" [ 1148.757565] env[62070]: _type = "Task" [ 1148.757565] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1148.764413] env[62070]: DEBUG oslo_vmware.api [None req-59c2ba03-a8ac-43ac-99d7-177e6a741fc2 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Task: {'id': task-1122539, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1149.180856] env[62070]: DEBUG oslo_vmware.api [None req-94d1c2d2-1965-464f-a9bf-5ad1d56ca955 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Task: {'id': task-1122538, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.074899} completed successfully. 
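
For instance 9a37cf1a-fd25-48b9-923d-75a95857101b the spawn path above and in the entries that follow is the standard cached-image sequence: copy the cached VMDK into the instance directory, extend the root disk to the flavor's root_gb (logged as 1048576, i.e. 1 GiB expressed in KB for the m1.nano flavor), and then reconfigure the VM to attach the resulting disk. A small orchestration sketch makes the ordering explicit; the three callables are stand-ins for the CopyVirtualDisk_Task, ExtendVirtualDisk_Task and ReconfigVM_Task calls, each assumed to block until its task completes.

    def build_root_disk(copy_disk, extend_disk, attach_disk,
                        cached_vmdk, instance_vmdk, root_gb):
        """Copy -> extend -> attach, in that order.

        copy_disk/extend_disk/attach_disk are placeholders for the
        corresponding task-returning vSphere calls; each is assumed to
        wait for its task the way wait_for_task does in the log above.
        """
        copy_disk(cached_vmdk, instance_vmdk)          # CopyVirtualDisk_Task
        size_kb = root_gb * 1024 * 1024                # 1 GiB -> 1048576 KB
        extend_disk(instance_vmdk, size_kb)            # ExtendVirtualDisk_Task
        attach_disk(instance_vmdk)                     # ReconfigVM_Task (attach)
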
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1149.181139] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-94d1c2d2-1965-464f-a9bf-5ad1d56ca955 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Deleted the datastore file {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1149.181319] env[62070]: DEBUG oslo_concurrency.lockutils [None req-94d1c2d2-1965-464f-a9bf-5ad1d56ca955 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Releasing lock "[datastore1] devstack-image-cache_base/8a6c97c1-5195-4f75-914b-69d3dbca246d/8a6c97c1-5195-4f75-914b-69d3dbca246d.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1149.181568] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-94d1c2d2-1965-464f-a9bf-5ad1d56ca955 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/8a6c97c1-5195-4f75-914b-69d3dbca246d/8a6c97c1-5195-4f75-914b-69d3dbca246d.vmdk to [datastore1] 20c4fabc-fc9b-49c7-ab28-fa092ad66038/20c4fabc-fc9b-49c7-ab28-fa092ad66038.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1149.181817] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3a10685f-f002-460a-837a-ebebb49c0e52 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.188173] env[62070]: DEBUG oslo_concurrency.lockutils [None req-c719f97e-2f0f-4457-aabc-89498aabec7b tempest-ServerActionsV293TestJSON-1711639707 tempest-ServerActionsV293TestJSON-1711639707-project-member] Acquiring lock "c2b65119-77ff-437b-8f7e-cf6e83d907bb" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1149.188421] env[62070]: DEBUG oslo_concurrency.lockutils [None req-c719f97e-2f0f-4457-aabc-89498aabec7b tempest-ServerActionsV293TestJSON-1711639707 tempest-ServerActionsV293TestJSON-1711639707-project-member] Lock "c2b65119-77ff-437b-8f7e-cf6e83d907bb" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1149.192182] env[62070]: DEBUG oslo_vmware.api [None req-94d1c2d2-1965-464f-a9bf-5ad1d56ca955 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Waiting for the task: (returnval){ [ 1149.192182] env[62070]: value = "task-1122540" [ 1149.192182] env[62070]: _type = "Task" [ 1149.192182] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1149.200207] env[62070]: DEBUG oslo_vmware.api [None req-94d1c2d2-1965-464f-a9bf-5ad1d56ca955 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Task: {'id': task-1122540, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1149.266812] env[62070]: DEBUG oslo_vmware.api [None req-59c2ba03-a8ac-43ac-99d7-177e6a741fc2 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Task: {'id': task-1122539, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.087962} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1149.267034] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-59c2ba03-a8ac-43ac-99d7-177e6a741fc2 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 9a37cf1a-fd25-48b9-923d-75a95857101b] Extended root virtual disk {{(pid=62070) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1149.267781] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-100fd42a-f835-4a5d-b62e-e9d3494d9dff {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.288835] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-59c2ba03-a8ac-43ac-99d7-177e6a741fc2 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 9a37cf1a-fd25-48b9-923d-75a95857101b] Reconfiguring VM instance instance-0000006d to attach disk [datastore1] 9a37cf1a-fd25-48b9-923d-75a95857101b/9a37cf1a-fd25-48b9-923d-75a95857101b.vmdk or device None with type sparse {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1149.289110] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6866b762-ca85-45f8-bd01-cebd1372c481 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.307586] env[62070]: DEBUG oslo_vmware.api [None req-59c2ba03-a8ac-43ac-99d7-177e6a741fc2 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Waiting for the task: (returnval){ [ 1149.307586] env[62070]: value = "task-1122541" [ 1149.307586] env[62070]: _type = "Task" [ 1149.307586] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1149.315260] env[62070]: DEBUG oslo_vmware.api [None req-59c2ba03-a8ac-43ac-99d7-177e6a741fc2 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Task: {'id': task-1122541, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1149.690954] env[62070]: DEBUG nova.compute.manager [None req-c719f97e-2f0f-4457-aabc-89498aabec7b tempest-ServerActionsV293TestJSON-1711639707 tempest-ServerActionsV293TestJSON-1711639707-project-member] [instance: c2b65119-77ff-437b-8f7e-cf6e83d907bb] Starting instance... {{(pid=62070) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1149.708454] env[62070]: DEBUG oslo_vmware.api [None req-94d1c2d2-1965-464f-a9bf-5ad1d56ca955 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Task: {'id': task-1122540, 'name': CopyVirtualDisk_Task} progress is 18%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1149.820068] env[62070]: DEBUG oslo_vmware.api [None req-59c2ba03-a8ac-43ac-99d7-177e6a741fc2 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Task: {'id': task-1122541, 'name': ReconfigVM_Task, 'duration_secs': 0.302533} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1149.820291] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-59c2ba03-a8ac-43ac-99d7-177e6a741fc2 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 9a37cf1a-fd25-48b9-923d-75a95857101b] Reconfigured VM instance instance-0000006d to attach disk [datastore1] 9a37cf1a-fd25-48b9-923d-75a95857101b/9a37cf1a-fd25-48b9-923d-75a95857101b.vmdk or device None with type sparse {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1149.820965] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f4b6d625-abf0-4100-ba31-75936631a086 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.830631] env[62070]: DEBUG oslo_vmware.api [None req-59c2ba03-a8ac-43ac-99d7-177e6a741fc2 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Waiting for the task: (returnval){ [ 1149.830631] env[62070]: value = "task-1122542" [ 1149.830631] env[62070]: _type = "Task" [ 1149.830631] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1149.843361] env[62070]: DEBUG oslo_vmware.api [None req-59c2ba03-a8ac-43ac-99d7-177e6a741fc2 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Task: {'id': task-1122542, 'name': Rename_Task} progress is 6%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1150.207651] env[62070]: DEBUG oslo_vmware.api [None req-94d1c2d2-1965-464f-a9bf-5ad1d56ca955 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Task: {'id': task-1122540, 'name': CopyVirtualDisk_Task} progress is 40%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1150.226103] env[62070]: DEBUG oslo_concurrency.lockutils [None req-c719f97e-2f0f-4457-aabc-89498aabec7b tempest-ServerActionsV293TestJSON-1711639707 tempest-ServerActionsV293TestJSON-1711639707-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1150.226430] env[62070]: DEBUG oslo_concurrency.lockutils [None req-c719f97e-2f0f-4457-aabc-89498aabec7b tempest-ServerActionsV293TestJSON-1711639707 tempest-ServerActionsV293TestJSON-1711639707-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1150.228105] env[62070]: INFO nova.compute.claims [None req-c719f97e-2f0f-4457-aabc-89498aabec7b tempest-ServerActionsV293TestJSON-1711639707 tempest-ServerActionsV293TestJSON-1711639707-project-member] [instance: c2b65119-77ff-437b-8f7e-cf6e83d907bb] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1150.343654] env[62070]: DEBUG oslo_vmware.api [None req-59c2ba03-a8ac-43ac-99d7-177e6a741fc2 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Task: {'id': task-1122542, 'name': Rename_Task, 'duration_secs': 0.167026} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1150.343963] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-59c2ba03-a8ac-43ac-99d7-177e6a741fc2 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 9a37cf1a-fd25-48b9-923d-75a95857101b] Powering on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1150.344290] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b9e6bbd0-2e12-4d04-ae63-4b17759f4ac2 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.353188] env[62070]: DEBUG oslo_vmware.api [None req-59c2ba03-a8ac-43ac-99d7-177e6a741fc2 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Waiting for the task: (returnval){ [ 1150.353188] env[62070]: value = "task-1122543" [ 1150.353188] env[62070]: _type = "Task" [ 1150.353188] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1150.363020] env[62070]: DEBUG oslo_vmware.api [None req-59c2ba03-a8ac-43ac-99d7-177e6a741fc2 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Task: {'id': task-1122543, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1150.705638] env[62070]: DEBUG oslo_vmware.api [None req-94d1c2d2-1965-464f-a9bf-5ad1d56ca955 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Task: {'id': task-1122540, 'name': CopyVirtualDisk_Task} progress is 63%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1150.866657] env[62070]: DEBUG oslo_vmware.api [None req-59c2ba03-a8ac-43ac-99d7-177e6a741fc2 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Task: {'id': task-1122543, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1151.206197] env[62070]: DEBUG oslo_vmware.api [None req-94d1c2d2-1965-464f-a9bf-5ad1d56ca955 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Task: {'id': task-1122540, 'name': CopyVirtualDisk_Task} progress is 85%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1151.304151] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e575c32-7dcb-462b-b2f0-e2f18f5665df {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.314688] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15e32887-cf85-41b8-b519-24f9a53c30c2 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.346427] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f859ff82-3106-4651-b735-4d99d9f6b7f7 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.361014] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6171af6-0d12-45e9-b7ee-35d896831001 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.375134] env[62070]: DEBUG nova.compute.provider_tree [None req-c719f97e-2f0f-4457-aabc-89498aabec7b tempest-ServerActionsV293TestJSON-1711639707 tempest-ServerActionsV293TestJSON-1711639707-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1151.379569] env[62070]: DEBUG oslo_vmware.api [None req-59c2ba03-a8ac-43ac-99d7-177e6a741fc2 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Task: {'id': task-1122543, 'name': PowerOnVM_Task, 'duration_secs': 0.51866} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1151.380188] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-59c2ba03-a8ac-43ac-99d7-177e6a741fc2 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 9a37cf1a-fd25-48b9-923d-75a95857101b] Powered on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1151.380396] env[62070]: INFO nova.compute.manager [None req-59c2ba03-a8ac-43ac-99d7-177e6a741fc2 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 9a37cf1a-fd25-48b9-923d-75a95857101b] Took 7.75 seconds to spawn the instance on the hypervisor. 
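The ReconfigVM_Task, Rename_Task and PowerOnVM_Task entries above all follow oslo.vmware's invoke-then-wait pattern: the service client submits a *_Task call, immediately gets a task reference back, and wait_for_task polls it, producing the "_poll_task ... progress is N%" lines until the task reports success. A minimal Python sketch of that pattern follows; the vCenter host, credentials, constructor keywords and VM moref value are assumptions for illustration only, not values taken from this log.

# Illustrative sketch only -- approximates the invoke-then-poll pattern visible in
# the surrounding entries; nothing here is copied from the log itself.
from oslo_vmware import api, vim_util

session = api.VMwareAPISession(
    'vcenter.example.org',            # assumed vCenter host
    'administrator@vsphere.local',    # assumed credentials
    'secret',
    api_retry_count=10,
    task_poll_interval=0.5,           # roughly the polling cadence seen in these entries
)

# Managed object reference for the VM to power on (hypothetical moref value).
vm_ref = vim_util.get_moref('vm-12345', 'VirtualMachine')

# The *_Task invocation returns a task reference without blocking; wait_for_task()
# then polls it (the source of the "progress is N%" debug lines) and raises an
# oslo_vmware exception if the task ends in an error state.
task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
task_info = session.wait_for_task(task)   # returns the task info once its state is 'success'

Nova's vmwareapi driver wraps these same two calls behind session._call_method() and session._wait_for_task(), which is why each task in this log shows up as an "Invoking <Manager>.<Something>_Task" service entry followed by a chain of progress polls and a final "completed successfully" record.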
[ 1151.380589] env[62070]: DEBUG nova.compute.manager [None req-59c2ba03-a8ac-43ac-99d7-177e6a741fc2 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 9a37cf1a-fd25-48b9-923d-75a95857101b] Checking state {{(pid=62070) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1151.381417] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f3369c3-237b-4d6f-a052-2902ee906d50 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.706098] env[62070]: DEBUG oslo_vmware.api [None req-94d1c2d2-1965-464f-a9bf-5ad1d56ca955 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Task: {'id': task-1122540, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.342427} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1151.706098] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-94d1c2d2-1965-464f-a9bf-5ad1d56ca955 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/8a6c97c1-5195-4f75-914b-69d3dbca246d/8a6c97c1-5195-4f75-914b-69d3dbca246d.vmdk to [datastore1] 20c4fabc-fc9b-49c7-ab28-fa092ad66038/20c4fabc-fc9b-49c7-ab28-fa092ad66038.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 1151.706534] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-103dbb17-330e-43b0-9732-d621c3a0dbba {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.727403] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-94d1c2d2-1965-464f-a9bf-5ad1d56ca955 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] [instance: 20c4fabc-fc9b-49c7-ab28-fa092ad66038] Reconfiguring VM instance instance-00000069 to attach disk [datastore1] 20c4fabc-fc9b-49c7-ab28-fa092ad66038/20c4fabc-fc9b-49c7-ab28-fa092ad66038.vmdk or device None with type streamOptimized {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1151.727661] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-74c9c42e-50a3-4040-bd00-0ffdfec046f9 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.746627] env[62070]: DEBUG oslo_vmware.api [None req-94d1c2d2-1965-464f-a9bf-5ad1d56ca955 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Waiting for the task: (returnval){ [ 1151.746627] env[62070]: value = "task-1122544" [ 1151.746627] env[62070]: _type = "Task" [ 1151.746627] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1151.754554] env[62070]: DEBUG oslo_vmware.api [None req-94d1c2d2-1965-464f-a9bf-5ad1d56ca955 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Task: {'id': task-1122544, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1151.882018] env[62070]: DEBUG nova.scheduler.client.report [None req-c719f97e-2f0f-4457-aabc-89498aabec7b tempest-ServerActionsV293TestJSON-1711639707 tempest-ServerActionsV293TestJSON-1711639707-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1151.901105] env[62070]: INFO nova.compute.manager [None req-59c2ba03-a8ac-43ac-99d7-177e6a741fc2 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 9a37cf1a-fd25-48b9-923d-75a95857101b] Took 15.11 seconds to build instance. [ 1152.256598] env[62070]: DEBUG oslo_vmware.api [None req-94d1c2d2-1965-464f-a9bf-5ad1d56ca955 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Task: {'id': task-1122544, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1152.386709] env[62070]: DEBUG oslo_concurrency.lockutils [None req-c719f97e-2f0f-4457-aabc-89498aabec7b tempest-ServerActionsV293TestJSON-1711639707 tempest-ServerActionsV293TestJSON-1711639707-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.160s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1152.387400] env[62070]: DEBUG nova.compute.manager [None req-c719f97e-2f0f-4457-aabc-89498aabec7b tempest-ServerActionsV293TestJSON-1711639707 tempest-ServerActionsV293TestJSON-1711639707-project-member] [instance: c2b65119-77ff-437b-8f7e-cf6e83d907bb] Start building networks asynchronously for instance. {{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1152.402826] env[62070]: DEBUG oslo_concurrency.lockutils [None req-59c2ba03-a8ac-43ac-99d7-177e6a741fc2 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Lock "9a37cf1a-fd25-48b9-923d-75a95857101b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 16.622s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1152.521966] env[62070]: DEBUG nova.compute.manager [req-8715e388-7e7e-4b6e-b1b3-06ce715a8c79 req-48229cbc-56d4-4077-be97-896699cd9f7d service nova] [instance: 9a37cf1a-fd25-48b9-923d-75a95857101b] Received event network-changed-fe7b579a-99c3-40b9-a975-95ddca559b4d {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1152.522191] env[62070]: DEBUG nova.compute.manager [req-8715e388-7e7e-4b6e-b1b3-06ce715a8c79 req-48229cbc-56d4-4077-be97-896699cd9f7d service nova] [instance: 9a37cf1a-fd25-48b9-923d-75a95857101b] Refreshing instance network info cache due to event network-changed-fe7b579a-99c3-40b9-a975-95ddca559b4d. 
{{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1152.522416] env[62070]: DEBUG oslo_concurrency.lockutils [req-8715e388-7e7e-4b6e-b1b3-06ce715a8c79 req-48229cbc-56d4-4077-be97-896699cd9f7d service nova] Acquiring lock "refresh_cache-9a37cf1a-fd25-48b9-923d-75a95857101b" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1152.522567] env[62070]: DEBUG oslo_concurrency.lockutils [req-8715e388-7e7e-4b6e-b1b3-06ce715a8c79 req-48229cbc-56d4-4077-be97-896699cd9f7d service nova] Acquired lock "refresh_cache-9a37cf1a-fd25-48b9-923d-75a95857101b" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1152.522733] env[62070]: DEBUG nova.network.neutron [req-8715e388-7e7e-4b6e-b1b3-06ce715a8c79 req-48229cbc-56d4-4077-be97-896699cd9f7d service nova] [instance: 9a37cf1a-fd25-48b9-923d-75a95857101b] Refreshing network info cache for port fe7b579a-99c3-40b9-a975-95ddca559b4d {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1152.758497] env[62070]: DEBUG oslo_vmware.api [None req-94d1c2d2-1965-464f-a9bf-5ad1d56ca955 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Task: {'id': task-1122544, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1152.893253] env[62070]: DEBUG nova.compute.utils [None req-c719f97e-2f0f-4457-aabc-89498aabec7b tempest-ServerActionsV293TestJSON-1711639707 tempest-ServerActionsV293TestJSON-1711639707-project-member] Using /dev/sd instead of None {{(pid=62070) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1152.894755] env[62070]: DEBUG nova.compute.manager [None req-c719f97e-2f0f-4457-aabc-89498aabec7b tempest-ServerActionsV293TestJSON-1711639707 tempest-ServerActionsV293TestJSON-1711639707-project-member] [instance: c2b65119-77ff-437b-8f7e-cf6e83d907bb] Allocating IP information in the background. 
{{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1152.894921] env[62070]: DEBUG nova.network.neutron [None req-c719f97e-2f0f-4457-aabc-89498aabec7b tempest-ServerActionsV293TestJSON-1711639707 tempest-ServerActionsV293TestJSON-1711639707-project-member] [instance: c2b65119-77ff-437b-8f7e-cf6e83d907bb] allocate_for_instance() {{(pid=62070) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1152.958535] env[62070]: DEBUG nova.policy [None req-c719f97e-2f0f-4457-aabc-89498aabec7b tempest-ServerActionsV293TestJSON-1711639707 tempest-ServerActionsV293TestJSON-1711639707-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4346caae827f4b83994ed1bc20cc97e1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e72113f50eaf49b4bef0f953da1e7216', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62070) authorize /opt/stack/nova/nova/policy.py:201}} [ 1153.250320] env[62070]: DEBUG nova.network.neutron [None req-c719f97e-2f0f-4457-aabc-89498aabec7b tempest-ServerActionsV293TestJSON-1711639707 tempest-ServerActionsV293TestJSON-1711639707-project-member] [instance: c2b65119-77ff-437b-8f7e-cf6e83d907bb] Successfully created port: 7e82bdd5-e879-4fcd-bb16-f0a1be82998b {{(pid=62070) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1153.262503] env[62070]: DEBUG oslo_vmware.api [None req-94d1c2d2-1965-464f-a9bf-5ad1d56ca955 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Task: {'id': task-1122544, 'name': ReconfigVM_Task, 'duration_secs': 1.447671} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1153.264863] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-94d1c2d2-1965-464f-a9bf-5ad1d56ca955 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] [instance: 20c4fabc-fc9b-49c7-ab28-fa092ad66038] Reconfigured VM instance instance-00000069 to attach disk [datastore1] 20c4fabc-fc9b-49c7-ab28-fa092ad66038/20c4fabc-fc9b-49c7-ab28-fa092ad66038.vmdk or device None with type streamOptimized {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1153.265988] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-94d1c2d2-1965-464f-a9bf-5ad1d56ca955 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] [instance: 20c4fabc-fc9b-49c7-ab28-fa092ad66038] Block device information present: {'root_device_name': '/dev/sda', 'image': [{'encryption_format': None, 'guest_format': None, 'size': 0, 'boot_index': 0, 'device_name': '/dev/sda', 'device_type': 'disk', 'encryption_secret_uuid': None, 'disk_bus': None, 'encrypted': False, 'encryption_options': None, 'image_type': None, 'image_id': '43ea607c-7ece-4601-9b11-75c6a16aa7dd'}], 'ephemerals': [], 'block_device_mapping': [{'guest_format': None, 'boot_index': None, 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-245524', 'volume_id': 'a9b04c95-bddc-41c3-99a5-c067625df8d2', 'name': 'volume-a9b04c95-bddc-41c3-99a5-c067625df8d2', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'attached', 'instance': '20c4fabc-fc9b-49c7-ab28-fa092ad66038', 'attached_at': '', 'detached_at': '', 'volume_id': 'a9b04c95-bddc-41c3-99a5-c067625df8d2', 'serial': 'a9b04c95-bddc-41c3-99a5-c067625df8d2'}, 'device_type': None, 'mount_device': '/dev/sdb', 'disk_bus': None, 'delete_on_termination': False, 'attachment_id': '5b24c746-e43b-478b-a770-80edc9d4c61f', 'volume_type': None}], 'swap': None} {{(pid=62070) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 1153.266218] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-94d1c2d2-1965-464f-a9bf-5ad1d56ca955 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] [instance: 20c4fabc-fc9b-49c7-ab28-fa092ad66038] Volume attach. 
Driver type: vmdk {{(pid=62070) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1153.266417] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-94d1c2d2-1965-464f-a9bf-5ad1d56ca955 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] [instance: 20c4fabc-fc9b-49c7-ab28-fa092ad66038] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-245524', 'volume_id': 'a9b04c95-bddc-41c3-99a5-c067625df8d2', 'name': 'volume-a9b04c95-bddc-41c3-99a5-c067625df8d2', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'attached', 'instance': '20c4fabc-fc9b-49c7-ab28-fa092ad66038', 'attached_at': '', 'detached_at': '', 'volume_id': 'a9b04c95-bddc-41c3-99a5-c067625df8d2', 'serial': 'a9b04c95-bddc-41c3-99a5-c067625df8d2'} {{(pid=62070) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1153.267234] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebab5d34-be45-4a3d-9f23-4c0e34e90716 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.282865] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a0acee3-a29a-470c-8753-cd629352a285 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.306558] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-94d1c2d2-1965-464f-a9bf-5ad1d56ca955 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] [instance: 20c4fabc-fc9b-49c7-ab28-fa092ad66038] Reconfiguring VM instance instance-00000069 to attach disk [datastore1] volume-a9b04c95-bddc-41c3-99a5-c067625df8d2/volume-a9b04c95-bddc-41c3-99a5-c067625df8d2.vmdk or device None with type thin {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1153.309139] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3a1a87ce-f455-442b-94d4-186d07ef4fa8 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.328653] env[62070]: DEBUG oslo_vmware.api [None req-94d1c2d2-1965-464f-a9bf-5ad1d56ca955 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Waiting for the task: (returnval){ [ 1153.328653] env[62070]: value = "task-1122545" [ 1153.328653] env[62070]: _type = "Task" [ 1153.328653] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1153.338799] env[62070]: DEBUG oslo_vmware.api [None req-94d1c2d2-1965-464f-a9bf-5ad1d56ca955 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Task: {'id': task-1122545, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1153.356742] env[62070]: DEBUG nova.network.neutron [req-8715e388-7e7e-4b6e-b1b3-06ce715a8c79 req-48229cbc-56d4-4077-be97-896699cd9f7d service nova] [instance: 9a37cf1a-fd25-48b9-923d-75a95857101b] Updated VIF entry in instance network info cache for port fe7b579a-99c3-40b9-a975-95ddca559b4d. 
{{(pid=62070) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1153.357227] env[62070]: DEBUG nova.network.neutron [req-8715e388-7e7e-4b6e-b1b3-06ce715a8c79 req-48229cbc-56d4-4077-be97-896699cd9f7d service nova] [instance: 9a37cf1a-fd25-48b9-923d-75a95857101b] Updating instance_info_cache with network_info: [{"id": "fe7b579a-99c3-40b9-a975-95ddca559b4d", "address": "fa:16:3e:72:7f:c3", "network": {"id": "1ca75409-ae3f-4837-a2b6-ed2445253336", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1883034081-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.208", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f29ac48ab6544ec0bd1d210aec05dbc5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1559ce49-7345-443f-bf02-4bfeb88356ef", "external-id": "nsx-vlan-transportzone-670", "segmentation_id": 670, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfe7b579a-99", "ovs_interfaceid": "fe7b579a-99c3-40b9-a975-95ddca559b4d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1153.398596] env[62070]: DEBUG nova.compute.manager [None req-c719f97e-2f0f-4457-aabc-89498aabec7b tempest-ServerActionsV293TestJSON-1711639707 tempest-ServerActionsV293TestJSON-1711639707-project-member] [instance: c2b65119-77ff-437b-8f7e-cf6e83d907bb] Start building block device mappings for instance. {{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1153.838902] env[62070]: DEBUG oslo_vmware.api [None req-94d1c2d2-1965-464f-a9bf-5ad1d56ca955 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Task: {'id': task-1122545, 'name': ReconfigVM_Task, 'duration_secs': 0.303234} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1153.839255] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-94d1c2d2-1965-464f-a9bf-5ad1d56ca955 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] [instance: 20c4fabc-fc9b-49c7-ab28-fa092ad66038] Reconfigured VM instance instance-00000069 to attach disk [datastore1] volume-a9b04c95-bddc-41c3-99a5-c067625df8d2/volume-a9b04c95-bddc-41c3-99a5-c067625df8d2.vmdk or device None with type thin {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1153.843991] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8d06539c-4bf1-4c9d-85b2-8cf6115fe315 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.858748] env[62070]: DEBUG oslo_vmware.api [None req-94d1c2d2-1965-464f-a9bf-5ad1d56ca955 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Waiting for the task: (returnval){ [ 1153.858748] env[62070]: value = "task-1122546" [ 1153.858748] env[62070]: _type = "Task" [ 1153.858748] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1153.859303] env[62070]: DEBUG oslo_concurrency.lockutils [req-8715e388-7e7e-4b6e-b1b3-06ce715a8c79 req-48229cbc-56d4-4077-be97-896699cd9f7d service nova] Releasing lock "refresh_cache-9a37cf1a-fd25-48b9-923d-75a95857101b" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1153.869460] env[62070]: DEBUG oslo_vmware.api [None req-94d1c2d2-1965-464f-a9bf-5ad1d56ca955 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Task: {'id': task-1122546, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1153.904273] env[62070]: INFO nova.virt.block_device [None req-c719f97e-2f0f-4457-aabc-89498aabec7b tempest-ServerActionsV293TestJSON-1711639707 tempest-ServerActionsV293TestJSON-1711639707-project-member] [instance: c2b65119-77ff-437b-8f7e-cf6e83d907bb] Booting with volume b510da93-2b86-4e5d-9ac1-6bde27407e94 at /dev/sda [ 1153.940605] env[62070]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b8022bd4-7f02-4ba9-9628-5b41378e4a8c {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.949807] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64757bbd-46d4-4654-9d24-e95fd668a9da {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.975958] env[62070]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-15929e7e-2bd3-4ed3-8fea-ca3dee2e1a43 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.984177] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-412abc54-18cc-451d-b033-6381a9b16f72 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.010018] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aab8b5a5-efe7-49bf-a20f-55433b5602a1 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.016276] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5c25a51-8e18-4c67-ba38-f4fdb15ce874 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.028644] env[62070]: DEBUG nova.virt.block_device [None req-c719f97e-2f0f-4457-aabc-89498aabec7b tempest-ServerActionsV293TestJSON-1711639707 tempest-ServerActionsV293TestJSON-1711639707-project-member] [instance: c2b65119-77ff-437b-8f7e-cf6e83d907bb] Updating existing volume attachment record: 66ea4a29-8d63-4e6e-9fe8-4e082f5fb36a {{(pid=62070) _volume_attach /opt/stack/nova/nova/virt/block_device.py:679}} [ 1154.368645] env[62070]: DEBUG oslo_vmware.api [None req-94d1c2d2-1965-464f-a9bf-5ad1d56ca955 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Task: {'id': task-1122546, 'name': ReconfigVM_Task, 'duration_secs': 0.141979} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1154.368955] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-94d1c2d2-1965-464f-a9bf-5ad1d56ca955 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] [instance: 20c4fabc-fc9b-49c7-ab28-fa092ad66038] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-245524', 'volume_id': 'a9b04c95-bddc-41c3-99a5-c067625df8d2', 'name': 'volume-a9b04c95-bddc-41c3-99a5-c067625df8d2', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'attached', 'instance': '20c4fabc-fc9b-49c7-ab28-fa092ad66038', 'attached_at': '', 'detached_at': '', 'volume_id': 'a9b04c95-bddc-41c3-99a5-c067625df8d2', 'serial': 'a9b04c95-bddc-41c3-99a5-c067625df8d2'} {{(pid=62070) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1154.369614] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-31506ff9-a0e9-4288-ba64-966d744e22c5 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.376375] env[62070]: DEBUG oslo_vmware.api [None req-94d1c2d2-1965-464f-a9bf-5ad1d56ca955 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Waiting for the task: (returnval){ [ 1154.376375] env[62070]: value = "task-1122547" [ 1154.376375] env[62070]: _type = "Task" [ 1154.376375] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1154.387718] env[62070]: DEBUG oslo_vmware.api [None req-94d1c2d2-1965-464f-a9bf-5ad1d56ca955 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Task: {'id': task-1122547, 'name': Rename_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1154.636904] env[62070]: DEBUG nova.compute.manager [req-e74e0a40-4693-4daa-a577-2bf72d613833 req-14a61240-9ac7-44f4-873a-61cea6142b2b service nova] [instance: c2b65119-77ff-437b-8f7e-cf6e83d907bb] Received event network-vif-plugged-7e82bdd5-e879-4fcd-bb16-f0a1be82998b {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1154.637328] env[62070]: DEBUG oslo_concurrency.lockutils [req-e74e0a40-4693-4daa-a577-2bf72d613833 req-14a61240-9ac7-44f4-873a-61cea6142b2b service nova] Acquiring lock "c2b65119-77ff-437b-8f7e-cf6e83d907bb-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1154.637527] env[62070]: DEBUG oslo_concurrency.lockutils [req-e74e0a40-4693-4daa-a577-2bf72d613833 req-14a61240-9ac7-44f4-873a-61cea6142b2b service nova] Lock "c2b65119-77ff-437b-8f7e-cf6e83d907bb-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1154.637735] env[62070]: DEBUG oslo_concurrency.lockutils [req-e74e0a40-4693-4daa-a577-2bf72d613833 req-14a61240-9ac7-44f4-873a-61cea6142b2b service nova] Lock "c2b65119-77ff-437b-8f7e-cf6e83d907bb-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1154.637865] env[62070]: DEBUG nova.compute.manager [req-e74e0a40-4693-4daa-a577-2bf72d613833 req-14a61240-9ac7-44f4-873a-61cea6142b2b service nova] [instance: c2b65119-77ff-437b-8f7e-cf6e83d907bb] No waiting events found dispatching network-vif-plugged-7e82bdd5-e879-4fcd-bb16-f0a1be82998b {{(pid=62070) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1154.638051] env[62070]: WARNING nova.compute.manager [req-e74e0a40-4693-4daa-a577-2bf72d613833 req-14a61240-9ac7-44f4-873a-61cea6142b2b service nova] [instance: c2b65119-77ff-437b-8f7e-cf6e83d907bb] Received unexpected event network-vif-plugged-7e82bdd5-e879-4fcd-bb16-f0a1be82998b for instance with vm_state building and task_state block_device_mapping. [ 1154.726024] env[62070]: DEBUG nova.network.neutron [None req-c719f97e-2f0f-4457-aabc-89498aabec7b tempest-ServerActionsV293TestJSON-1711639707 tempest-ServerActionsV293TestJSON-1711639707-project-member] [instance: c2b65119-77ff-437b-8f7e-cf6e83d907bb] Successfully updated port: 7e82bdd5-e879-4fcd-bb16-f0a1be82998b {{(pid=62070) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1154.886500] env[62070]: DEBUG oslo_vmware.api [None req-94d1c2d2-1965-464f-a9bf-5ad1d56ca955 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Task: {'id': task-1122547, 'name': Rename_Task} progress is 14%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1155.225614] env[62070]: DEBUG oslo_concurrency.lockutils [None req-c719f97e-2f0f-4457-aabc-89498aabec7b tempest-ServerActionsV293TestJSON-1711639707 tempest-ServerActionsV293TestJSON-1711639707-project-member] Acquiring lock "refresh_cache-c2b65119-77ff-437b-8f7e-cf6e83d907bb" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1155.225774] env[62070]: DEBUG oslo_concurrency.lockutils [None req-c719f97e-2f0f-4457-aabc-89498aabec7b tempest-ServerActionsV293TestJSON-1711639707 tempest-ServerActionsV293TestJSON-1711639707-project-member] Acquired lock "refresh_cache-c2b65119-77ff-437b-8f7e-cf6e83d907bb" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1155.225927] env[62070]: DEBUG nova.network.neutron [None req-c719f97e-2f0f-4457-aabc-89498aabec7b tempest-ServerActionsV293TestJSON-1711639707 tempest-ServerActionsV293TestJSON-1711639707-project-member] [instance: c2b65119-77ff-437b-8f7e-cf6e83d907bb] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1155.386626] env[62070]: DEBUG oslo_vmware.api [None req-94d1c2d2-1965-464f-a9bf-5ad1d56ca955 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Task: {'id': task-1122547, 'name': Rename_Task} progress is 99%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1155.755294] env[62070]: DEBUG nova.network.neutron [None req-c719f97e-2f0f-4457-aabc-89498aabec7b tempest-ServerActionsV293TestJSON-1711639707 tempest-ServerActionsV293TestJSON-1711639707-project-member] [instance: c2b65119-77ff-437b-8f7e-cf6e83d907bb] Instance cache missing network info. 
{{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1155.870521] env[62070]: DEBUG nova.network.neutron [None req-c719f97e-2f0f-4457-aabc-89498aabec7b tempest-ServerActionsV293TestJSON-1711639707 tempest-ServerActionsV293TestJSON-1711639707-project-member] [instance: c2b65119-77ff-437b-8f7e-cf6e83d907bb] Updating instance_info_cache with network_info: [{"id": "7e82bdd5-e879-4fcd-bb16-f0a1be82998b", "address": "fa:16:3e:b0:b9:4e", "network": {"id": "287f2df1-5ec3-4168-9fdb-db3b3fdc0f73", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-1787968104-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e72113f50eaf49b4bef0f953da1e7216", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "27e0a333-0cad-496c-8e6e-37a2edc97ac4", "external-id": "nsx-vlan-transportzone-83", "segmentation_id": 83, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7e82bdd5-e8", "ovs_interfaceid": "7e82bdd5-e879-4fcd-bb16-f0a1be82998b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1155.887187] env[62070]: DEBUG oslo_vmware.api [None req-94d1c2d2-1965-464f-a9bf-5ad1d56ca955 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Task: {'id': task-1122547, 'name': Rename_Task} progress is 99%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1156.110572] env[62070]: DEBUG nova.compute.manager [None req-c719f97e-2f0f-4457-aabc-89498aabec7b tempest-ServerActionsV293TestJSON-1711639707 tempest-ServerActionsV293TestJSON-1711639707-project-member] [instance: c2b65119-77ff-437b-8f7e-cf6e83d907bb] Start spawning the instance on the hypervisor. 
{{(pid=62070) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1156.111131] env[62070]: DEBUG nova.virt.hardware [None req-c719f97e-2f0f-4457-aabc-89498aabec7b tempest-ServerActionsV293TestJSON-1711639707 tempest-ServerActionsV293TestJSON-1711639707-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T09:21:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1156.111372] env[62070]: DEBUG nova.virt.hardware [None req-c719f97e-2f0f-4457-aabc-89498aabec7b tempest-ServerActionsV293TestJSON-1711639707 tempest-ServerActionsV293TestJSON-1711639707-project-member] Flavor limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1156.111559] env[62070]: DEBUG nova.virt.hardware [None req-c719f97e-2f0f-4457-aabc-89498aabec7b tempest-ServerActionsV293TestJSON-1711639707 tempest-ServerActionsV293TestJSON-1711639707-project-member] Image limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1156.111759] env[62070]: DEBUG nova.virt.hardware [None req-c719f97e-2f0f-4457-aabc-89498aabec7b tempest-ServerActionsV293TestJSON-1711639707 tempest-ServerActionsV293TestJSON-1711639707-project-member] Flavor pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1156.111914] env[62070]: DEBUG nova.virt.hardware [None req-c719f97e-2f0f-4457-aabc-89498aabec7b tempest-ServerActionsV293TestJSON-1711639707 tempest-ServerActionsV293TestJSON-1711639707-project-member] Image pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1156.112079] env[62070]: DEBUG nova.virt.hardware [None req-c719f97e-2f0f-4457-aabc-89498aabec7b tempest-ServerActionsV293TestJSON-1711639707 tempest-ServerActionsV293TestJSON-1711639707-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1156.112295] env[62070]: DEBUG nova.virt.hardware [None req-c719f97e-2f0f-4457-aabc-89498aabec7b tempest-ServerActionsV293TestJSON-1711639707 tempest-ServerActionsV293TestJSON-1711639707-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1156.112460] env[62070]: DEBUG nova.virt.hardware [None req-c719f97e-2f0f-4457-aabc-89498aabec7b tempest-ServerActionsV293TestJSON-1711639707 tempest-ServerActionsV293TestJSON-1711639707-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1156.112634] env[62070]: DEBUG nova.virt.hardware [None req-c719f97e-2f0f-4457-aabc-89498aabec7b tempest-ServerActionsV293TestJSON-1711639707 
tempest-ServerActionsV293TestJSON-1711639707-project-member] Got 1 possible topologies {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1156.112801] env[62070]: DEBUG nova.virt.hardware [None req-c719f97e-2f0f-4457-aabc-89498aabec7b tempest-ServerActionsV293TestJSON-1711639707 tempest-ServerActionsV293TestJSON-1711639707-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1156.112983] env[62070]: DEBUG nova.virt.hardware [None req-c719f97e-2f0f-4457-aabc-89498aabec7b tempest-ServerActionsV293TestJSON-1711639707 tempest-ServerActionsV293TestJSON-1711639707-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1156.113858] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c66e45b-71c8-4082-ad79-5824be8774f9 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.123697] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62e78646-4ec8-4747-80fa-5d3a1fc0989e {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.372963] env[62070]: DEBUG oslo_concurrency.lockutils [None req-c719f97e-2f0f-4457-aabc-89498aabec7b tempest-ServerActionsV293TestJSON-1711639707 tempest-ServerActionsV293TestJSON-1711639707-project-member] Releasing lock "refresh_cache-c2b65119-77ff-437b-8f7e-cf6e83d907bb" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1156.373300] env[62070]: DEBUG nova.compute.manager [None req-c719f97e-2f0f-4457-aabc-89498aabec7b tempest-ServerActionsV293TestJSON-1711639707 tempest-ServerActionsV293TestJSON-1711639707-project-member] [instance: c2b65119-77ff-437b-8f7e-cf6e83d907bb] Instance network_info: |[{"id": "7e82bdd5-e879-4fcd-bb16-f0a1be82998b", "address": "fa:16:3e:b0:b9:4e", "network": {"id": "287f2df1-5ec3-4168-9fdb-db3b3fdc0f73", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-1787968104-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e72113f50eaf49b4bef0f953da1e7216", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "27e0a333-0cad-496c-8e6e-37a2edc97ac4", "external-id": "nsx-vlan-transportzone-83", "segmentation_id": 83, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7e82bdd5-e8", "ovs_interfaceid": "7e82bdd5-e879-4fcd-bb16-f0a1be82998b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1156.373755] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-c719f97e-2f0f-4457-aabc-89498aabec7b tempest-ServerActionsV293TestJSON-1711639707 
tempest-ServerActionsV293TestJSON-1711639707-project-member] [instance: c2b65119-77ff-437b-8f7e-cf6e83d907bb] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b0:b9:4e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '27e0a333-0cad-496c-8e6e-37a2edc97ac4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7e82bdd5-e879-4fcd-bb16-f0a1be82998b', 'vif_model': 'vmxnet3'}] {{(pid=62070) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1156.381080] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-c719f97e-2f0f-4457-aabc-89498aabec7b tempest-ServerActionsV293TestJSON-1711639707 tempest-ServerActionsV293TestJSON-1711639707-project-member] Creating folder: Project (e72113f50eaf49b4bef0f953da1e7216). Parent ref: group-v245319. {{(pid=62070) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1156.381413] env[62070]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4ea8ae3e-8035-4e0b-9d99-307828d437ff {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.391448] env[62070]: DEBUG oslo_vmware.api [None req-94d1c2d2-1965-464f-a9bf-5ad1d56ca955 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Task: {'id': task-1122547, 'name': Rename_Task, 'duration_secs': 1.580435} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1156.391697] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-94d1c2d2-1965-464f-a9bf-5ad1d56ca955 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] [instance: 20c4fabc-fc9b-49c7-ab28-fa092ad66038] Powering on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1156.391922] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a513f4f9-f58c-4777-af9d-af89d7d7b32a {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.397858] env[62070]: DEBUG oslo_vmware.api [None req-94d1c2d2-1965-464f-a9bf-5ad1d56ca955 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Waiting for the task: (returnval){ [ 1156.397858] env[62070]: value = "task-1122549" [ 1156.397858] env[62070]: _type = "Task" [ 1156.397858] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1156.398535] env[62070]: WARNING suds.client [-] Web service reported a SOAP processing fault using an unexpected HTTP status code 200. Reporting as an internal server error. [ 1156.398678] env[62070]: DEBUG oslo_vmware.api [-] Fault list: [DuplicateName] {{(pid=62070) _invoke_api /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:337}} [ 1156.399341] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-c719f97e-2f0f-4457-aabc-89498aabec7b tempest-ServerActionsV293TestJSON-1711639707 tempest-ServerActionsV293TestJSON-1711639707-project-member] Folder already exists: Project (e72113f50eaf49b4bef0f953da1e7216). Parent ref: group-v245319. 
{{(pid=62070) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1156.399529] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-c719f97e-2f0f-4457-aabc-89498aabec7b tempest-ServerActionsV293TestJSON-1711639707 tempest-ServerActionsV293TestJSON-1711639707-project-member] Creating folder: Instances. Parent ref: group-v245520. {{(pid=62070) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1156.402150] env[62070]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8b939c8e-1556-4c80-9478-2ca187910470 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.408088] env[62070]: DEBUG oslo_vmware.api [None req-94d1c2d2-1965-464f-a9bf-5ad1d56ca955 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Task: {'id': task-1122549, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1156.411299] env[62070]: INFO nova.virt.vmwareapi.vm_util [None req-c719f97e-2f0f-4457-aabc-89498aabec7b tempest-ServerActionsV293TestJSON-1711639707 tempest-ServerActionsV293TestJSON-1711639707-project-member] Created folder: Instances in parent group-v245520. [ 1156.411547] env[62070]: DEBUG oslo.service.loopingcall [None req-c719f97e-2f0f-4457-aabc-89498aabec7b tempest-ServerActionsV293TestJSON-1711639707 tempest-ServerActionsV293TestJSON-1711639707-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1156.411734] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c2b65119-77ff-437b-8f7e-cf6e83d907bb] Creating VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1156.411918] env[62070]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1d994569-db99-49ec-bb59-76751dae2bd0 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.429090] env[62070]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1156.429090] env[62070]: value = "task-1122551" [ 1156.429090] env[62070]: _type = "Task" [ 1156.429090] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1156.435994] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1122551, 'name': CreateVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1156.664602] env[62070]: DEBUG nova.compute.manager [req-fba4bcff-603d-4a45-ba84-ae77aa7efc9f req-6cd64f87-8d01-4db5-ad3c-94d46038b413 service nova] [instance: c2b65119-77ff-437b-8f7e-cf6e83d907bb] Received event network-changed-7e82bdd5-e879-4fcd-bb16-f0a1be82998b {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1156.664772] env[62070]: DEBUG nova.compute.manager [req-fba4bcff-603d-4a45-ba84-ae77aa7efc9f req-6cd64f87-8d01-4db5-ad3c-94d46038b413 service nova] [instance: c2b65119-77ff-437b-8f7e-cf6e83d907bb] Refreshing instance network info cache due to event network-changed-7e82bdd5-e879-4fcd-bb16-f0a1be82998b. 
{{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1156.665034] env[62070]: DEBUG oslo_concurrency.lockutils [req-fba4bcff-603d-4a45-ba84-ae77aa7efc9f req-6cd64f87-8d01-4db5-ad3c-94d46038b413 service nova] Acquiring lock "refresh_cache-c2b65119-77ff-437b-8f7e-cf6e83d907bb" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1156.665215] env[62070]: DEBUG oslo_concurrency.lockutils [req-fba4bcff-603d-4a45-ba84-ae77aa7efc9f req-6cd64f87-8d01-4db5-ad3c-94d46038b413 service nova] Acquired lock "refresh_cache-c2b65119-77ff-437b-8f7e-cf6e83d907bb" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1156.665403] env[62070]: DEBUG nova.network.neutron [req-fba4bcff-603d-4a45-ba84-ae77aa7efc9f req-6cd64f87-8d01-4db5-ad3c-94d46038b413 service nova] [instance: c2b65119-77ff-437b-8f7e-cf6e83d907bb] Refreshing network info cache for port 7e82bdd5-e879-4fcd-bb16-f0a1be82998b {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1156.908711] env[62070]: DEBUG oslo_vmware.api [None req-94d1c2d2-1965-464f-a9bf-5ad1d56ca955 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Task: {'id': task-1122549, 'name': PowerOnVM_Task, 'duration_secs': 0.445705} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1156.909106] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-94d1c2d2-1965-464f-a9bf-5ad1d56ca955 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] [instance: 20c4fabc-fc9b-49c7-ab28-fa092ad66038] Powered on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1156.938700] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1122551, 'name': CreateVM_Task, 'duration_secs': 0.292656} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1156.938882] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c2b65119-77ff-437b-8f7e-cf6e83d907bb] Created VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1156.939582] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-c719f97e-2f0f-4457-aabc-89498aabec7b tempest-ServerActionsV293TestJSON-1711639707 tempest-ServerActionsV293TestJSON-1711639707-project-member] [instance: c2b65119-77ff-437b-8f7e-cf6e83d907bb] Block device information present: {'root_device_name': '/dev/sda', 'image': [], 'ephemerals': [], 'block_device_mapping': [{'guest_format': None, 'boot_index': 0, 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-245525', 'volume_id': 'b510da93-2b86-4e5d-9ac1-6bde27407e94', 'name': 'volume-b510da93-2b86-4e5d-9ac1-6bde27407e94', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'c2b65119-77ff-437b-8f7e-cf6e83d907bb', 'attached_at': '', 'detached_at': '', 'volume_id': 'b510da93-2b86-4e5d-9ac1-6bde27407e94', 'serial': 'b510da93-2b86-4e5d-9ac1-6bde27407e94'}, 'device_type': None, 'mount_device': '/dev/sda', 'disk_bus': None, 'delete_on_termination': True, 'attachment_id': '66ea4a29-8d63-4e6e-9fe8-4e082f5fb36a', 'volume_type': None}], 'swap': None} {{(pid=62070) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 1156.939800] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-c719f97e-2f0f-4457-aabc-89498aabec7b tempest-ServerActionsV293TestJSON-1711639707 tempest-ServerActionsV293TestJSON-1711639707-project-member] [instance: c2b65119-77ff-437b-8f7e-cf6e83d907bb] Root volume attach. Driver type: vmdk {{(pid=62070) attach_root_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:661}} [ 1156.940606] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e21b34e6-f70c-4364-a174-a37f00781cb5 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.948635] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec281cab-b24a-46db-94ca-789eddabd96c {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.954647] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84108c0c-1119-4029-b7b7-8c6b37a47950 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.960419] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.RelocateVM_Task with opID=oslo.vmware-fcf3a204-bc02-47c1-85a6-97d7f3f8b668 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.967296] env[62070]: DEBUG oslo_vmware.api [None req-c719f97e-2f0f-4457-aabc-89498aabec7b tempest-ServerActionsV293TestJSON-1711639707 tempest-ServerActionsV293TestJSON-1711639707-project-member] Waiting for the task: (returnval){ [ 1156.967296] env[62070]: value = "task-1122552" [ 1156.967296] env[62070]: _type = "Task" [ 1156.967296] env[62070]: } to complete. 
{{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1156.975438] env[62070]: DEBUG oslo_vmware.api [None req-c719f97e-2f0f-4457-aabc-89498aabec7b tempest-ServerActionsV293TestJSON-1711639707 tempest-ServerActionsV293TestJSON-1711639707-project-member] Task: {'id': task-1122552, 'name': RelocateVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1157.013893] env[62070]: DEBUG nova.compute.manager [None req-94d1c2d2-1965-464f-a9bf-5ad1d56ca955 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] [instance: 20c4fabc-fc9b-49c7-ab28-fa092ad66038] Checking state {{(pid=62070) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1157.014820] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33aa2e80-3570-4f7d-9bb3-b678817c5170 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.437150] env[62070]: DEBUG nova.network.neutron [req-fba4bcff-603d-4a45-ba84-ae77aa7efc9f req-6cd64f87-8d01-4db5-ad3c-94d46038b413 service nova] [instance: c2b65119-77ff-437b-8f7e-cf6e83d907bb] Updated VIF entry in instance network info cache for port 7e82bdd5-e879-4fcd-bb16-f0a1be82998b. {{(pid=62070) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1157.437695] env[62070]: DEBUG nova.network.neutron [req-fba4bcff-603d-4a45-ba84-ae77aa7efc9f req-6cd64f87-8d01-4db5-ad3c-94d46038b413 service nova] [instance: c2b65119-77ff-437b-8f7e-cf6e83d907bb] Updating instance_info_cache with network_info: [{"id": "7e82bdd5-e879-4fcd-bb16-f0a1be82998b", "address": "fa:16:3e:b0:b9:4e", "network": {"id": "287f2df1-5ec3-4168-9fdb-db3b3fdc0f73", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-1787968104-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e72113f50eaf49b4bef0f953da1e7216", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "27e0a333-0cad-496c-8e6e-37a2edc97ac4", "external-id": "nsx-vlan-transportzone-83", "segmentation_id": 83, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7e82bdd5-e8", "ovs_interfaceid": "7e82bdd5-e879-4fcd-bb16-f0a1be82998b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1157.477021] env[62070]: DEBUG oslo_vmware.api [None req-c719f97e-2f0f-4457-aabc-89498aabec7b tempest-ServerActionsV293TestJSON-1711639707 tempest-ServerActionsV293TestJSON-1711639707-project-member] Task: {'id': task-1122552, 'name': RelocateVM_Task} progress is 43%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1157.534313] env[62070]: DEBUG oslo_concurrency.lockutils [None req-94d1c2d2-1965-464f-a9bf-5ad1d56ca955 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Lock "20c4fabc-fc9b-49c7-ab28-fa092ad66038" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 28.425s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1157.941065] env[62070]: DEBUG oslo_concurrency.lockutils [req-fba4bcff-603d-4a45-ba84-ae77aa7efc9f req-6cd64f87-8d01-4db5-ad3c-94d46038b413 service nova] Releasing lock "refresh_cache-c2b65119-77ff-437b-8f7e-cf6e83d907bb" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1157.978697] env[62070]: DEBUG oslo_vmware.api [None req-c719f97e-2f0f-4457-aabc-89498aabec7b tempest-ServerActionsV293TestJSON-1711639707 tempest-ServerActionsV293TestJSON-1711639707-project-member] Task: {'id': task-1122552, 'name': RelocateVM_Task} progress is 58%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1158.479716] env[62070]: DEBUG oslo_vmware.api [None req-c719f97e-2f0f-4457-aabc-89498aabec7b tempest-ServerActionsV293TestJSON-1711639707 tempest-ServerActionsV293TestJSON-1711639707-project-member] Task: {'id': task-1122552, 'name': RelocateVM_Task} progress is 73%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1158.980434] env[62070]: DEBUG oslo_vmware.api [None req-c719f97e-2f0f-4457-aabc-89498aabec7b tempest-ServerActionsV293TestJSON-1711639707 tempest-ServerActionsV293TestJSON-1711639707-project-member] Task: {'id': task-1122552, 'name': RelocateVM_Task} progress is 86%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1159.387678] env[62070]: DEBUG oslo_concurrency.lockutils [None req-438280da-957e-4809-a07b-b49bafb64711 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Acquiring lock "8d3cc6bf-5ae8-45d8-ba70-269e0d290ee4" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1159.387962] env[62070]: DEBUG oslo_concurrency.lockutils [None req-438280da-957e-4809-a07b-b49bafb64711 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Lock "8d3cc6bf-5ae8-45d8-ba70-269e0d290ee4" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1159.480422] env[62070]: DEBUG oslo_vmware.api [None req-c719f97e-2f0f-4457-aabc-89498aabec7b tempest-ServerActionsV293TestJSON-1711639707 tempest-ServerActionsV293TestJSON-1711639707-project-member] Task: {'id': task-1122552, 'name': RelocateVM_Task} progress is 97%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1159.892069] env[62070]: DEBUG nova.compute.utils [None req-438280da-957e-4809-a07b-b49bafb64711 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Using /dev/sd instead of None {{(pid=62070) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1159.985071] env[62070]: DEBUG oslo_vmware.api [None req-c719f97e-2f0f-4457-aabc-89498aabec7b tempest-ServerActionsV293TestJSON-1711639707 tempest-ServerActionsV293TestJSON-1711639707-project-member] Task: {'id': task-1122552, 'name': RelocateVM_Task} progress is 97%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1160.394749] env[62070]: DEBUG oslo_concurrency.lockutils [None req-438280da-957e-4809-a07b-b49bafb64711 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Lock "8d3cc6bf-5ae8-45d8-ba70-269e0d290ee4" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.007s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1160.481784] env[62070]: DEBUG oslo_vmware.api [None req-c719f97e-2f0f-4457-aabc-89498aabec7b tempest-ServerActionsV293TestJSON-1711639707 tempest-ServerActionsV293TestJSON-1711639707-project-member] Task: {'id': task-1122552, 'name': RelocateVM_Task} progress is 98%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1160.982014] env[62070]: DEBUG oslo_vmware.api [None req-c719f97e-2f0f-4457-aabc-89498aabec7b tempest-ServerActionsV293TestJSON-1711639707 tempest-ServerActionsV293TestJSON-1711639707-project-member] Task: {'id': task-1122552, 'name': RelocateVM_Task, 'duration_secs': 3.987897} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1160.982334] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-c719f97e-2f0f-4457-aabc-89498aabec7b tempest-ServerActionsV293TestJSON-1711639707 tempest-ServerActionsV293TestJSON-1711639707-project-member] [instance: c2b65119-77ff-437b-8f7e-cf6e83d907bb] Volume attach. 
Driver type: vmdk {{(pid=62070) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1160.982542] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-c719f97e-2f0f-4457-aabc-89498aabec7b tempest-ServerActionsV293TestJSON-1711639707 tempest-ServerActionsV293TestJSON-1711639707-project-member] [instance: c2b65119-77ff-437b-8f7e-cf6e83d907bb] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-245525', 'volume_id': 'b510da93-2b86-4e5d-9ac1-6bde27407e94', 'name': 'volume-b510da93-2b86-4e5d-9ac1-6bde27407e94', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'c2b65119-77ff-437b-8f7e-cf6e83d907bb', 'attached_at': '', 'detached_at': '', 'volume_id': 'b510da93-2b86-4e5d-9ac1-6bde27407e94', 'serial': 'b510da93-2b86-4e5d-9ac1-6bde27407e94'} {{(pid=62070) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1160.983317] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e65156d-7669-4606-a31d-ebf7201047ef {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.000095] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d32323f0-32be-4ca7-88f1-2715c30b968b {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.021232] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-c719f97e-2f0f-4457-aabc-89498aabec7b tempest-ServerActionsV293TestJSON-1711639707 tempest-ServerActionsV293TestJSON-1711639707-project-member] [instance: c2b65119-77ff-437b-8f7e-cf6e83d907bb] Reconfiguring VM instance instance-0000006e to attach disk [datastore2] volume-b510da93-2b86-4e5d-9ac1-6bde27407e94/volume-b510da93-2b86-4e5d-9ac1-6bde27407e94.vmdk or device None with type thin {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1161.021496] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3e1e84c8-921b-456f-a128-6ce1524329e6 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.040213] env[62070]: DEBUG oslo_vmware.api [None req-c719f97e-2f0f-4457-aabc-89498aabec7b tempest-ServerActionsV293TestJSON-1711639707 tempest-ServerActionsV293TestJSON-1711639707-project-member] Waiting for the task: (returnval){ [ 1161.040213] env[62070]: value = "task-1122553" [ 1161.040213] env[62070]: _type = "Task" [ 1161.040213] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1161.047345] env[62070]: DEBUG oslo_vmware.api [None req-c719f97e-2f0f-4457-aabc-89498aabec7b tempest-ServerActionsV293TestJSON-1711639707 tempest-ServerActionsV293TestJSON-1711639707-project-member] Task: {'id': task-1122553, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1161.456976] env[62070]: DEBUG oslo_concurrency.lockutils [None req-438280da-957e-4809-a07b-b49bafb64711 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Acquiring lock "8d3cc6bf-5ae8-45d8-ba70-269e0d290ee4" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1161.456976] env[62070]: DEBUG oslo_concurrency.lockutils [None req-438280da-957e-4809-a07b-b49bafb64711 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Lock "8d3cc6bf-5ae8-45d8-ba70-269e0d290ee4" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1161.456976] env[62070]: INFO nova.compute.manager [None req-438280da-957e-4809-a07b-b49bafb64711 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] [instance: 8d3cc6bf-5ae8-45d8-ba70-269e0d290ee4] Attaching volume 4e71ebf7-c7f2-42ea-999e-8d6b06ae2d8a to /dev/sdb [ 1161.488241] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c050eac3-9168-45c9-807b-90316647ec6e {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.495380] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f85124e-1710-4595-9b2e-19e5df1d475b {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.508833] env[62070]: DEBUG nova.virt.block_device [None req-438280da-957e-4809-a07b-b49bafb64711 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] [instance: 8d3cc6bf-5ae8-45d8-ba70-269e0d290ee4] Updating existing volume attachment record: 7c84120c-627d-40d0-91c9-a41f6ffc5feb {{(pid=62070) _volume_attach /opt/stack/nova/nova/virt/block_device.py:679}} [ 1161.549923] env[62070]: DEBUG oslo_vmware.api [None req-c719f97e-2f0f-4457-aabc-89498aabec7b tempest-ServerActionsV293TestJSON-1711639707 tempest-ServerActionsV293TestJSON-1711639707-project-member] Task: {'id': task-1122553, 'name': ReconfigVM_Task, 'duration_secs': 0.394871} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1161.550276] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-c719f97e-2f0f-4457-aabc-89498aabec7b tempest-ServerActionsV293TestJSON-1711639707 tempest-ServerActionsV293TestJSON-1711639707-project-member] [instance: c2b65119-77ff-437b-8f7e-cf6e83d907bb] Reconfigured VM instance instance-0000006e to attach disk [datastore2] volume-b510da93-2b86-4e5d-9ac1-6bde27407e94/volume-b510da93-2b86-4e5d-9ac1-6bde27407e94.vmdk or device None with type thin {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1161.554873] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-01e9377b-fa0c-4d5c-ae8f-4e110102d96e {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.569228] env[62070]: DEBUG oslo_vmware.api [None req-c719f97e-2f0f-4457-aabc-89498aabec7b tempest-ServerActionsV293TestJSON-1711639707 tempest-ServerActionsV293TestJSON-1711639707-project-member] Waiting for the task: (returnval){ [ 1161.569228] env[62070]: value = "task-1122554" [ 1161.569228] env[62070]: _type = "Task" [ 1161.569228] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1161.576614] env[62070]: DEBUG oslo_vmware.api [None req-c719f97e-2f0f-4457-aabc-89498aabec7b tempest-ServerActionsV293TestJSON-1711639707 tempest-ServerActionsV293TestJSON-1711639707-project-member] Task: {'id': task-1122554, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1162.078465] env[62070]: DEBUG oslo_vmware.api [None req-c719f97e-2f0f-4457-aabc-89498aabec7b tempest-ServerActionsV293TestJSON-1711639707 tempest-ServerActionsV293TestJSON-1711639707-project-member] Task: {'id': task-1122554, 'name': ReconfigVM_Task, 'duration_secs': 0.150693} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1162.078808] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-c719f97e-2f0f-4457-aabc-89498aabec7b tempest-ServerActionsV293TestJSON-1711639707 tempest-ServerActionsV293TestJSON-1711639707-project-member] [instance: c2b65119-77ff-437b-8f7e-cf6e83d907bb] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-245525', 'volume_id': 'b510da93-2b86-4e5d-9ac1-6bde27407e94', 'name': 'volume-b510da93-2b86-4e5d-9ac1-6bde27407e94', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'c2b65119-77ff-437b-8f7e-cf6e83d907bb', 'attached_at': '', 'detached_at': '', 'volume_id': 'b510da93-2b86-4e5d-9ac1-6bde27407e94', 'serial': 'b510da93-2b86-4e5d-9ac1-6bde27407e94'} {{(pid=62070) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1162.079318] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-87d1ee0f-a8a2-40c4-9563-91d231d00179 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1162.085946] env[62070]: DEBUG oslo_vmware.api [None req-c719f97e-2f0f-4457-aabc-89498aabec7b tempest-ServerActionsV293TestJSON-1711639707 tempest-ServerActionsV293TestJSON-1711639707-project-member] Waiting for the task: (returnval){ [ 1162.085946] env[62070]: value = "task-1122556" [ 1162.085946] env[62070]: _type = "Task" [ 1162.085946] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1162.092955] env[62070]: DEBUG oslo_vmware.api [None req-c719f97e-2f0f-4457-aabc-89498aabec7b tempest-ServerActionsV293TestJSON-1711639707 tempest-ServerActionsV293TestJSON-1711639707-project-member] Task: {'id': task-1122556, 'name': Rename_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1162.595948] env[62070]: DEBUG oslo_vmware.api [None req-c719f97e-2f0f-4457-aabc-89498aabec7b tempest-ServerActionsV293TestJSON-1711639707 tempest-ServerActionsV293TestJSON-1711639707-project-member] Task: {'id': task-1122556, 'name': Rename_Task, 'duration_secs': 0.121736} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1162.596256] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-c719f97e-2f0f-4457-aabc-89498aabec7b tempest-ServerActionsV293TestJSON-1711639707 tempest-ServerActionsV293TestJSON-1711639707-project-member] [instance: c2b65119-77ff-437b-8f7e-cf6e83d907bb] Powering on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1162.596498] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2392d8b1-2265-4ce5-a030-5389eec274a7 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1162.603019] env[62070]: DEBUG oslo_vmware.api [None req-c719f97e-2f0f-4457-aabc-89498aabec7b tempest-ServerActionsV293TestJSON-1711639707 tempest-ServerActionsV293TestJSON-1711639707-project-member] Waiting for the task: (returnval){ [ 1162.603019] env[62070]: value = "task-1122557" [ 1162.603019] env[62070]: _type = "Task" [ 1162.603019] env[62070]: } to complete. 
{{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1162.609887] env[62070]: DEBUG oslo_vmware.api [None req-c719f97e-2f0f-4457-aabc-89498aabec7b tempest-ServerActionsV293TestJSON-1711639707 tempest-ServerActionsV293TestJSON-1711639707-project-member] Task: {'id': task-1122557, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1163.112865] env[62070]: DEBUG oslo_vmware.api [None req-c719f97e-2f0f-4457-aabc-89498aabec7b tempest-ServerActionsV293TestJSON-1711639707 tempest-ServerActionsV293TestJSON-1711639707-project-member] Task: {'id': task-1122557, 'name': PowerOnVM_Task, 'duration_secs': 0.459638} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1163.113174] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-c719f97e-2f0f-4457-aabc-89498aabec7b tempest-ServerActionsV293TestJSON-1711639707 tempest-ServerActionsV293TestJSON-1711639707-project-member] [instance: c2b65119-77ff-437b-8f7e-cf6e83d907bb] Powered on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1163.113409] env[62070]: INFO nova.compute.manager [None req-c719f97e-2f0f-4457-aabc-89498aabec7b tempest-ServerActionsV293TestJSON-1711639707 tempest-ServerActionsV293TestJSON-1711639707-project-member] [instance: c2b65119-77ff-437b-8f7e-cf6e83d907bb] Took 7.00 seconds to spawn the instance on the hypervisor. [ 1163.113710] env[62070]: DEBUG nova.compute.manager [None req-c719f97e-2f0f-4457-aabc-89498aabec7b tempest-ServerActionsV293TestJSON-1711639707 tempest-ServerActionsV293TestJSON-1711639707-project-member] [instance: c2b65119-77ff-437b-8f7e-cf6e83d907bb] Checking state {{(pid=62070) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1163.114590] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29329a87-874b-4db0-a673-b05fd67fa316 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.630640] env[62070]: INFO nova.compute.manager [None req-c719f97e-2f0f-4457-aabc-89498aabec7b tempest-ServerActionsV293TestJSON-1711639707 tempest-ServerActionsV293TestJSON-1711639707-project-member] [instance: c2b65119-77ff-437b-8f7e-cf6e83d907bb] Took 13.43 seconds to build instance. 
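The PowerOnVM_Task, RelocateVM_Task and ReconfigVM_Task exchanges recorded above all follow the same oslo.vmware call pattern: invoke the asynchronous vSphere task, then block in wait_for_task while _poll_task reports progress until the task completes. A minimal sketch of that pattern, assuming an already-established oslo_vmware.api.VMwareAPISession (`session`) and a VM managed-object reference (`vm_ref`) — both placeholders for illustration, not values taken from this log:

    def power_on_and_wait(session, vm_ref):
        # Ask vCenter to start the asynchronous power-on; invoke_api returns the
        # task managed-object reference immediately (the "Invoking
        # VirtualMachine.PowerOnVM_Task" lines above).
        task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
        # wait_for_task polls the task state server-side (the "_poll_task ...
        # progress is N%" lines) and returns once the task reaches 'success',
        # raising if it ends in 'error'.
        return session.wait_for_task(task)

The same invoke-then-wait shape applies to the disk reconfigure and relocate calls earlier in this trace; only the SOAP method name and its arguments change.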
[ 1164.132355] env[62070]: DEBUG oslo_concurrency.lockutils [None req-c719f97e-2f0f-4457-aabc-89498aabec7b tempest-ServerActionsV293TestJSON-1711639707 tempest-ServerActionsV293TestJSON-1711639707-project-member] Lock "c2b65119-77ff-437b-8f7e-cf6e83d907bb" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.944s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1164.742308] env[62070]: DEBUG nova.compute.manager [req-9d97e69f-ddf4-492d-a26d-ff297549e3f0 req-799a21f3-267c-4ecb-b8c8-613f25d9fa12 service nova] [instance: c2b65119-77ff-437b-8f7e-cf6e83d907bb] Received event network-changed-7e82bdd5-e879-4fcd-bb16-f0a1be82998b {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1164.742455] env[62070]: DEBUG nova.compute.manager [req-9d97e69f-ddf4-492d-a26d-ff297549e3f0 req-799a21f3-267c-4ecb-b8c8-613f25d9fa12 service nova] [instance: c2b65119-77ff-437b-8f7e-cf6e83d907bb] Refreshing instance network info cache due to event network-changed-7e82bdd5-e879-4fcd-bb16-f0a1be82998b. {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1164.742647] env[62070]: DEBUG oslo_concurrency.lockutils [req-9d97e69f-ddf4-492d-a26d-ff297549e3f0 req-799a21f3-267c-4ecb-b8c8-613f25d9fa12 service nova] Acquiring lock "refresh_cache-c2b65119-77ff-437b-8f7e-cf6e83d907bb" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1164.742867] env[62070]: DEBUG oslo_concurrency.lockutils [req-9d97e69f-ddf4-492d-a26d-ff297549e3f0 req-799a21f3-267c-4ecb-b8c8-613f25d9fa12 service nova] Acquired lock "refresh_cache-c2b65119-77ff-437b-8f7e-cf6e83d907bb" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1164.742943] env[62070]: DEBUG nova.network.neutron [req-9d97e69f-ddf4-492d-a26d-ff297549e3f0 req-799a21f3-267c-4ecb-b8c8-613f25d9fa12 service nova] [instance: c2b65119-77ff-437b-8f7e-cf6e83d907bb] Refreshing network info cache for port 7e82bdd5-e879-4fcd-bb16-f0a1be82998b {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1165.470345] env[62070]: DEBUG nova.network.neutron [req-9d97e69f-ddf4-492d-a26d-ff297549e3f0 req-799a21f3-267c-4ecb-b8c8-613f25d9fa12 service nova] [instance: c2b65119-77ff-437b-8f7e-cf6e83d907bb] Updated VIF entry in instance network info cache for port 7e82bdd5-e879-4fcd-bb16-f0a1be82998b. 
{{(pid=62070) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1165.470741] env[62070]: DEBUG nova.network.neutron [req-9d97e69f-ddf4-492d-a26d-ff297549e3f0 req-799a21f3-267c-4ecb-b8c8-613f25d9fa12 service nova] [instance: c2b65119-77ff-437b-8f7e-cf6e83d907bb] Updating instance_info_cache with network_info: [{"id": "7e82bdd5-e879-4fcd-bb16-f0a1be82998b", "address": "fa:16:3e:b0:b9:4e", "network": {"id": "287f2df1-5ec3-4168-9fdb-db3b3fdc0f73", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-1787968104-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.191", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e72113f50eaf49b4bef0f953da1e7216", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "27e0a333-0cad-496c-8e6e-37a2edc97ac4", "external-id": "nsx-vlan-transportzone-83", "segmentation_id": 83, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7e82bdd5-e8", "ovs_interfaceid": "7e82bdd5-e879-4fcd-bb16-f0a1be82998b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1165.974272] env[62070]: DEBUG oslo_concurrency.lockutils [req-9d97e69f-ddf4-492d-a26d-ff297549e3f0 req-799a21f3-267c-4ecb-b8c8-613f25d9fa12 service nova] Releasing lock "refresh_cache-c2b65119-77ff-437b-8f7e-cf6e83d907bb" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1166.054574] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-438280da-957e-4809-a07b-b49bafb64711 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] [instance: 8d3cc6bf-5ae8-45d8-ba70-269e0d290ee4] Volume attach. 
Driver type: vmdk {{(pid=62070) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1166.054833] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-438280da-957e-4809-a07b-b49bafb64711 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] [instance: 8d3cc6bf-5ae8-45d8-ba70-269e0d290ee4] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-245531', 'volume_id': '4e71ebf7-c7f2-42ea-999e-8d6b06ae2d8a', 'name': 'volume-4e71ebf7-c7f2-42ea-999e-8d6b06ae2d8a', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '8d3cc6bf-5ae8-45d8-ba70-269e0d290ee4', 'attached_at': '', 'detached_at': '', 'volume_id': '4e71ebf7-c7f2-42ea-999e-8d6b06ae2d8a', 'serial': '4e71ebf7-c7f2-42ea-999e-8d6b06ae2d8a'} {{(pid=62070) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1166.055771] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ff0b336-0b6e-4b29-bbad-15f634393725 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.072059] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10c9441b-d65d-487b-9624-0e3c4e9e83ee {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.095905] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-438280da-957e-4809-a07b-b49bafb64711 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] [instance: 8d3cc6bf-5ae8-45d8-ba70-269e0d290ee4] Reconfiguring VM instance instance-0000006c to attach disk [datastore2] volume-4e71ebf7-c7f2-42ea-999e-8d6b06ae2d8a/volume-4e71ebf7-c7f2-42ea-999e-8d6b06ae2d8a.vmdk or device None with type thin {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1166.096188] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bbad048e-0d98-42fa-bc20-c3aabca24deb {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.113667] env[62070]: DEBUG oslo_vmware.api [None req-438280da-957e-4809-a07b-b49bafb64711 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Waiting for the task: (returnval){ [ 1166.113667] env[62070]: value = "task-1122559" [ 1166.113667] env[62070]: _type = "Task" [ 1166.113667] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1166.121185] env[62070]: DEBUG oslo_vmware.api [None req-438280da-957e-4809-a07b-b49bafb64711 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Task: {'id': task-1122559, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1166.623504] env[62070]: DEBUG oslo_vmware.api [None req-438280da-957e-4809-a07b-b49bafb64711 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Task: {'id': task-1122559, 'name': ReconfigVM_Task, 'duration_secs': 0.380737} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1166.623849] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-438280da-957e-4809-a07b-b49bafb64711 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] [instance: 8d3cc6bf-5ae8-45d8-ba70-269e0d290ee4] Reconfigured VM instance instance-0000006c to attach disk [datastore2] volume-4e71ebf7-c7f2-42ea-999e-8d6b06ae2d8a/volume-4e71ebf7-c7f2-42ea-999e-8d6b06ae2d8a.vmdk or device None with type thin {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1166.628673] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8c3982db-3caf-48e2-afae-61267078491e {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.643998] env[62070]: DEBUG oslo_vmware.api [None req-438280da-957e-4809-a07b-b49bafb64711 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Waiting for the task: (returnval){ [ 1166.643998] env[62070]: value = "task-1122560" [ 1166.643998] env[62070]: _type = "Task" [ 1166.643998] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1166.651916] env[62070]: DEBUG oslo_vmware.api [None req-438280da-957e-4809-a07b-b49bafb64711 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Task: {'id': task-1122560, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1167.153167] env[62070]: DEBUG oslo_vmware.api [None req-438280da-957e-4809-a07b-b49bafb64711 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Task: {'id': task-1122560, 'name': ReconfigVM_Task, 'duration_secs': 0.30454} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1167.153478] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-438280da-957e-4809-a07b-b49bafb64711 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] [instance: 8d3cc6bf-5ae8-45d8-ba70-269e0d290ee4] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-245531', 'volume_id': '4e71ebf7-c7f2-42ea-999e-8d6b06ae2d8a', 'name': 'volume-4e71ebf7-c7f2-42ea-999e-8d6b06ae2d8a', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '8d3cc6bf-5ae8-45d8-ba70-269e0d290ee4', 'attached_at': '', 'detached_at': '', 'volume_id': '4e71ebf7-c7f2-42ea-999e-8d6b06ae2d8a', 'serial': '4e71ebf7-c7f2-42ea-999e-8d6b06ae2d8a'} {{(pid=62070) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1168.192953] env[62070]: DEBUG nova.objects.instance [None req-438280da-957e-4809-a07b-b49bafb64711 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Lazy-loading 'flavor' on Instance uuid 8d3cc6bf-5ae8-45d8-ba70-269e0d290ee4 {{(pid=62070) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1168.698022] env[62070]: DEBUG oslo_concurrency.lockutils [None req-438280da-957e-4809-a07b-b49bafb64711 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Lock "8d3cc6bf-5ae8-45d8-ba70-269e0d290ee4" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.241s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1168.904777] env[62070]: DEBUG oslo_concurrency.lockutils [None req-c1ce9b36-af36-44fc-8730-d99fe6048eb8 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Acquiring lock "8d3cc6bf-5ae8-45d8-ba70-269e0d290ee4" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1168.905055] env[62070]: DEBUG oslo_concurrency.lockutils [None req-c1ce9b36-af36-44fc-8730-d99fe6048eb8 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Lock "8d3cc6bf-5ae8-45d8-ba70-269e0d290ee4" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1169.408284] env[62070]: INFO nova.compute.manager [None req-c1ce9b36-af36-44fc-8730-d99fe6048eb8 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] [instance: 8d3cc6bf-5ae8-45d8-ba70-269e0d290ee4] Detaching volume 4e71ebf7-c7f2-42ea-999e-8d6b06ae2d8a [ 1169.438630] env[62070]: INFO nova.virt.block_device [None req-c1ce9b36-af36-44fc-8730-d99fe6048eb8 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] [instance: 8d3cc6bf-5ae8-45d8-ba70-269e0d290ee4] Attempting to driver detach volume 4e71ebf7-c7f2-42ea-999e-8d6b06ae2d8a from mountpoint /dev/sdb [ 1169.438927] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-c1ce9b36-af36-44fc-8730-d99fe6048eb8 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] [instance: 
8d3cc6bf-5ae8-45d8-ba70-269e0d290ee4] Volume detach. Driver type: vmdk {{(pid=62070) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1169.439209] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-c1ce9b36-af36-44fc-8730-d99fe6048eb8 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] [instance: 8d3cc6bf-5ae8-45d8-ba70-269e0d290ee4] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-245531', 'volume_id': '4e71ebf7-c7f2-42ea-999e-8d6b06ae2d8a', 'name': 'volume-4e71ebf7-c7f2-42ea-999e-8d6b06ae2d8a', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '8d3cc6bf-5ae8-45d8-ba70-269e0d290ee4', 'attached_at': '', 'detached_at': '', 'volume_id': '4e71ebf7-c7f2-42ea-999e-8d6b06ae2d8a', 'serial': '4e71ebf7-c7f2-42ea-999e-8d6b06ae2d8a'} {{(pid=62070) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1169.440259] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ed6676b-5e79-4606-821f-5a705d586dd4 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.461593] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05331e65-4d9b-4a30-9d00-5c87331027e1 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.468134] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ce828bc-baee-47d9-bea7-d44308d50069 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.487627] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b916f9be-b46a-4134-915a-916cfb35f023 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.502326] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-c1ce9b36-af36-44fc-8730-d99fe6048eb8 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] The volume has not been displaced from its original location: [datastore2] volume-4e71ebf7-c7f2-42ea-999e-8d6b06ae2d8a/volume-4e71ebf7-c7f2-42ea-999e-8d6b06ae2d8a.vmdk. No consolidation needed. 
{{(pid=62070) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1169.507445] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-c1ce9b36-af36-44fc-8730-d99fe6048eb8 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] [instance: 8d3cc6bf-5ae8-45d8-ba70-269e0d290ee4] Reconfiguring VM instance instance-0000006c to detach disk 2001 {{(pid=62070) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1169.507742] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1cde2a0a-83df-4642-85c0-b3378831f6e8 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.526881] env[62070]: DEBUG oslo_vmware.api [None req-c1ce9b36-af36-44fc-8730-d99fe6048eb8 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Waiting for the task: (returnval){ [ 1169.526881] env[62070]: value = "task-1122561" [ 1169.526881] env[62070]: _type = "Task" [ 1169.526881] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1169.536066] env[62070]: DEBUG oslo_vmware.api [None req-c1ce9b36-af36-44fc-8730-d99fe6048eb8 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Task: {'id': task-1122561, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1170.036326] env[62070]: DEBUG oslo_vmware.api [None req-c1ce9b36-af36-44fc-8730-d99fe6048eb8 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Task: {'id': task-1122561, 'name': ReconfigVM_Task, 'duration_secs': 0.217315} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1170.036627] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-c1ce9b36-af36-44fc-8730-d99fe6048eb8 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] [instance: 8d3cc6bf-5ae8-45d8-ba70-269e0d290ee4] Reconfigured VM instance instance-0000006c to detach disk 2001 {{(pid=62070) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1170.041151] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5fd21728-2561-4537-ab3f-4f2cd805ec4f {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1170.055825] env[62070]: DEBUG oslo_vmware.api [None req-c1ce9b36-af36-44fc-8730-d99fe6048eb8 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Waiting for the task: (returnval){ [ 1170.055825] env[62070]: value = "task-1122562" [ 1170.055825] env[62070]: _type = "Task" [ 1170.055825] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1170.063606] env[62070]: DEBUG oslo_vmware.api [None req-c1ce9b36-af36-44fc-8730-d99fe6048eb8 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Task: {'id': task-1122562, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1170.565602] env[62070]: DEBUG oslo_vmware.api [None req-c1ce9b36-af36-44fc-8730-d99fe6048eb8 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Task: {'id': task-1122562, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1171.066665] env[62070]: DEBUG oslo_vmware.api [None req-c1ce9b36-af36-44fc-8730-d99fe6048eb8 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Task: {'id': task-1122562, 'name': ReconfigVM_Task, 'duration_secs': 0.754807} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1171.068730] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-c1ce9b36-af36-44fc-8730-d99fe6048eb8 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] [instance: 8d3cc6bf-5ae8-45d8-ba70-269e0d290ee4] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-245531', 'volume_id': '4e71ebf7-c7f2-42ea-999e-8d6b06ae2d8a', 'name': 'volume-4e71ebf7-c7f2-42ea-999e-8d6b06ae2d8a', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '8d3cc6bf-5ae8-45d8-ba70-269e0d290ee4', 'attached_at': '', 'detached_at': '', 'volume_id': '4e71ebf7-c7f2-42ea-999e-8d6b06ae2d8a', 'serial': '4e71ebf7-c7f2-42ea-999e-8d6b06ae2d8a'} {{(pid=62070) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1171.607228] env[62070]: DEBUG nova.objects.instance [None req-c1ce9b36-af36-44fc-8730-d99fe6048eb8 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Lazy-loading 'flavor' on Instance uuid 8d3cc6bf-5ae8-45d8-ba70-269e0d290ee4 {{(pid=62070) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1172.615180] env[62070]: DEBUG oslo_concurrency.lockutils [None req-c1ce9b36-af36-44fc-8730-d99fe6048eb8 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Lock "8d3cc6bf-5ae8-45d8-ba70-269e0d290ee4" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.710s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1173.139014] env[62070]: DEBUG oslo_concurrency.lockutils [None req-e9fae1d6-0b57-4d46-97ed-ddee8a18397a tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Acquiring lock "8d3cc6bf-5ae8-45d8-ba70-269e0d290ee4" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1173.139014] env[62070]: DEBUG oslo_concurrency.lockutils [None req-e9fae1d6-0b57-4d46-97ed-ddee8a18397a tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Lock "8d3cc6bf-5ae8-45d8-ba70-269e0d290ee4" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1173.139309] env[62070]: DEBUG oslo_concurrency.lockutils [None 
req-e9fae1d6-0b57-4d46-97ed-ddee8a18397a tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Acquiring lock "8d3cc6bf-5ae8-45d8-ba70-269e0d290ee4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1173.139423] env[62070]: DEBUG oslo_concurrency.lockutils [None req-e9fae1d6-0b57-4d46-97ed-ddee8a18397a tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Lock "8d3cc6bf-5ae8-45d8-ba70-269e0d290ee4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1173.139607] env[62070]: DEBUG oslo_concurrency.lockutils [None req-e9fae1d6-0b57-4d46-97ed-ddee8a18397a tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Lock "8d3cc6bf-5ae8-45d8-ba70-269e0d290ee4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1173.141864] env[62070]: INFO nova.compute.manager [None req-e9fae1d6-0b57-4d46-97ed-ddee8a18397a tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] [instance: 8d3cc6bf-5ae8-45d8-ba70-269e0d290ee4] Terminating instance [ 1173.143740] env[62070]: DEBUG nova.compute.manager [None req-e9fae1d6-0b57-4d46-97ed-ddee8a18397a tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] [instance: 8d3cc6bf-5ae8-45d8-ba70-269e0d290ee4] Start destroying the instance on the hypervisor. 
{{(pid=62070) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1173.143945] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-e9fae1d6-0b57-4d46-97ed-ddee8a18397a tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] [instance: 8d3cc6bf-5ae8-45d8-ba70-269e0d290ee4] Destroying instance {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1173.144785] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1701940f-79b5-473a-aa65-2d58c94d64bf {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.152070] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-e9fae1d6-0b57-4d46-97ed-ddee8a18397a tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] [instance: 8d3cc6bf-5ae8-45d8-ba70-269e0d290ee4] Powering off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1173.152299] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7b59df45-ac59-4eb6-a9a1-24c35a2d4b24 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.158629] env[62070]: DEBUG oslo_vmware.api [None req-e9fae1d6-0b57-4d46-97ed-ddee8a18397a tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Waiting for the task: (returnval){ [ 1173.158629] env[62070]: value = "task-1122563" [ 1173.158629] env[62070]: _type = "Task" [ 1173.158629] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1173.165779] env[62070]: DEBUG oslo_vmware.api [None req-e9fae1d6-0b57-4d46-97ed-ddee8a18397a tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Task: {'id': task-1122563, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1173.277642] env[62070]: DEBUG oslo_service.periodic_task [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62070) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1173.277875] env[62070]: DEBUG oslo_service.periodic_task [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62070) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1173.278039] env[62070]: DEBUG nova.compute.manager [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Starting heal instance info cache {{(pid=62070) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}} [ 1173.668637] env[62070]: DEBUG oslo_vmware.api [None req-e9fae1d6-0b57-4d46-97ed-ddee8a18397a tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Task: {'id': task-1122563, 'name': PowerOffVM_Task, 'duration_secs': 0.183362} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1173.668932] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-e9fae1d6-0b57-4d46-97ed-ddee8a18397a tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] [instance: 8d3cc6bf-5ae8-45d8-ba70-269e0d290ee4] Powered off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1173.669086] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-e9fae1d6-0b57-4d46-97ed-ddee8a18397a tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] [instance: 8d3cc6bf-5ae8-45d8-ba70-269e0d290ee4] Unregistering the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1173.669373] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1a14d822-335b-40b4-af95-64e9a2dc3fb8 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.739261] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-e9fae1d6-0b57-4d46-97ed-ddee8a18397a tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] [instance: 8d3cc6bf-5ae8-45d8-ba70-269e0d290ee4] Unregistered the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1173.739487] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-e9fae1d6-0b57-4d46-97ed-ddee8a18397a tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] [instance: 8d3cc6bf-5ae8-45d8-ba70-269e0d290ee4] Deleting contents of the VM from datastore datastore1 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1173.739678] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-e9fae1d6-0b57-4d46-97ed-ddee8a18397a tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Deleting the datastore file [datastore1] 8d3cc6bf-5ae8-45d8-ba70-269e0d290ee4 {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1173.739939] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-626e4d1d-6a7a-44fc-8c47-dd527d0c7447 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.746231] env[62070]: DEBUG oslo_vmware.api [None req-e9fae1d6-0b57-4d46-97ed-ddee8a18397a tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Waiting for the task: (returnval){ [ 1173.746231] env[62070]: value = "task-1122565" [ 1173.746231] env[62070]: _type = "Task" [ 1173.746231] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1173.753606] env[62070]: DEBUG oslo_vmware.api [None req-e9fae1d6-0b57-4d46-97ed-ddee8a18397a tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Task: {'id': task-1122565, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1174.255576] env[62070]: DEBUG oslo_vmware.api [None req-e9fae1d6-0b57-4d46-97ed-ddee8a18397a tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Task: {'id': task-1122565, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.158154} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1174.255839] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-e9fae1d6-0b57-4d46-97ed-ddee8a18397a tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Deleted the datastore file {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1174.256017] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-e9fae1d6-0b57-4d46-97ed-ddee8a18397a tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] [instance: 8d3cc6bf-5ae8-45d8-ba70-269e0d290ee4] Deleted contents of the VM from datastore datastore1 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1174.256213] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-e9fae1d6-0b57-4d46-97ed-ddee8a18397a tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] [instance: 8d3cc6bf-5ae8-45d8-ba70-269e0d290ee4] Instance destroyed {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1174.256396] env[62070]: INFO nova.compute.manager [None req-e9fae1d6-0b57-4d46-97ed-ddee8a18397a tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] [instance: 8d3cc6bf-5ae8-45d8-ba70-269e0d290ee4] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1174.256648] env[62070]: DEBUG oslo.service.loopingcall [None req-e9fae1d6-0b57-4d46-97ed-ddee8a18397a tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1174.256846] env[62070]: DEBUG nova.compute.manager [-] [instance: 8d3cc6bf-5ae8-45d8-ba70-269e0d290ee4] Deallocating network for instance {{(pid=62070) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1174.256941] env[62070]: DEBUG nova.network.neutron [-] [instance: 8d3cc6bf-5ae8-45d8-ba70-269e0d290ee4] deallocate_for_instance() {{(pid=62070) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1174.692946] env[62070]: DEBUG nova.compute.manager [req-4cfab98a-a736-4608-87ff-be98d6d50e6e req-b5455549-f1b4-4725-a114-d4660e5949f9 service nova] [instance: 8d3cc6bf-5ae8-45d8-ba70-269e0d290ee4] Received event network-vif-deleted-38ef183a-1894-4b0e-b9e6-d551818dc34c {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1174.693254] env[62070]: INFO nova.compute.manager [req-4cfab98a-a736-4608-87ff-be98d6d50e6e req-b5455549-f1b4-4725-a114-d4660e5949f9 service nova] [instance: 8d3cc6bf-5ae8-45d8-ba70-269e0d290ee4] Neutron deleted interface 38ef183a-1894-4b0e-b9e6-d551818dc34c; detaching it from the instance and deleting it from the info cache [ 1174.693351] env[62070]: DEBUG nova.network.neutron [req-4cfab98a-a736-4608-87ff-be98d6d50e6e req-b5455549-f1b4-4725-a114-d4660e5949f9 service nova] [instance: 8d3cc6bf-5ae8-45d8-ba70-269e0d290ee4] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1174.813731] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Acquiring lock "refresh_cache-20c4fabc-fc9b-49c7-ab28-fa092ad66038" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1174.813877] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Acquired lock "refresh_cache-20c4fabc-fc9b-49c7-ab28-fa092ad66038" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1174.814073] env[62070]: DEBUG nova.network.neutron [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] [instance: 20c4fabc-fc9b-49c7-ab28-fa092ad66038] Forcefully refreshing network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2004}} [ 1175.169271] env[62070]: DEBUG nova.network.neutron [-] [instance: 8d3cc6bf-5ae8-45d8-ba70-269e0d290ee4] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1175.196642] env[62070]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c07c244d-c6ec-440a-9544-cf164336f77d {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.209473] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6116aae1-641b-496a-8666-75cb2086d897 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.237350] env[62070]: DEBUG nova.compute.manager [req-4cfab98a-a736-4608-87ff-be98d6d50e6e req-b5455549-f1b4-4725-a114-d4660e5949f9 service nova] [instance: 8d3cc6bf-5ae8-45d8-ba70-269e0d290ee4] Detach interface failed, port_id=38ef183a-1894-4b0e-b9e6-d551818dc34c, reason: Instance 
8d3cc6bf-5ae8-45d8-ba70-269e0d290ee4 could not be found. {{(pid=62070) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1175.672033] env[62070]: INFO nova.compute.manager [-] [instance: 8d3cc6bf-5ae8-45d8-ba70-269e0d290ee4] Took 1.41 seconds to deallocate network for instance. [ 1176.034339] env[62070]: DEBUG nova.network.neutron [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] [instance: 20c4fabc-fc9b-49c7-ab28-fa092ad66038] Updating instance_info_cache with network_info: [{"id": "0eb3c7d4-224c-4d94-9450-0623a1e1b162", "address": "fa:16:3e:d9:a2:6e", "network": {"id": "b9ef8f6c-bbd6-409d-a591-ad584e5e028f", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-599171324-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.143", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ca25fba006b740f2a86fe10e4abe9400", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa8c2f93-f287-41b3-adb6-4942a7ea2a0b", "external-id": "nsx-vlan-transportzone-363", "segmentation_id": 363, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0eb3c7d4-22", "ovs_interfaceid": "0eb3c7d4-224c-4d94-9450-0623a1e1b162", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1176.179848] env[62070]: DEBUG oslo_concurrency.lockutils [None req-e9fae1d6-0b57-4d46-97ed-ddee8a18397a tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1176.180093] env[62070]: DEBUG oslo_concurrency.lockutils [None req-e9fae1d6-0b57-4d46-97ed-ddee8a18397a tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1176.180360] env[62070]: DEBUG nova.objects.instance [None req-e9fae1d6-0b57-4d46-97ed-ddee8a18397a tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Lazy-loading 'resources' on Instance uuid 8d3cc6bf-5ae8-45d8-ba70-269e0d290ee4 {{(pid=62070) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1176.536933] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Releasing lock "refresh_cache-20c4fabc-fc9b-49c7-ab28-fa092ad66038" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1176.537196] env[62070]: DEBUG nova.compute.manager [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] [instance: 20c4fabc-fc9b-49c7-ab28-fa092ad66038] Updated the network 
info_cache for instance {{(pid=62070) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9999}} [ 1176.537422] env[62070]: DEBUG oslo_service.periodic_task [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62070) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1176.537590] env[62070]: DEBUG oslo_service.periodic_task [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62070) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1176.537735] env[62070]: DEBUG oslo_service.periodic_task [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62070) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1176.537879] env[62070]: DEBUG oslo_service.periodic_task [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62070) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1176.538027] env[62070]: DEBUG oslo_service.periodic_task [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62070) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1176.538174] env[62070]: DEBUG oslo_service.periodic_task [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Running periodic task ComputeManager._sync_power_states {{(pid=62070) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1176.750289] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-008df752-1373-49b3-9d44-8f9e90a34170 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.758263] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fe5d040-8ea3-4b65-a107-036ac791359b {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.787850] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01cc370a-74f0-4a4c-b15b-efffc3b342c4 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.795063] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b49debbb-3f3e-4932-a647-11a38c759d8a {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.807811] env[62070]: DEBUG nova.compute.provider_tree [None req-e9fae1d6-0b57-4d46-97ed-ddee8a18397a tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1177.041758] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Getting list of instances from cluster (obj){ [ 1177.041758] 
env[62070]: value = "domain-c8" [ 1177.041758] env[62070]: _type = "ClusterComputeResource" [ 1177.041758] env[62070]: } {{(pid=62070) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 1177.042785] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea8af4a4-9766-4f90-ad41-9dd8a6505284 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.055141] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Got total of 3 instances {{(pid=62070) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 1177.055255] env[62070]: WARNING nova.compute.manager [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] While synchronizing instance power states, found 4 instances in the database and 3 instances on the hypervisor. [ 1177.055399] env[62070]: DEBUG nova.compute.manager [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Triggering sync for uuid 20c4fabc-fc9b-49c7-ab28-fa092ad66038 {{(pid=62070) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10338}} [ 1177.055597] env[62070]: DEBUG nova.compute.manager [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Triggering sync for uuid 8d3cc6bf-5ae8-45d8-ba70-269e0d290ee4 {{(pid=62070) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10338}} [ 1177.055755] env[62070]: DEBUG nova.compute.manager [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Triggering sync for uuid 9a37cf1a-fd25-48b9-923d-75a95857101b {{(pid=62070) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10338}} [ 1177.055909] env[62070]: DEBUG nova.compute.manager [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Triggering sync for uuid c2b65119-77ff-437b-8f7e-cf6e83d907bb {{(pid=62070) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10338}} [ 1177.056267] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Acquiring lock "20c4fabc-fc9b-49c7-ab28-fa092ad66038" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1177.056499] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Lock "20c4fabc-fc9b-49c7-ab28-fa092ad66038" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1177.056768] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Acquiring lock "8d3cc6bf-5ae8-45d8-ba70-269e0d290ee4" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1177.056991] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Acquiring lock "9a37cf1a-fd25-48b9-923d-75a95857101b" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1177.057211] env[62070]: DEBUG oslo_concurrency.lockutils [None 
req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Lock "9a37cf1a-fd25-48b9-923d-75a95857101b" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1177.057461] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Acquiring lock "c2b65119-77ff-437b-8f7e-cf6e83d907bb" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1177.057651] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Lock "c2b65119-77ff-437b-8f7e-cf6e83d907bb" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1177.057853] env[62070]: DEBUG oslo_service.periodic_task [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62070) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1177.057986] env[62070]: DEBUG nova.compute.manager [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62070) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}} [ 1177.058709] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0fca472-5301-48e8-833e-5e5600e9f490 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.061572] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-513abbb9-2028-45b3-9980-a9984bda7ad9 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.064364] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65ef3cc5-0e01-4c11-9bc8-209f7f0f54ee {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.066763] env[62070]: DEBUG oslo_service.periodic_task [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62070) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1177.311631] env[62070]: DEBUG nova.scheduler.client.report [None req-e9fae1d6-0b57-4d46-97ed-ddee8a18397a tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1177.569812] env[62070]: 
DEBUG oslo_concurrency.lockutils [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1177.579609] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Lock "9a37cf1a-fd25-48b9-923d-75a95857101b" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.522s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1177.579925] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Lock "c2b65119-77ff-437b-8f7e-cf6e83d907bb" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.522s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1177.580265] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Lock "20c4fabc-fc9b-49c7-ab28-fa092ad66038" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.524s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1177.816186] env[62070]: DEBUG oslo_concurrency.lockutils [None req-e9fae1d6-0b57-4d46-97ed-ddee8a18397a tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.636s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1177.818706] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.249s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1177.818876] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1177.819285] env[62070]: DEBUG nova.compute.resource_tracker [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62070) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1177.819988] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1d35c9b-540b-4f57-81fb-f7e5c87e8648 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.828336] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f449577d-3538-4354-ab5b-c570aebedffe {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.842466] env[62070]: INFO 
nova.scheduler.client.report [None req-e9fae1d6-0b57-4d46-97ed-ddee8a18397a tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Deleted allocations for instance 8d3cc6bf-5ae8-45d8-ba70-269e0d290ee4 [ 1177.843889] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19a1f24c-db2d-4c2b-a261-089f5209e733 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.852744] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ad17969-64c8-43fc-98ce-09f07bc14e04 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.881568] env[62070]: DEBUG nova.compute.resource_tracker [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180817MB free_disk=169GB free_vcpus=48 pci_devices=None {{(pid=62070) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1177.881732] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1177.881890] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1178.353095] env[62070]: DEBUG oslo_concurrency.lockutils [None req-e9fae1d6-0b57-4d46-97ed-ddee8a18397a tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Lock "8d3cc6bf-5ae8-45d8-ba70-269e0d290ee4" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.214s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1178.353974] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Lock "8d3cc6bf-5ae8-45d8-ba70-269e0d290ee4" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 1.297s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1178.354441] env[62070]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c169dcf0-4aa1-4322-b1fd-2e31d5eb5bd6 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1178.363463] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d1e3e98-19ff-49b0-8a18-9a6be7941d57 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1178.897137] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Lock "8d3cc6bf-5ae8-45d8-ba70-269e0d290ee4" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.543s 
{{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1178.913145] env[62070]: DEBUG nova.compute.resource_tracker [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Instance 20c4fabc-fc9b-49c7-ab28-fa092ad66038 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62070) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1178.913326] env[62070]: DEBUG nova.compute.resource_tracker [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Instance 9a37cf1a-fd25-48b9-923d-75a95857101b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62070) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1178.913453] env[62070]: DEBUG nova.compute.resource_tracker [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Instance c2b65119-77ff-437b-8f7e-cf6e83d907bb actively managed on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62070) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1178.913633] env[62070]: DEBUG nova.compute.resource_tracker [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Total usable vcpus: 48, total allocated vcpus: 3 {{(pid=62070) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1178.913769] env[62070]: DEBUG nova.compute.resource_tracker [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1088MB phys_disk=200GB used_disk=2GB total_vcpus=48 used_vcpus=3 pci_stats=[] {{(pid=62070) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1178.962146] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1fff388-46d7-49ac-b798-43bc5f47f5fb {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1178.969885] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4bc6fa6-b64e-4cc5-8c40-bd6e00b66fb3 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1178.999709] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3df6201-2674-458e-bef3-b3b97d3bbdcf {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1179.007726] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1010a96b-c433-4e08-9cfe-b05317d4222f {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1179.021468] env[62070]: DEBUG nova.compute.provider_tree [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1179.524371] env[62070]: DEBUG nova.scheduler.client.report [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Inventory has not changed for provider 
21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1180.016402] env[62070]: DEBUG oslo_concurrency.lockutils [None req-49fd381c-c163-461e-892d-e41e84451551 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Acquiring lock "689ba5a3-2253-4dc1-a47b-db152f86abd3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1180.016644] env[62070]: DEBUG oslo_concurrency.lockutils [None req-49fd381c-c163-461e-892d-e41e84451551 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Lock "689ba5a3-2253-4dc1-a47b-db152f86abd3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1180.029443] env[62070]: DEBUG nova.compute.resource_tracker [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62070) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1180.029618] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.148s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1180.518651] env[62070]: DEBUG nova.compute.manager [None req-49fd381c-c163-461e-892d-e41e84451551 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] [instance: 689ba5a3-2253-4dc1-a47b-db152f86abd3] Starting instance... 
{{(pid=62070) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1181.038012] env[62070]: DEBUG oslo_concurrency.lockutils [None req-49fd381c-c163-461e-892d-e41e84451551 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1181.038294] env[62070]: DEBUG oslo_concurrency.lockutils [None req-49fd381c-c163-461e-892d-e41e84451551 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1181.039820] env[62070]: INFO nova.compute.claims [None req-49fd381c-c163-461e-892d-e41e84451551 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] [instance: 689ba5a3-2253-4dc1-a47b-db152f86abd3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1182.099654] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c6484ee-ca75-40d1-90ca-6afdafd5e143 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1182.107412] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3f5ab49-2722-4884-8623-6135599fda81 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1182.136031] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84795545-a2e4-400a-8bc3-2a695af694b0 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1182.142790] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7418c54-2adb-4aa6-b5e6-be218d1dc73a {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1182.156452] env[62070]: DEBUG nova.compute.provider_tree [None req-49fd381c-c163-461e-892d-e41e84451551 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1182.659165] env[62070]: DEBUG nova.scheduler.client.report [None req-49fd381c-c163-461e-892d-e41e84451551 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1183.163558] env[62070]: DEBUG oslo_concurrency.lockutils 
[None req-49fd381c-c163-461e-892d-e41e84451551 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.125s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1183.164097] env[62070]: DEBUG nova.compute.manager [None req-49fd381c-c163-461e-892d-e41e84451551 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] [instance: 689ba5a3-2253-4dc1-a47b-db152f86abd3] Start building networks asynchronously for instance. {{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1183.668976] env[62070]: DEBUG nova.compute.utils [None req-49fd381c-c163-461e-892d-e41e84451551 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Using /dev/sd instead of None {{(pid=62070) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1183.670456] env[62070]: DEBUG nova.compute.manager [None req-49fd381c-c163-461e-892d-e41e84451551 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] [instance: 689ba5a3-2253-4dc1-a47b-db152f86abd3] Allocating IP information in the background. {{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1183.670630] env[62070]: DEBUG nova.network.neutron [None req-49fd381c-c163-461e-892d-e41e84451551 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] [instance: 689ba5a3-2253-4dc1-a47b-db152f86abd3] allocate_for_instance() {{(pid=62070) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1183.715739] env[62070]: DEBUG nova.policy [None req-49fd381c-c163-461e-892d-e41e84451551 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a91eca948b964f1885f1effb82ea35dc', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '735d24ccc5614660a5b34d77af648f94', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62070) authorize /opt/stack/nova/nova/policy.py:201}} [ 1183.974512] env[62070]: DEBUG nova.network.neutron [None req-49fd381c-c163-461e-892d-e41e84451551 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] [instance: 689ba5a3-2253-4dc1-a47b-db152f86abd3] Successfully created port: f7325230-82ee-4433-865b-d8d7ced03602 {{(pid=62070) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1184.173331] env[62070]: DEBUG nova.compute.manager [None req-49fd381c-c163-461e-892d-e41e84451551 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] [instance: 689ba5a3-2253-4dc1-a47b-db152f86abd3] Start building block device mappings for instance. 
{{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1185.182966] env[62070]: DEBUG nova.compute.manager [None req-49fd381c-c163-461e-892d-e41e84451551 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] [instance: 689ba5a3-2253-4dc1-a47b-db152f86abd3] Start spawning the instance on the hypervisor. {{(pid=62070) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1185.209141] env[62070]: DEBUG nova.virt.hardware [None req-49fd381c-c163-461e-892d-e41e84451551 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T09:21:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T09:21:17Z,direct_url=,disk_format='vmdk',id=43ea607c-7ece-4601-9b11-75c6a16aa7dd,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9d42cb2bbadf40d6b35f237f71234611',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T09:21:18Z,virtual_size=,visibility=), allow threads: False {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1185.209441] env[62070]: DEBUG nova.virt.hardware [None req-49fd381c-c163-461e-892d-e41e84451551 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Flavor limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1185.209615] env[62070]: DEBUG nova.virt.hardware [None req-49fd381c-c163-461e-892d-e41e84451551 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Image limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1185.209805] env[62070]: DEBUG nova.virt.hardware [None req-49fd381c-c163-461e-892d-e41e84451551 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Flavor pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1185.209957] env[62070]: DEBUG nova.virt.hardware [None req-49fd381c-c163-461e-892d-e41e84451551 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Image pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1185.210124] env[62070]: DEBUG nova.virt.hardware [None req-49fd381c-c163-461e-892d-e41e84451551 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1185.210376] env[62070]: DEBUG nova.virt.hardware [None req-49fd381c-c163-461e-892d-e41e84451551 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62070) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:569}} [ 1185.210567] env[62070]: DEBUG nova.virt.hardware [None req-49fd381c-c163-461e-892d-e41e84451551 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1185.210744] env[62070]: DEBUG nova.virt.hardware [None req-49fd381c-c163-461e-892d-e41e84451551 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Got 1 possible topologies {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1185.210913] env[62070]: DEBUG nova.virt.hardware [None req-49fd381c-c163-461e-892d-e41e84451551 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1185.211103] env[62070]: DEBUG nova.virt.hardware [None req-49fd381c-c163-461e-892d-e41e84451551 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1185.211959] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4025e247-09df-4e36-88a0-76c6bc8c9619 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.219476] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-609cb0b4-440b-4599-8621-6c25d32985eb {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.339119] env[62070]: DEBUG nova.compute.manager [req-2d2a918d-8ae2-4bf9-9a5b-aff1064b7479 req-35aab797-b996-4a62-9fcd-56a9039de6b2 service nova] [instance: 689ba5a3-2253-4dc1-a47b-db152f86abd3] Received event network-vif-plugged-f7325230-82ee-4433-865b-d8d7ced03602 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1185.339399] env[62070]: DEBUG oslo_concurrency.lockutils [req-2d2a918d-8ae2-4bf9-9a5b-aff1064b7479 req-35aab797-b996-4a62-9fcd-56a9039de6b2 service nova] Acquiring lock "689ba5a3-2253-4dc1-a47b-db152f86abd3-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1185.339576] env[62070]: DEBUG oslo_concurrency.lockutils [req-2d2a918d-8ae2-4bf9-9a5b-aff1064b7479 req-35aab797-b996-4a62-9fcd-56a9039de6b2 service nova] Lock "689ba5a3-2253-4dc1-a47b-db152f86abd3-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1185.339785] env[62070]: DEBUG oslo_concurrency.lockutils [req-2d2a918d-8ae2-4bf9-9a5b-aff1064b7479 req-35aab797-b996-4a62-9fcd-56a9039de6b2 service nova] Lock "689ba5a3-2253-4dc1-a47b-db152f86abd3-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1185.339962] env[62070]: 
DEBUG nova.compute.manager [req-2d2a918d-8ae2-4bf9-9a5b-aff1064b7479 req-35aab797-b996-4a62-9fcd-56a9039de6b2 service nova] [instance: 689ba5a3-2253-4dc1-a47b-db152f86abd3] No waiting events found dispatching network-vif-plugged-f7325230-82ee-4433-865b-d8d7ced03602 {{(pid=62070) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1185.340150] env[62070]: WARNING nova.compute.manager [req-2d2a918d-8ae2-4bf9-9a5b-aff1064b7479 req-35aab797-b996-4a62-9fcd-56a9039de6b2 service nova] [instance: 689ba5a3-2253-4dc1-a47b-db152f86abd3] Received unexpected event network-vif-plugged-f7325230-82ee-4433-865b-d8d7ced03602 for instance with vm_state building and task_state spawning. [ 1185.421399] env[62070]: DEBUG nova.network.neutron [None req-49fd381c-c163-461e-892d-e41e84451551 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] [instance: 689ba5a3-2253-4dc1-a47b-db152f86abd3] Successfully updated port: f7325230-82ee-4433-865b-d8d7ced03602 {{(pid=62070) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1185.924654] env[62070]: DEBUG oslo_concurrency.lockutils [None req-49fd381c-c163-461e-892d-e41e84451551 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Acquiring lock "refresh_cache-689ba5a3-2253-4dc1-a47b-db152f86abd3" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1185.924831] env[62070]: DEBUG oslo_concurrency.lockutils [None req-49fd381c-c163-461e-892d-e41e84451551 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Acquired lock "refresh_cache-689ba5a3-2253-4dc1-a47b-db152f86abd3" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1185.924992] env[62070]: DEBUG nova.network.neutron [None req-49fd381c-c163-461e-892d-e41e84451551 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] [instance: 689ba5a3-2253-4dc1-a47b-db152f86abd3] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1186.456269] env[62070]: DEBUG nova.network.neutron [None req-49fd381c-c163-461e-892d-e41e84451551 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] [instance: 689ba5a3-2253-4dc1-a47b-db152f86abd3] Instance cache missing network info. 
{{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1186.763833] env[62070]: DEBUG nova.network.neutron [None req-49fd381c-c163-461e-892d-e41e84451551 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] [instance: 689ba5a3-2253-4dc1-a47b-db152f86abd3] Updating instance_info_cache with network_info: [{"id": "f7325230-82ee-4433-865b-d8d7ced03602", "address": "fa:16:3e:c2:5b:0e", "network": {"id": "6ea9aade-1b40-4ce8-a502-14ff09a4ab40", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1617295069-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "735d24ccc5614660a5b34d77af648f94", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5b8af79a-31d5-4d78-93d7-3919aa1d9186", "external-id": "nsx-vlan-transportzone-324", "segmentation_id": 324, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf7325230-82", "ovs_interfaceid": "f7325230-82ee-4433-865b-d8d7ced03602", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1187.266737] env[62070]: DEBUG oslo_concurrency.lockutils [None req-49fd381c-c163-461e-892d-e41e84451551 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Releasing lock "refresh_cache-689ba5a3-2253-4dc1-a47b-db152f86abd3" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1187.267119] env[62070]: DEBUG nova.compute.manager [None req-49fd381c-c163-461e-892d-e41e84451551 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] [instance: 689ba5a3-2253-4dc1-a47b-db152f86abd3] Instance network_info: |[{"id": "f7325230-82ee-4433-865b-d8d7ced03602", "address": "fa:16:3e:c2:5b:0e", "network": {"id": "6ea9aade-1b40-4ce8-a502-14ff09a4ab40", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1617295069-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "735d24ccc5614660a5b34d77af648f94", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5b8af79a-31d5-4d78-93d7-3919aa1d9186", "external-id": "nsx-vlan-transportzone-324", "segmentation_id": 324, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf7325230-82", "ovs_interfaceid": "f7325230-82ee-4433-865b-d8d7ced03602", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62070) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1996}} [ 1187.267589] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-49fd381c-c163-461e-892d-e41e84451551 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] [instance: 689ba5a3-2253-4dc1-a47b-db152f86abd3] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c2:5b:0e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '5b8af79a-31d5-4d78-93d7-3919aa1d9186', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f7325230-82ee-4433-865b-d8d7ced03602', 'vif_model': 'vmxnet3'}] {{(pid=62070) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1187.275177] env[62070]: DEBUG oslo.service.loopingcall [None req-49fd381c-c163-461e-892d-e41e84451551 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1187.275402] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 689ba5a3-2253-4dc1-a47b-db152f86abd3] Creating VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1187.275638] env[62070]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f3b49bc9-a7ec-4e25-8069-c5b97eae8dde {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.297898] env[62070]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1187.297898] env[62070]: value = "task-1122566" [ 1187.297898] env[62070]: _type = "Task" [ 1187.297898] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1187.305474] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1122566, 'name': CreateVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1187.367925] env[62070]: DEBUG nova.compute.manager [req-accf771b-7f53-4b6c-b6a5-7ebbf7d55339 req-4e7f9f24-64e6-45de-b650-4b5c6a1dfaed service nova] [instance: 689ba5a3-2253-4dc1-a47b-db152f86abd3] Received event network-changed-f7325230-82ee-4433-865b-d8d7ced03602 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1187.368159] env[62070]: DEBUG nova.compute.manager [req-accf771b-7f53-4b6c-b6a5-7ebbf7d55339 req-4e7f9f24-64e6-45de-b650-4b5c6a1dfaed service nova] [instance: 689ba5a3-2253-4dc1-a47b-db152f86abd3] Refreshing instance network info cache due to event network-changed-f7325230-82ee-4433-865b-d8d7ced03602. 
{{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1187.368393] env[62070]: DEBUG oslo_concurrency.lockutils [req-accf771b-7f53-4b6c-b6a5-7ebbf7d55339 req-4e7f9f24-64e6-45de-b650-4b5c6a1dfaed service nova] Acquiring lock "refresh_cache-689ba5a3-2253-4dc1-a47b-db152f86abd3" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1187.368543] env[62070]: DEBUG oslo_concurrency.lockutils [req-accf771b-7f53-4b6c-b6a5-7ebbf7d55339 req-4e7f9f24-64e6-45de-b650-4b5c6a1dfaed service nova] Acquired lock "refresh_cache-689ba5a3-2253-4dc1-a47b-db152f86abd3" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1187.368729] env[62070]: DEBUG nova.network.neutron [req-accf771b-7f53-4b6c-b6a5-7ebbf7d55339 req-4e7f9f24-64e6-45de-b650-4b5c6a1dfaed service nova] [instance: 689ba5a3-2253-4dc1-a47b-db152f86abd3] Refreshing network info cache for port f7325230-82ee-4433-865b-d8d7ced03602 {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1187.807861] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1122566, 'name': CreateVM_Task, 'duration_secs': 0.317726} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1187.808226] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 689ba5a3-2253-4dc1-a47b-db152f86abd3] Created VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1187.808723] env[62070]: DEBUG oslo_concurrency.lockutils [None req-49fd381c-c163-461e-892d-e41e84451551 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1187.808896] env[62070]: DEBUG oslo_concurrency.lockutils [None req-49fd381c-c163-461e-892d-e41e84451551 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Acquired lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1187.809254] env[62070]: DEBUG oslo_concurrency.lockutils [None req-49fd381c-c163-461e-892d-e41e84451551 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1187.809526] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-316820d0-5f76-4e3d-883c-9ae02879dcec {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.814110] env[62070]: DEBUG oslo_vmware.api [None req-49fd381c-c163-461e-892d-e41e84451551 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Waiting for the task: (returnval){ [ 1187.814110] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]5282c00a-42ee-c18a-ef1c-79f027754268" [ 1187.814110] env[62070]: _type = "Task" [ 1187.814110] env[62070]: } to complete. 
{{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1187.822077] env[62070]: DEBUG oslo_vmware.api [None req-49fd381c-c163-461e-892d-e41e84451551 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]5282c00a-42ee-c18a-ef1c-79f027754268, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1188.119672] env[62070]: DEBUG nova.network.neutron [req-accf771b-7f53-4b6c-b6a5-7ebbf7d55339 req-4e7f9f24-64e6-45de-b650-4b5c6a1dfaed service nova] [instance: 689ba5a3-2253-4dc1-a47b-db152f86abd3] Updated VIF entry in instance network info cache for port f7325230-82ee-4433-865b-d8d7ced03602. {{(pid=62070) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1188.120052] env[62070]: DEBUG nova.network.neutron [req-accf771b-7f53-4b6c-b6a5-7ebbf7d55339 req-4e7f9f24-64e6-45de-b650-4b5c6a1dfaed service nova] [instance: 689ba5a3-2253-4dc1-a47b-db152f86abd3] Updating instance_info_cache with network_info: [{"id": "f7325230-82ee-4433-865b-d8d7ced03602", "address": "fa:16:3e:c2:5b:0e", "network": {"id": "6ea9aade-1b40-4ce8-a502-14ff09a4ab40", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1617295069-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "735d24ccc5614660a5b34d77af648f94", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5b8af79a-31d5-4d78-93d7-3919aa1d9186", "external-id": "nsx-vlan-transportzone-324", "segmentation_id": 324, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf7325230-82", "ovs_interfaceid": "f7325230-82ee-4433-865b-d8d7ced03602", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1188.324092] env[62070]: DEBUG oslo_vmware.api [None req-49fd381c-c163-461e-892d-e41e84451551 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]5282c00a-42ee-c18a-ef1c-79f027754268, 'name': SearchDatastore_Task, 'duration_secs': 0.010215} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1188.324092] env[62070]: DEBUG oslo_concurrency.lockutils [None req-49fd381c-c163-461e-892d-e41e84451551 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Releasing lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1188.324092] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-49fd381c-c163-461e-892d-e41e84451551 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] [instance: 689ba5a3-2253-4dc1-a47b-db152f86abd3] Processing image 43ea607c-7ece-4601-9b11-75c6a16aa7dd {{(pid=62070) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1188.324350] env[62070]: DEBUG oslo_concurrency.lockutils [None req-49fd381c-c163-461e-892d-e41e84451551 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1188.324350] env[62070]: DEBUG oslo_concurrency.lockutils [None req-49fd381c-c163-461e-892d-e41e84451551 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Acquired lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1188.324484] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-49fd381c-c163-461e-892d-e41e84451551 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1188.324803] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-62b23a00-fcc9-41c5-8c9d-9021c8a50c77 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.332451] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-49fd381c-c163-461e-892d-e41e84451551 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1188.332618] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-49fd381c-c163-461e-892d-e41e84451551 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62070) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1188.333280] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d8c9c6b7-8ef8-4dbb-adbe-86c3971191f4 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.337758] env[62070]: DEBUG oslo_vmware.api [None req-49fd381c-c163-461e-892d-e41e84451551 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Waiting for the task: (returnval){ [ 1188.337758] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]52117f40-3bba-b2a1-4a9c-c6dbeb09a89b" [ 1188.337758] env[62070]: _type = "Task" [ 1188.337758] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1188.344635] env[62070]: DEBUG oslo_vmware.api [None req-49fd381c-c163-461e-892d-e41e84451551 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52117f40-3bba-b2a1-4a9c-c6dbeb09a89b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1188.622790] env[62070]: DEBUG oslo_concurrency.lockutils [req-accf771b-7f53-4b6c-b6a5-7ebbf7d55339 req-4e7f9f24-64e6-45de-b650-4b5c6a1dfaed service nova] Releasing lock "refresh_cache-689ba5a3-2253-4dc1-a47b-db152f86abd3" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1188.848486] env[62070]: DEBUG oslo_vmware.api [None req-49fd381c-c163-461e-892d-e41e84451551 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52117f40-3bba-b2a1-4a9c-c6dbeb09a89b, 'name': SearchDatastore_Task, 'duration_secs': 0.007525} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1188.849251] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e4fbde1f-47f7-4d0c-9690-eec3ed44e87c {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.854110] env[62070]: DEBUG oslo_vmware.api [None req-49fd381c-c163-461e-892d-e41e84451551 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Waiting for the task: (returnval){ [ 1188.854110] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]523d289d-6ba7-2619-3011-dc252b120733" [ 1188.854110] env[62070]: _type = "Task" [ 1188.854110] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1188.861096] env[62070]: DEBUG oslo_vmware.api [None req-49fd381c-c163-461e-892d-e41e84451551 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]523d289d-6ba7-2619-3011-dc252b120733, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1189.363971] env[62070]: DEBUG oslo_vmware.api [None req-49fd381c-c163-461e-892d-e41e84451551 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]523d289d-6ba7-2619-3011-dc252b120733, 'name': SearchDatastore_Task, 'duration_secs': 0.009213} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1189.364252] env[62070]: DEBUG oslo_concurrency.lockutils [None req-49fd381c-c163-461e-892d-e41e84451551 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Releasing lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1189.364517] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-49fd381c-c163-461e-892d-e41e84451551 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore2] 689ba5a3-2253-4dc1-a47b-db152f86abd3/689ba5a3-2253-4dc1-a47b-db152f86abd3.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1189.364774] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-65c50f83-b467-447e-83b0-196016e66dd6 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.370694] env[62070]: DEBUG oslo_vmware.api [None req-49fd381c-c163-461e-892d-e41e84451551 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Waiting for the task: (returnval){ [ 1189.370694] env[62070]: value = "task-1122567" [ 1189.370694] env[62070]: _type = "Task" [ 1189.370694] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1189.378133] env[62070]: DEBUG oslo_vmware.api [None req-49fd381c-c163-461e-892d-e41e84451551 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Task: {'id': task-1122567, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1189.880990] env[62070]: DEBUG oslo_vmware.api [None req-49fd381c-c163-461e-892d-e41e84451551 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Task: {'id': task-1122567, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.444516} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1189.881380] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-49fd381c-c163-461e-892d-e41e84451551 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore2] 689ba5a3-2253-4dc1-a47b-db152f86abd3/689ba5a3-2253-4dc1-a47b-db152f86abd3.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 1189.881522] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-49fd381c-c163-461e-892d-e41e84451551 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] [instance: 689ba5a3-2253-4dc1-a47b-db152f86abd3] Extending root virtual disk to 1048576 {{(pid=62070) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1189.881730] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-585ec808-88a1-4eff-9706-ed63af51eac8 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.887521] env[62070]: DEBUG oslo_vmware.api [None req-49fd381c-c163-461e-892d-e41e84451551 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Waiting for the task: (returnval){ [ 1189.887521] env[62070]: value = "task-1122568" [ 1189.887521] env[62070]: _type = "Task" [ 1189.887521] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1189.894685] env[62070]: DEBUG oslo_vmware.api [None req-49fd381c-c163-461e-892d-e41e84451551 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Task: {'id': task-1122568, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1189.935381] env[62070]: DEBUG oslo_concurrency.lockutils [None req-4c070183-d9ab-4f52-94be-9d5227761019 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Acquiring lock "9a37cf1a-fd25-48b9-923d-75a95857101b" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1189.935636] env[62070]: DEBUG oslo_concurrency.lockutils [None req-4c070183-d9ab-4f52-94be-9d5227761019 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Lock "9a37cf1a-fd25-48b9-923d-75a95857101b" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1189.935826] env[62070]: DEBUG nova.compute.manager [None req-4c070183-d9ab-4f52-94be-9d5227761019 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 9a37cf1a-fd25-48b9-923d-75a95857101b] Checking state {{(pid=62070) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1189.936651] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3be88f26-e256-4626-8d7d-5b6398c98656 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.942390] env[62070]: DEBUG nova.compute.manager [None req-4c070183-d9ab-4f52-94be-9d5227761019 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 9a37cf1a-fd25-48b9-923d-75a95857101b] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62070) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3368}} [ 1189.942960] env[62070]: DEBUG nova.objects.instance [None req-4c070183-d9ab-4f52-94be-9d5227761019 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Lazy-loading 'flavor' on Instance uuid 9a37cf1a-fd25-48b9-923d-75a95857101b {{(pid=62070) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1190.396730] env[62070]: DEBUG oslo_vmware.api [None req-49fd381c-c163-461e-892d-e41e84451551 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Task: {'id': task-1122568, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.291766} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1190.396995] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-49fd381c-c163-461e-892d-e41e84451551 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] [instance: 689ba5a3-2253-4dc1-a47b-db152f86abd3] Extended root virtual disk {{(pid=62070) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1190.397752] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79c48aa4-3358-4f9b-a5a8-969d9bd8474b {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.418377] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-49fd381c-c163-461e-892d-e41e84451551 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] [instance: 689ba5a3-2253-4dc1-a47b-db152f86abd3] Reconfiguring VM instance instance-0000006f to attach disk [datastore2] 689ba5a3-2253-4dc1-a47b-db152f86abd3/689ba5a3-2253-4dc1-a47b-db152f86abd3.vmdk or device None with type sparse {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1190.418603] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3410c19a-67d4-4f54-9264-71b37bbb8b5c {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.436874] env[62070]: DEBUG oslo_vmware.api [None req-49fd381c-c163-461e-892d-e41e84451551 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Waiting for the task: (returnval){ [ 1190.436874] env[62070]: value = "task-1122569" [ 1190.436874] env[62070]: _type = "Task" [ 1190.436874] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1190.444208] env[62070]: DEBUG oslo_vmware.api [None req-49fd381c-c163-461e-892d-e41e84451551 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Task: {'id': task-1122569, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1190.447771] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-4c070183-d9ab-4f52-94be-9d5227761019 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 9a37cf1a-fd25-48b9-923d-75a95857101b] Powering off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1190.447984] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-886672b4-e8c8-42d6-b275-c8dd1518b396 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.453872] env[62070]: DEBUG oslo_vmware.api [None req-4c070183-d9ab-4f52-94be-9d5227761019 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Waiting for the task: (returnval){ [ 1190.453872] env[62070]: value = "task-1122570" [ 1190.453872] env[62070]: _type = "Task" [ 1190.453872] env[62070]: } to complete. 
{{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1190.460954] env[62070]: DEBUG oslo_vmware.api [None req-4c070183-d9ab-4f52-94be-9d5227761019 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Task: {'id': task-1122570, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1190.947814] env[62070]: DEBUG oslo_vmware.api [None req-49fd381c-c163-461e-892d-e41e84451551 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Task: {'id': task-1122569, 'name': ReconfigVM_Task, 'duration_secs': 0.46744} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1190.948207] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-49fd381c-c163-461e-892d-e41e84451551 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] [instance: 689ba5a3-2253-4dc1-a47b-db152f86abd3] Reconfigured VM instance instance-0000006f to attach disk [datastore2] 689ba5a3-2253-4dc1-a47b-db152f86abd3/689ba5a3-2253-4dc1-a47b-db152f86abd3.vmdk or device None with type sparse {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1190.948858] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9fbbba29-1b65-421e-b10a-705fc0c4d4b2 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.954751] env[62070]: DEBUG oslo_vmware.api [None req-49fd381c-c163-461e-892d-e41e84451551 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Waiting for the task: (returnval){ [ 1190.954751] env[62070]: value = "task-1122571" [ 1190.954751] env[62070]: _type = "Task" [ 1190.954751] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1190.964385] env[62070]: DEBUG oslo_vmware.api [None req-4c070183-d9ab-4f52-94be-9d5227761019 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Task: {'id': task-1122570, 'name': PowerOffVM_Task, 'duration_secs': 0.213546} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1190.967322] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-4c070183-d9ab-4f52-94be-9d5227761019 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 9a37cf1a-fd25-48b9-923d-75a95857101b] Powered off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1190.967583] env[62070]: DEBUG nova.compute.manager [None req-4c070183-d9ab-4f52-94be-9d5227761019 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 9a37cf1a-fd25-48b9-923d-75a95857101b] Checking state {{(pid=62070) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1190.967861] env[62070]: DEBUG oslo_vmware.api [None req-49fd381c-c163-461e-892d-e41e84451551 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Task: {'id': task-1122571, 'name': Rename_Task} progress is 5%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1190.968534] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-186cd2e9-b0e7-4898-b34d-e834278e0742 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.467529] env[62070]: DEBUG oslo_vmware.api [None req-49fd381c-c163-461e-892d-e41e84451551 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Task: {'id': task-1122571, 'name': Rename_Task, 'duration_secs': 0.163174} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1191.467815] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-49fd381c-c163-461e-892d-e41e84451551 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] [instance: 689ba5a3-2253-4dc1-a47b-db152f86abd3] Powering on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1191.468068] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-79f65b5b-9909-47ac-889f-dd58c288bd45 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.474639] env[62070]: DEBUG oslo_vmware.api [None req-49fd381c-c163-461e-892d-e41e84451551 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Waiting for the task: (returnval){ [ 1191.474639] env[62070]: value = "task-1122572" [ 1191.474639] env[62070]: _type = "Task" [ 1191.474639] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1191.479594] env[62070]: DEBUG oslo_concurrency.lockutils [None req-4c070183-d9ab-4f52-94be-9d5227761019 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Lock "9a37cf1a-fd25-48b9-923d-75a95857101b" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.544s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1191.483350] env[62070]: DEBUG oslo_vmware.api [None req-49fd381c-c163-461e-892d-e41e84451551 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Task: {'id': task-1122572, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1191.984255] env[62070]: DEBUG oslo_vmware.api [None req-49fd381c-c163-461e-892d-e41e84451551 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Task: {'id': task-1122572, 'name': PowerOnVM_Task, 'duration_secs': 0.484086} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1191.984586] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-49fd381c-c163-461e-892d-e41e84451551 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] [instance: 689ba5a3-2253-4dc1-a47b-db152f86abd3] Powered on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1191.984709] env[62070]: INFO nova.compute.manager [None req-49fd381c-c163-461e-892d-e41e84451551 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] [instance: 689ba5a3-2253-4dc1-a47b-db152f86abd3] Took 6.80 seconds to spawn the instance on the hypervisor. [ 1191.984888] env[62070]: DEBUG nova.compute.manager [None req-49fd381c-c163-461e-892d-e41e84451551 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] [instance: 689ba5a3-2253-4dc1-a47b-db152f86abd3] Checking state {{(pid=62070) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1191.985679] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f05e092-c0fe-488a-b9d4-2a1fae0eb83b {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.338351] env[62070]: DEBUG nova.objects.instance [None req-024172be-e25f-496b-abba-c53dc4be938d tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Lazy-loading 'flavor' on Instance uuid 9a37cf1a-fd25-48b9-923d-75a95857101b {{(pid=62070) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1192.501881] env[62070]: INFO nova.compute.manager [None req-49fd381c-c163-461e-892d-e41e84451551 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] [instance: 689ba5a3-2253-4dc1-a47b-db152f86abd3] Took 11.48 seconds to build instance. 
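The spawn traced above (CreateVM_Task, SearchDatastore_Task, CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task, PowerOnVM_Task) repeats two library patterns throughout: a named oslo_concurrency.lockutils lock held around the image-cache and refresh_cache keys (the Acquiring/Acquired/Releasing lines), and oslo_vmware task polling, which produces the "Waiting for the task" / "progress is N%" / "completed successfully" lines via wait_for_task/_poll_task. The sketch below is a minimal, non-authoritative illustration of that pattern: the helper names, the lock name, and the VMwareAPISession argument names are assumptions; lockutils.lock() and session.wait_for_task() correspond to the calls whose file:line locations appear in the trace, while invoke_api() is the standard oslo.vmware entry point used here for illustration.

# Minimal sketch of the lock + task-poll pattern visible in the trace above.
# Assumptions: helper names, the lock name, and the session argument names are
# illustrative; lockutils.lock() and wait_for_task() are the calls whose
# source locations show up in the log.
from oslo_concurrency import lockutils
from oslo_vmware import api as vmware_api


def make_session(host, user, password):
    # Hypothetical wiring; real deployments take these values from the
    # [vmware] section of nova.conf.
    return vmware_api.VMwareAPISession(
        host, user, password,
        api_retry_count=10, task_poll_interval=0.5)


def copy_base_image(session, dc_ref, src_vmdk, dst_vmdk):
    # Named lock, mirroring the Acquiring/Acquired/Releasing lock lines.
    with lockutils.lock("devstack-image-cache_base-example"):
        disk_mgr = session.vim.service_content.virtualDiskManager
        # invoke_api() issues the SOAP call and returns a task object;
        # wait_for_task() then polls it until completion, producing the
        # "progress is N%" / "completed successfully" lines.
        task = session.invoke_api(
            session.vim, "CopyVirtualDisk_Task", disk_mgr,
            sourceName=src_vmdk, sourceDatacenter=dc_ref,
            destName=dst_vmdk, destDatacenter=dc_ref)
        session.wait_for_task(task)

In the trace, the same shape drives every step: ReconfigVM_Task for the disk attach, Rename_Task, and PowerOnVM_Task differ only in the method name and arguments handed to the API call before the task is waited on.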
[ 1192.843746] env[62070]: DEBUG oslo_concurrency.lockutils [None req-024172be-e25f-496b-abba-c53dc4be938d tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Acquiring lock "refresh_cache-9a37cf1a-fd25-48b9-923d-75a95857101b" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1192.843985] env[62070]: DEBUG oslo_concurrency.lockutils [None req-024172be-e25f-496b-abba-c53dc4be938d tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Acquired lock "refresh_cache-9a37cf1a-fd25-48b9-923d-75a95857101b" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1192.844294] env[62070]: DEBUG nova.network.neutron [None req-024172be-e25f-496b-abba-c53dc4be938d tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 9a37cf1a-fd25-48b9-923d-75a95857101b] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1192.844614] env[62070]: DEBUG nova.objects.instance [None req-024172be-e25f-496b-abba-c53dc4be938d tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Lazy-loading 'info_cache' on Instance uuid 9a37cf1a-fd25-48b9-923d-75a95857101b {{(pid=62070) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1193.004046] env[62070]: DEBUG oslo_concurrency.lockutils [None req-49fd381c-c163-461e-892d-e41e84451551 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Lock "689ba5a3-2253-4dc1-a47b-db152f86abd3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 12.987s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1193.131520] env[62070]: DEBUG nova.compute.manager [req-e1c4a7db-f671-4ed8-aa67-024c05fe47df req-56a5d4b5-08ab-4cee-bf42-bf7bcef988d0 service nova] [instance: 689ba5a3-2253-4dc1-a47b-db152f86abd3] Received event network-changed-f7325230-82ee-4433-865b-d8d7ced03602 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1193.131720] env[62070]: DEBUG nova.compute.manager [req-e1c4a7db-f671-4ed8-aa67-024c05fe47df req-56a5d4b5-08ab-4cee-bf42-bf7bcef988d0 service nova] [instance: 689ba5a3-2253-4dc1-a47b-db152f86abd3] Refreshing instance network info cache due to event network-changed-f7325230-82ee-4433-865b-d8d7ced03602. 
{{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1193.131929] env[62070]: DEBUG oslo_concurrency.lockutils [req-e1c4a7db-f671-4ed8-aa67-024c05fe47df req-56a5d4b5-08ab-4cee-bf42-bf7bcef988d0 service nova] Acquiring lock "refresh_cache-689ba5a3-2253-4dc1-a47b-db152f86abd3" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1193.132088] env[62070]: DEBUG oslo_concurrency.lockutils [req-e1c4a7db-f671-4ed8-aa67-024c05fe47df req-56a5d4b5-08ab-4cee-bf42-bf7bcef988d0 service nova] Acquired lock "refresh_cache-689ba5a3-2253-4dc1-a47b-db152f86abd3" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1193.132255] env[62070]: DEBUG nova.network.neutron [req-e1c4a7db-f671-4ed8-aa67-024c05fe47df req-56a5d4b5-08ab-4cee-bf42-bf7bcef988d0 service nova] [instance: 689ba5a3-2253-4dc1-a47b-db152f86abd3] Refreshing network info cache for port f7325230-82ee-4433-865b-d8d7ced03602 {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1193.348097] env[62070]: DEBUG nova.objects.base [None req-024172be-e25f-496b-abba-c53dc4be938d tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Object Instance<9a37cf1a-fd25-48b9-923d-75a95857101b> lazy-loaded attributes: flavor,info_cache {{(pid=62070) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 1193.841592] env[62070]: DEBUG nova.network.neutron [req-e1c4a7db-f671-4ed8-aa67-024c05fe47df req-56a5d4b5-08ab-4cee-bf42-bf7bcef988d0 service nova] [instance: 689ba5a3-2253-4dc1-a47b-db152f86abd3] Updated VIF entry in instance network info cache for port f7325230-82ee-4433-865b-d8d7ced03602. {{(pid=62070) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1193.841788] env[62070]: DEBUG nova.network.neutron [req-e1c4a7db-f671-4ed8-aa67-024c05fe47df req-56a5d4b5-08ab-4cee-bf42-bf7bcef988d0 service nova] [instance: 689ba5a3-2253-4dc1-a47b-db152f86abd3] Updating instance_info_cache with network_info: [{"id": "f7325230-82ee-4433-865b-d8d7ced03602", "address": "fa:16:3e:c2:5b:0e", "network": {"id": "6ea9aade-1b40-4ce8-a502-14ff09a4ab40", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1617295069-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.218", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "735d24ccc5614660a5b34d77af648f94", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5b8af79a-31d5-4d78-93d7-3919aa1d9186", "external-id": "nsx-vlan-transportzone-324", "segmentation_id": 324, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf7325230-82", "ovs_interfaceid": "f7325230-82ee-4433-865b-d8d7ced03602", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1194.048025] env[62070]: DEBUG nova.network.neutron [None req-024172be-e25f-496b-abba-c53dc4be938d 
tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 9a37cf1a-fd25-48b9-923d-75a95857101b] Updating instance_info_cache with network_info: [{"id": "fe7b579a-99c3-40b9-a975-95ddca559b4d", "address": "fa:16:3e:72:7f:c3", "network": {"id": "1ca75409-ae3f-4837-a2b6-ed2445253336", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1883034081-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.208", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f29ac48ab6544ec0bd1d210aec05dbc5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1559ce49-7345-443f-bf02-4bfeb88356ef", "external-id": "nsx-vlan-transportzone-670", "segmentation_id": 670, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfe7b579a-99", "ovs_interfaceid": "fe7b579a-99c3-40b9-a975-95ddca559b4d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1194.314427] env[62070]: DEBUG oslo_concurrency.lockutils [None req-a76a553e-3e99-4b7d-b83f-d292af733650 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Acquiring lock "20c4fabc-fc9b-49c7-ab28-fa092ad66038" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1194.314721] env[62070]: DEBUG oslo_concurrency.lockutils [None req-a76a553e-3e99-4b7d-b83f-d292af733650 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Lock "20c4fabc-fc9b-49c7-ab28-fa092ad66038" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1194.344537] env[62070]: DEBUG oslo_concurrency.lockutils [req-e1c4a7db-f671-4ed8-aa67-024c05fe47df req-56a5d4b5-08ab-4cee-bf42-bf7bcef988d0 service nova] Releasing lock "refresh_cache-689ba5a3-2253-4dc1-a47b-db152f86abd3" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1194.552272] env[62070]: DEBUG oslo_concurrency.lockutils [None req-024172be-e25f-496b-abba-c53dc4be938d tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Releasing lock "refresh_cache-9a37cf1a-fd25-48b9-923d-75a95857101b" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1194.817527] env[62070]: INFO nova.compute.manager [None req-a76a553e-3e99-4b7d-b83f-d292af733650 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] [instance: 20c4fabc-fc9b-49c7-ab28-fa092ad66038] Detaching volume a9b04c95-bddc-41c3-99a5-c067625df8d2 [ 1194.847875] env[62070]: INFO nova.virt.block_device [None 
req-a76a553e-3e99-4b7d-b83f-d292af733650 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] [instance: 20c4fabc-fc9b-49c7-ab28-fa092ad66038] Attempting to driver detach volume a9b04c95-bddc-41c3-99a5-c067625df8d2 from mountpoint /dev/sdb [ 1194.848088] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-a76a553e-3e99-4b7d-b83f-d292af733650 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] [instance: 20c4fabc-fc9b-49c7-ab28-fa092ad66038] Volume detach. Driver type: vmdk {{(pid=62070) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1194.848293] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-a76a553e-3e99-4b7d-b83f-d292af733650 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] [instance: 20c4fabc-fc9b-49c7-ab28-fa092ad66038] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-245524', 'volume_id': 'a9b04c95-bddc-41c3-99a5-c067625df8d2', 'name': 'volume-a9b04c95-bddc-41c3-99a5-c067625df8d2', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'attached', 'instance': '20c4fabc-fc9b-49c7-ab28-fa092ad66038', 'attached_at': '', 'detached_at': '', 'volume_id': 'a9b04c95-bddc-41c3-99a5-c067625df8d2', 'serial': 'a9b04c95-bddc-41c3-99a5-c067625df8d2'} {{(pid=62070) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1194.849189] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-871d35ff-6bb6-403c-bdf7-25b2fc496e25 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.871544] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a51d5c0-51de-4f3b-b558-4c8479e4ba10 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.878171] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9d895f3-e83f-4a67-a80c-66207cecdd05 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.897648] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3abba38b-59a4-4823-a41e-edd306b27943 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.911637] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-a76a553e-3e99-4b7d-b83f-d292af733650 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] The volume has not been displaced from its original location: [datastore1] volume-a9b04c95-bddc-41c3-99a5-c067625df8d2/volume-a9b04c95-bddc-41c3-99a5-c067625df8d2.vmdk. No consolidation needed. 
{{(pid=62070) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1194.916688] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-a76a553e-3e99-4b7d-b83f-d292af733650 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] [instance: 20c4fabc-fc9b-49c7-ab28-fa092ad66038] Reconfiguring VM instance instance-00000069 to detach disk 2001 {{(pid=62070) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1194.916947] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-aa7b07f8-f3bc-4bcd-9a8f-81e4e357cdcf {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.934423] env[62070]: DEBUG oslo_vmware.api [None req-a76a553e-3e99-4b7d-b83f-d292af733650 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Waiting for the task: (returnval){ [ 1194.934423] env[62070]: value = "task-1122573" [ 1194.934423] env[62070]: _type = "Task" [ 1194.934423] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1194.941498] env[62070]: DEBUG oslo_vmware.api [None req-a76a553e-3e99-4b7d-b83f-d292af733650 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Task: {'id': task-1122573, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1195.054947] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-024172be-e25f-496b-abba-c53dc4be938d tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 9a37cf1a-fd25-48b9-923d-75a95857101b] Powering on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1195.055355] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-827ed8a3-1d21-45f8-b338-889cb28a0d52 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.062429] env[62070]: DEBUG oslo_vmware.api [None req-024172be-e25f-496b-abba-c53dc4be938d tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Waiting for the task: (returnval){ [ 1195.062429] env[62070]: value = "task-1122574" [ 1195.062429] env[62070]: _type = "Task" [ 1195.062429] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1195.071430] env[62070]: DEBUG oslo_vmware.api [None req-024172be-e25f-496b-abba-c53dc4be938d tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Task: {'id': task-1122574, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1195.444709] env[62070]: DEBUG oslo_vmware.api [None req-a76a553e-3e99-4b7d-b83f-d292af733650 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Task: {'id': task-1122573, 'name': ReconfigVM_Task, 'duration_secs': 0.23476} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1195.445085] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-a76a553e-3e99-4b7d-b83f-d292af733650 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] [instance: 20c4fabc-fc9b-49c7-ab28-fa092ad66038] Reconfigured VM instance instance-00000069 to detach disk 2001 {{(pid=62070) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1195.449880] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9ae621d8-d96c-4dcb-abab-84699de3d7d9 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.464811] env[62070]: DEBUG oslo_vmware.api [None req-a76a553e-3e99-4b7d-b83f-d292af733650 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Waiting for the task: (returnval){ [ 1195.464811] env[62070]: value = "task-1122575" [ 1195.464811] env[62070]: _type = "Task" [ 1195.464811] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1195.474600] env[62070]: DEBUG oslo_vmware.api [None req-a76a553e-3e99-4b7d-b83f-d292af733650 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Task: {'id': task-1122575, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1195.571649] env[62070]: DEBUG oslo_vmware.api [None req-024172be-e25f-496b-abba-c53dc4be938d tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Task: {'id': task-1122574, 'name': PowerOnVM_Task, 'duration_secs': 0.446149} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1195.571922] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-024172be-e25f-496b-abba-c53dc4be938d tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 9a37cf1a-fd25-48b9-923d-75a95857101b] Powered on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1195.572128] env[62070]: DEBUG nova.compute.manager [None req-024172be-e25f-496b-abba-c53dc4be938d tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 9a37cf1a-fd25-48b9-923d-75a95857101b] Checking state {{(pid=62070) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1195.572861] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43c69fc7-47d7-485c-81d4-5080e9e265eb {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.977154] env[62070]: DEBUG oslo_vmware.api [None req-a76a553e-3e99-4b7d-b83f-d292af733650 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Task: {'id': task-1122575, 'name': ReconfigVM_Task, 'duration_secs': 0.19749} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1195.977518] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-a76a553e-3e99-4b7d-b83f-d292af733650 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] [instance: 20c4fabc-fc9b-49c7-ab28-fa092ad66038] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-245524', 'volume_id': 'a9b04c95-bddc-41c3-99a5-c067625df8d2', 'name': 'volume-a9b04c95-bddc-41c3-99a5-c067625df8d2', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'attached', 'instance': '20c4fabc-fc9b-49c7-ab28-fa092ad66038', 'attached_at': '', 'detached_at': '', 'volume_id': 'a9b04c95-bddc-41c3-99a5-c067625df8d2', 'serial': 'a9b04c95-bddc-41c3-99a5-c067625df8d2'} {{(pid=62070) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1196.522989] env[62070]: DEBUG nova.objects.instance [None req-a76a553e-3e99-4b7d-b83f-d292af733650 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Lazy-loading 'flavor' on Instance uuid 20c4fabc-fc9b-49c7-ab28-fa092ad66038 {{(pid=62070) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1197.455111] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90775b9d-2ff4-4fab-8878-9a0f8d2a64bd {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1197.461869] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-c43253db-35e1-4f9e-8bcc-2e836b66b1ac tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 9a37cf1a-fd25-48b9-923d-75a95857101b] Suspending the VM {{(pid=62070) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1163}} [ 1197.462123] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-c2e49a8f-e251-4813-b68c-ed7d3434c6f6 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1197.468473] env[62070]: DEBUG oslo_vmware.api [None req-c43253db-35e1-4f9e-8bcc-2e836b66b1ac tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Waiting for the task: (returnval){ [ 1197.468473] env[62070]: value = "task-1122576" [ 1197.468473] env[62070]: _type = "Task" [ 1197.468473] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1197.475909] env[62070]: DEBUG oslo_vmware.api [None req-c43253db-35e1-4f9e-8bcc-2e836b66b1ac tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Task: {'id': task-1122576, 'name': SuspendVM_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1197.530756] env[62070]: DEBUG oslo_concurrency.lockutils [None req-a76a553e-3e99-4b7d-b83f-d292af733650 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Lock "20c4fabc-fc9b-49c7-ab28-fa092ad66038" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.216s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1197.977920] env[62070]: DEBUG oslo_vmware.api [None req-c43253db-35e1-4f9e-8bcc-2e836b66b1ac tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Task: {'id': task-1122576, 'name': SuspendVM_Task} progress is 75%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1198.478244] env[62070]: DEBUG oslo_vmware.api [None req-c43253db-35e1-4f9e-8bcc-2e836b66b1ac tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Task: {'id': task-1122576, 'name': SuspendVM_Task, 'duration_secs': 0.590539} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1198.478524] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-c43253db-35e1-4f9e-8bcc-2e836b66b1ac tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 9a37cf1a-fd25-48b9-923d-75a95857101b] Suspended the VM {{(pid=62070) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1167}} [ 1198.478709] env[62070]: DEBUG nova.compute.manager [None req-c43253db-35e1-4f9e-8bcc-2e836b66b1ac tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 9a37cf1a-fd25-48b9-923d-75a95857101b] Checking state {{(pid=62070) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1198.479482] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5dd722c-018d-40b2-9b21-af1a4824636f {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.548947] env[62070]: DEBUG oslo_concurrency.lockutils [None req-5252c2e8-d10d-4b89-9a29-70d17fb37dd6 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Acquiring lock "20c4fabc-fc9b-49c7-ab28-fa092ad66038" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1198.549304] env[62070]: DEBUG oslo_concurrency.lockutils [None req-5252c2e8-d10d-4b89-9a29-70d17fb37dd6 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Lock "20c4fabc-fc9b-49c7-ab28-fa092ad66038" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1198.549381] env[62070]: DEBUG oslo_concurrency.lockutils [None req-5252c2e8-d10d-4b89-9a29-70d17fb37dd6 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Acquiring lock "20c4fabc-fc9b-49c7-ab28-fa092ad66038-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" 
{{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1198.549593] env[62070]: DEBUG oslo_concurrency.lockutils [None req-5252c2e8-d10d-4b89-9a29-70d17fb37dd6 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Lock "20c4fabc-fc9b-49c7-ab28-fa092ad66038-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1198.549771] env[62070]: DEBUG oslo_concurrency.lockutils [None req-5252c2e8-d10d-4b89-9a29-70d17fb37dd6 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Lock "20c4fabc-fc9b-49c7-ab28-fa092ad66038-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1198.551734] env[62070]: INFO nova.compute.manager [None req-5252c2e8-d10d-4b89-9a29-70d17fb37dd6 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] [instance: 20c4fabc-fc9b-49c7-ab28-fa092ad66038] Terminating instance [ 1198.553420] env[62070]: DEBUG nova.compute.manager [None req-5252c2e8-d10d-4b89-9a29-70d17fb37dd6 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] [instance: 20c4fabc-fc9b-49c7-ab28-fa092ad66038] Start destroying the instance on the hypervisor. {{(pid=62070) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1198.553622] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-5252c2e8-d10d-4b89-9a29-70d17fb37dd6 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] [instance: 20c4fabc-fc9b-49c7-ab28-fa092ad66038] Destroying instance {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1198.554431] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9588bbf1-f761-4548-9bcc-fd5d613270e7 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.561912] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-5252c2e8-d10d-4b89-9a29-70d17fb37dd6 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] [instance: 20c4fabc-fc9b-49c7-ab28-fa092ad66038] Powering off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1198.562146] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d1e7add2-f4c0-4742-9c5f-3f304dc28352 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.568266] env[62070]: DEBUG oslo_vmware.api [None req-5252c2e8-d10d-4b89-9a29-70d17fb37dd6 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Waiting for the task: (returnval){ [ 1198.568266] env[62070]: value = "task-1122577" [ 1198.568266] env[62070]: _type = "Task" [ 1198.568266] env[62070]: } to complete. 
{{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1198.576033] env[62070]: DEBUG oslo_vmware.api [None req-5252c2e8-d10d-4b89-9a29-70d17fb37dd6 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Task: {'id': task-1122577, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1199.078222] env[62070]: DEBUG oslo_vmware.api [None req-5252c2e8-d10d-4b89-9a29-70d17fb37dd6 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Task: {'id': task-1122577, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1199.580076] env[62070]: DEBUG oslo_vmware.api [None req-5252c2e8-d10d-4b89-9a29-70d17fb37dd6 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Task: {'id': task-1122577, 'name': PowerOffVM_Task, 'duration_secs': 0.687624} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1199.580076] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-5252c2e8-d10d-4b89-9a29-70d17fb37dd6 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] [instance: 20c4fabc-fc9b-49c7-ab28-fa092ad66038] Powered off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1199.580076] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-5252c2e8-d10d-4b89-9a29-70d17fb37dd6 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] [instance: 20c4fabc-fc9b-49c7-ab28-fa092ad66038] Unregistering the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1199.580076] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d38174f7-a3e8-444f-a082-032aadf0b467 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.649831] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-5252c2e8-d10d-4b89-9a29-70d17fb37dd6 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] [instance: 20c4fabc-fc9b-49c7-ab28-fa092ad66038] Unregistered the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1199.650038] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-5252c2e8-d10d-4b89-9a29-70d17fb37dd6 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] [instance: 20c4fabc-fc9b-49c7-ab28-fa092ad66038] Deleting contents of the VM from datastore datastore1 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1199.650206] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-5252c2e8-d10d-4b89-9a29-70d17fb37dd6 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Deleting the datastore file [datastore1] 20c4fabc-fc9b-49c7-ab28-fa092ad66038 {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1199.650484] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with 
opID=oslo.vmware-6fb50026-4058-4462-8297-067b66fbf1f0 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.656769] env[62070]: DEBUG oslo_vmware.api [None req-5252c2e8-d10d-4b89-9a29-70d17fb37dd6 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Waiting for the task: (returnval){ [ 1199.656769] env[62070]: value = "task-1122579" [ 1199.656769] env[62070]: _type = "Task" [ 1199.656769] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1199.664424] env[62070]: DEBUG oslo_vmware.api [None req-5252c2e8-d10d-4b89-9a29-70d17fb37dd6 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Task: {'id': task-1122579, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1199.837347] env[62070]: INFO nova.compute.manager [None req-81d98a03-f8a6-4915-a8d4-594e9529a93c tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 9a37cf1a-fd25-48b9-923d-75a95857101b] Resuming [ 1199.838027] env[62070]: DEBUG nova.objects.instance [None req-81d98a03-f8a6-4915-a8d4-594e9529a93c tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Lazy-loading 'flavor' on Instance uuid 9a37cf1a-fd25-48b9-923d-75a95857101b {{(pid=62070) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1200.165835] env[62070]: DEBUG oslo_vmware.api [None req-5252c2e8-d10d-4b89-9a29-70d17fb37dd6 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Task: {'id': task-1122579, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.167663} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1200.166102] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-5252c2e8-d10d-4b89-9a29-70d17fb37dd6 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Deleted the datastore file {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1200.166296] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-5252c2e8-d10d-4b89-9a29-70d17fb37dd6 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] [instance: 20c4fabc-fc9b-49c7-ab28-fa092ad66038] Deleted contents of the VM from datastore datastore1 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1200.166475] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-5252c2e8-d10d-4b89-9a29-70d17fb37dd6 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] [instance: 20c4fabc-fc9b-49c7-ab28-fa092ad66038] Instance destroyed {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1200.166657] env[62070]: INFO nova.compute.manager [None req-5252c2e8-d10d-4b89-9a29-70d17fb37dd6 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] [instance: 20c4fabc-fc9b-49c7-ab28-fa092ad66038] Took 1.61 seconds to destroy the instance on the hypervisor. 
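Editor's note (not part of the log): the PowerOffVM_Task / UnregisterVM / DeleteDatastoreFile_Task sequence above is driven through oslo.vmware's task-polling helpers — wait_for_task hands the task to _poll_task, which re-reads its progress (the "progress is N%." lines, roughly every 0.5 s here) until it logs "completed successfully.". As a rough illustration of that pattern only, a minimal Python sketch using oslo.vmware's public session API; the host, credentials, and vm_ref are placeholders, and the positional constructor arguments (retry count, poll interval) are assumptions that may differ between oslo.vmware releases:

    # Illustrative sketch only -- not code from this log. Host, credentials
    # and vm_ref are placeholders.
    from oslo_vmware import api

    session = api.VMwareAPISession(
        'vc.example.test',   # placeholder vCenter endpoint
        'user@example.test', # placeholder username
        'secret',            # placeholder password
        10,                  # api retry count (assumed positional argument)
        0.5,                 # task poll interval, matching the ~0.5 s cadence above
    )

    def power_off_and_wait(vm_ref):
        # Start the asynchronous vSphere task (it would show up above as a
        # PowerOffVM_Task entry) and block while oslo.vmware polls it until
        # it reports "completed successfully".
        task_ref = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
        return session.wait_for_task(task_ref)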
[ 1200.166902] env[62070]: DEBUG oslo.service.loopingcall [None req-5252c2e8-d10d-4b89-9a29-70d17fb37dd6 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1200.167116] env[62070]: DEBUG nova.compute.manager [-] [instance: 20c4fabc-fc9b-49c7-ab28-fa092ad66038] Deallocating network for instance {{(pid=62070) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1200.167213] env[62070]: DEBUG nova.network.neutron [-] [instance: 20c4fabc-fc9b-49c7-ab28-fa092ad66038] deallocate_for_instance() {{(pid=62070) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1200.659386] env[62070]: DEBUG nova.compute.manager [req-951c6f4e-8668-442d-a17d-6b7d6cc54b08 req-1498fe77-8a1b-4b67-9871-56d2873fae67 service nova] [instance: 20c4fabc-fc9b-49c7-ab28-fa092ad66038] Received event network-vif-deleted-0eb3c7d4-224c-4d94-9450-0623a1e1b162 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1200.659725] env[62070]: INFO nova.compute.manager [req-951c6f4e-8668-442d-a17d-6b7d6cc54b08 req-1498fe77-8a1b-4b67-9871-56d2873fae67 service nova] [instance: 20c4fabc-fc9b-49c7-ab28-fa092ad66038] Neutron deleted interface 0eb3c7d4-224c-4d94-9450-0623a1e1b162; detaching it from the instance and deleting it from the info cache [ 1200.659806] env[62070]: DEBUG nova.network.neutron [req-951c6f4e-8668-442d-a17d-6b7d6cc54b08 req-1498fe77-8a1b-4b67-9871-56d2873fae67 service nova] [instance: 20c4fabc-fc9b-49c7-ab28-fa092ad66038] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1200.846480] env[62070]: DEBUG oslo_concurrency.lockutils [None req-81d98a03-f8a6-4915-a8d4-594e9529a93c tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Acquiring lock "refresh_cache-9a37cf1a-fd25-48b9-923d-75a95857101b" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1200.846888] env[62070]: DEBUG oslo_concurrency.lockutils [None req-81d98a03-f8a6-4915-a8d4-594e9529a93c tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Acquired lock "refresh_cache-9a37cf1a-fd25-48b9-923d-75a95857101b" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1200.846888] env[62070]: DEBUG nova.network.neutron [None req-81d98a03-f8a6-4915-a8d4-594e9529a93c tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 9a37cf1a-fd25-48b9-923d-75a95857101b] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1201.138423] env[62070]: DEBUG nova.network.neutron [-] [instance: 20c4fabc-fc9b-49c7-ab28-fa092ad66038] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1201.162044] env[62070]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-adb1c621-2638-409e-8551-8aea2f91107c {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
1201.172085] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65224c57-649e-4816-8d3c-38a1e194d04d {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.203054] env[62070]: DEBUG nova.compute.manager [req-951c6f4e-8668-442d-a17d-6b7d6cc54b08 req-1498fe77-8a1b-4b67-9871-56d2873fae67 service nova] [instance: 20c4fabc-fc9b-49c7-ab28-fa092ad66038] Detach interface failed, port_id=0eb3c7d4-224c-4d94-9450-0623a1e1b162, reason: Instance 20c4fabc-fc9b-49c7-ab28-fa092ad66038 could not be found. {{(pid=62070) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1201.567177] env[62070]: DEBUG nova.network.neutron [None req-81d98a03-f8a6-4915-a8d4-594e9529a93c tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 9a37cf1a-fd25-48b9-923d-75a95857101b] Updating instance_info_cache with network_info: [{"id": "fe7b579a-99c3-40b9-a975-95ddca559b4d", "address": "fa:16:3e:72:7f:c3", "network": {"id": "1ca75409-ae3f-4837-a2b6-ed2445253336", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1883034081-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.208", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f29ac48ab6544ec0bd1d210aec05dbc5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1559ce49-7345-443f-bf02-4bfeb88356ef", "external-id": "nsx-vlan-transportzone-670", "segmentation_id": 670, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfe7b579a-99", "ovs_interfaceid": "fe7b579a-99c3-40b9-a975-95ddca559b4d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1201.641783] env[62070]: INFO nova.compute.manager [-] [instance: 20c4fabc-fc9b-49c7-ab28-fa092ad66038] Took 1.47 seconds to deallocate network for instance. 
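Editor's note (not part of the log): the "Updating instance_info_cache with network_info" entries above carry the full VIF description as a nested list of dicts — one entry per port, with the subnet's fixed IP and any attached floating IPs nested under network -> subnets -> ips. As a reading aid only (this is not Nova code), a short sketch that pulls the addresses out of data shaped like the cache entry logged for instance 9a37cf1a-fd25-48b9-923d-75a95857101b; the sample dict is a trimmed copy of that logged structure:

    # Illustrative sketch only: walk a network_info list shaped like the one
    # logged above and collect its fixed and floating IPv4 addresses.
    network_info = [{
        "id": "fe7b579a-99c3-40b9-a975-95ddca559b4d",
        "address": "fa:16:3e:72:7f:c3",
        "network": {
            "subnets": [{
                "cidr": "192.168.128.0/28",
                "ips": [{
                    "address": "192.168.128.10",
                    "type": "fixed",
                    "floating_ips": [{"address": "10.180.180.208", "type": "floating"}],
                }],
            }],
        },
        "devname": "tapfe7b579a-99",
    }]

    def collect_addresses(network_info):
        fixed, floating = [], []
        for vif in network_info:
            for subnet in vif.get("network", {}).get("subnets", []):
                for ip in subnet.get("ips", []):
                    fixed.append(ip["address"])
                    floating.extend(f["address"] for f in ip.get("floating_ips", []))
        return fixed, floating

    print(collect_addresses(network_info))
    # -> (['192.168.128.10'], ['10.180.180.208'])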
[ 1201.689384] env[62070]: INFO nova.compute.manager [None req-f82b88f7-0b97-44b7-913e-68fa9de6122f tempest-ServerActionsV293TestJSON-1711639707 tempest-ServerActionsV293TestJSON-1711639707-project-member] [instance: c2b65119-77ff-437b-8f7e-cf6e83d907bb] Rebuilding instance [ 1201.731229] env[62070]: DEBUG nova.compute.manager [None req-f82b88f7-0b97-44b7-913e-68fa9de6122f tempest-ServerActionsV293TestJSON-1711639707 tempest-ServerActionsV293TestJSON-1711639707-project-member] [instance: c2b65119-77ff-437b-8f7e-cf6e83d907bb] Checking state {{(pid=62070) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1201.732102] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f55f3e7-f80b-4c68-9fb2-7ba5b3fa361b {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1202.069793] env[62070]: DEBUG oslo_concurrency.lockutils [None req-81d98a03-f8a6-4915-a8d4-594e9529a93c tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Releasing lock "refresh_cache-9a37cf1a-fd25-48b9-923d-75a95857101b" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1202.070790] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0557b67b-62c7-458a-a9c8-395a39949d01 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1202.077698] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-81d98a03-f8a6-4915-a8d4-594e9529a93c tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 9a37cf1a-fd25-48b9-923d-75a95857101b] Resuming the VM {{(pid=62070) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1184}} [ 1202.077928] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-cbf535eb-38fc-40f7-81ea-35cd265bdb93 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1202.084253] env[62070]: DEBUG oslo_vmware.api [None req-81d98a03-f8a6-4915-a8d4-594e9529a93c tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Waiting for the task: (returnval){ [ 1202.084253] env[62070]: value = "task-1122580" [ 1202.084253] env[62070]: _type = "Task" [ 1202.084253] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1202.092401] env[62070]: DEBUG oslo_vmware.api [None req-81d98a03-f8a6-4915-a8d4-594e9529a93c tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Task: {'id': task-1122580, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1202.150773] env[62070]: DEBUG oslo_concurrency.lockutils [None req-5252c2e8-d10d-4b89-9a29-70d17fb37dd6 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1202.151086] env[62070]: DEBUG oslo_concurrency.lockutils [None req-5252c2e8-d10d-4b89-9a29-70d17fb37dd6 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1202.151320] env[62070]: DEBUG nova.objects.instance [None req-5252c2e8-d10d-4b89-9a29-70d17fb37dd6 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Lazy-loading 'resources' on Instance uuid 20c4fabc-fc9b-49c7-ab28-fa092ad66038 {{(pid=62070) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1202.242898] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-f82b88f7-0b97-44b7-913e-68fa9de6122f tempest-ServerActionsV293TestJSON-1711639707 tempest-ServerActionsV293TestJSON-1711639707-project-member] [instance: c2b65119-77ff-437b-8f7e-cf6e83d907bb] Powering off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1202.243236] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1fb0f1e8-e5eb-4944-bfd0-eb1d6982d120 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1202.250395] env[62070]: DEBUG oslo_vmware.api [None req-f82b88f7-0b97-44b7-913e-68fa9de6122f tempest-ServerActionsV293TestJSON-1711639707 tempest-ServerActionsV293TestJSON-1711639707-project-member] Waiting for the task: (returnval){ [ 1202.250395] env[62070]: value = "task-1122581" [ 1202.250395] env[62070]: _type = "Task" [ 1202.250395] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1202.258654] env[62070]: DEBUG oslo_vmware.api [None req-f82b88f7-0b97-44b7-913e-68fa9de6122f tempest-ServerActionsV293TestJSON-1711639707 tempest-ServerActionsV293TestJSON-1711639707-project-member] Task: {'id': task-1122581, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1202.598053] env[62070]: DEBUG oslo_vmware.api [None req-81d98a03-f8a6-4915-a8d4-594e9529a93c tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Task: {'id': task-1122580, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1202.725488] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ddfb23d-ef53-4c00-97ac-44ef969bb9d7 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1202.735613] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd110285-d683-47eb-ab30-4d3c10f260b3 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1202.771147] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bc42475-1c11-4dcd-bbe3-ab4c2a8f132d {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1202.782298] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c8cecd2-1650-4507-8db3-7b0771aa009c {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1202.786190] env[62070]: DEBUG oslo_vmware.api [None req-f82b88f7-0b97-44b7-913e-68fa9de6122f tempest-ServerActionsV293TestJSON-1711639707 tempest-ServerActionsV293TestJSON-1711639707-project-member] Task: {'id': task-1122581, 'name': PowerOffVM_Task, 'duration_secs': 0.267949} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1202.786510] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-f82b88f7-0b97-44b7-913e-68fa9de6122f tempest-ServerActionsV293TestJSON-1711639707 tempest-ServerActionsV293TestJSON-1711639707-project-member] [instance: c2b65119-77ff-437b-8f7e-cf6e83d907bb] Powered off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1202.787233] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-f82b88f7-0b97-44b7-913e-68fa9de6122f tempest-ServerActionsV293TestJSON-1711639707 tempest-ServerActionsV293TestJSON-1711639707-project-member] [instance: c2b65119-77ff-437b-8f7e-cf6e83d907bb] Powering off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1202.787905] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-90db715d-b853-490e-bf10-11e82f57c539 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1202.800575] env[62070]: DEBUG nova.compute.provider_tree [None req-5252c2e8-d10d-4b89-9a29-70d17fb37dd6 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1202.803247] env[62070]: DEBUG oslo_vmware.api [None req-f82b88f7-0b97-44b7-913e-68fa9de6122f tempest-ServerActionsV293TestJSON-1711639707 tempest-ServerActionsV293TestJSON-1711639707-project-member] Waiting for the task: (returnval){ [ 1202.803247] env[62070]: value = "task-1122582" [ 1202.803247] env[62070]: _type = "Task" [ 1202.803247] env[62070]: } to complete. 
{{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1202.812788] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-f82b88f7-0b97-44b7-913e-68fa9de6122f tempest-ServerActionsV293TestJSON-1711639707 tempest-ServerActionsV293TestJSON-1711639707-project-member] [instance: c2b65119-77ff-437b-8f7e-cf6e83d907bb] VM already powered off {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1509}} [ 1202.813025] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-f82b88f7-0b97-44b7-913e-68fa9de6122f tempest-ServerActionsV293TestJSON-1711639707 tempest-ServerActionsV293TestJSON-1711639707-project-member] [instance: c2b65119-77ff-437b-8f7e-cf6e83d907bb] Volume detach. Driver type: vmdk {{(pid=62070) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1202.813239] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-f82b88f7-0b97-44b7-913e-68fa9de6122f tempest-ServerActionsV293TestJSON-1711639707 tempest-ServerActionsV293TestJSON-1711639707-project-member] [instance: c2b65119-77ff-437b-8f7e-cf6e83d907bb] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-245525', 'volume_id': 'b510da93-2b86-4e5d-9ac1-6bde27407e94', 'name': 'volume-b510da93-2b86-4e5d-9ac1-6bde27407e94', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'c2b65119-77ff-437b-8f7e-cf6e83d907bb', 'attached_at': '', 'detached_at': '', 'volume_id': 'b510da93-2b86-4e5d-9ac1-6bde27407e94', 'serial': 'b510da93-2b86-4e5d-9ac1-6bde27407e94'} {{(pid=62070) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1202.814107] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3586d4d-e630-459b-8658-91a03a6271a4 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1202.834732] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0987c493-e829-4933-9ba6-def8401d480a {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1202.842097] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33bcbe63-4767-4f07-96a9-933ee77fbc3d {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1202.859670] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8d52ba8-11a7-428f-b675-2e569e0b6407 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1202.874441] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-f82b88f7-0b97-44b7-913e-68fa9de6122f tempest-ServerActionsV293TestJSON-1711639707 tempest-ServerActionsV293TestJSON-1711639707-project-member] The volume has not been displaced from its original location: [datastore2] volume-b510da93-2b86-4e5d-9ac1-6bde27407e94/volume-b510da93-2b86-4e5d-9ac1-6bde27407e94.vmdk. No consolidation needed. 
{{(pid=62070) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1202.879657] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-f82b88f7-0b97-44b7-913e-68fa9de6122f tempest-ServerActionsV293TestJSON-1711639707 tempest-ServerActionsV293TestJSON-1711639707-project-member] [instance: c2b65119-77ff-437b-8f7e-cf6e83d907bb] Reconfiguring VM instance instance-0000006e to detach disk 2000 {{(pid=62070) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1202.880227] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cac605cc-8473-472e-a386-f6f3f7888c29 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1202.897795] env[62070]: DEBUG oslo_vmware.api [None req-f82b88f7-0b97-44b7-913e-68fa9de6122f tempest-ServerActionsV293TestJSON-1711639707 tempest-ServerActionsV293TestJSON-1711639707-project-member] Waiting for the task: (returnval){ [ 1202.897795] env[62070]: value = "task-1122583" [ 1202.897795] env[62070]: _type = "Task" [ 1202.897795] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1202.905182] env[62070]: DEBUG oslo_vmware.api [None req-f82b88f7-0b97-44b7-913e-68fa9de6122f tempest-ServerActionsV293TestJSON-1711639707 tempest-ServerActionsV293TestJSON-1711639707-project-member] Task: {'id': task-1122583, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1203.095858] env[62070]: DEBUG oslo_vmware.api [None req-81d98a03-f8a6-4915-a8d4-594e9529a93c tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Task: {'id': task-1122580, 'name': PowerOnVM_Task, 'duration_secs': 0.529898} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1203.095858] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-81d98a03-f8a6-4915-a8d4-594e9529a93c tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 9a37cf1a-fd25-48b9-923d-75a95857101b] Resumed the VM {{(pid=62070) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1189}} [ 1203.095944] env[62070]: DEBUG nova.compute.manager [None req-81d98a03-f8a6-4915-a8d4-594e9529a93c tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 9a37cf1a-fd25-48b9-923d-75a95857101b] Checking state {{(pid=62070) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1203.096717] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db45e9cf-0573-43ce-8bef-84f668d038ad {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.305247] env[62070]: DEBUG nova.scheduler.client.report [None req-5252c2e8-d10d-4b89-9a29-70d17fb37dd6 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1203.410983] env[62070]: DEBUG oslo_vmware.api [None req-f82b88f7-0b97-44b7-913e-68fa9de6122f tempest-ServerActionsV293TestJSON-1711639707 tempest-ServerActionsV293TestJSON-1711639707-project-member] Task: {'id': task-1122583, 'name': ReconfigVM_Task, 'duration_secs': 0.165734} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1203.411342] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-f82b88f7-0b97-44b7-913e-68fa9de6122f tempest-ServerActionsV293TestJSON-1711639707 tempest-ServerActionsV293TestJSON-1711639707-project-member] [instance: c2b65119-77ff-437b-8f7e-cf6e83d907bb] Reconfigured VM instance instance-0000006e to detach disk 2000 {{(pid=62070) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1203.416916] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-669948d7-0eef-4712-9b05-0c6a974232e8 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.430840] env[62070]: DEBUG oslo_vmware.api [None req-f82b88f7-0b97-44b7-913e-68fa9de6122f tempest-ServerActionsV293TestJSON-1711639707 tempest-ServerActionsV293TestJSON-1711639707-project-member] Waiting for the task: (returnval){ [ 1203.430840] env[62070]: value = "task-1122584" [ 1203.430840] env[62070]: _type = "Task" [ 1203.430840] env[62070]: } to complete. 
{{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1203.438292] env[62070]: DEBUG oslo_vmware.api [None req-f82b88f7-0b97-44b7-913e-68fa9de6122f tempest-ServerActionsV293TestJSON-1711639707 tempest-ServerActionsV293TestJSON-1711639707-project-member] Task: {'id': task-1122584, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1203.810685] env[62070]: DEBUG oslo_concurrency.lockutils [None req-5252c2e8-d10d-4b89-9a29-70d17fb37dd6 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.659s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1203.829737] env[62070]: INFO nova.scheduler.client.report [None req-5252c2e8-d10d-4b89-9a29-70d17fb37dd6 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Deleted allocations for instance 20c4fabc-fc9b-49c7-ab28-fa092ad66038 [ 1203.941056] env[62070]: DEBUG oslo_vmware.api [None req-f82b88f7-0b97-44b7-913e-68fa9de6122f tempest-ServerActionsV293TestJSON-1711639707 tempest-ServerActionsV293TestJSON-1711639707-project-member] Task: {'id': task-1122584, 'name': ReconfigVM_Task, 'duration_secs': 0.330429} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1203.941322] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-f82b88f7-0b97-44b7-913e-68fa9de6122f tempest-ServerActionsV293TestJSON-1711639707 tempest-ServerActionsV293TestJSON-1711639707-project-member] [instance: c2b65119-77ff-437b-8f7e-cf6e83d907bb] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-245525', 'volume_id': 'b510da93-2b86-4e5d-9ac1-6bde27407e94', 'name': 'volume-b510da93-2b86-4e5d-9ac1-6bde27407e94', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'c2b65119-77ff-437b-8f7e-cf6e83d907bb', 'attached_at': '', 'detached_at': '', 'volume_id': 'b510da93-2b86-4e5d-9ac1-6bde27407e94', 'serial': 'b510da93-2b86-4e5d-9ac1-6bde27407e94'} {{(pid=62070) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1203.941599] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-f82b88f7-0b97-44b7-913e-68fa9de6122f tempest-ServerActionsV293TestJSON-1711639707 tempest-ServerActionsV293TestJSON-1711639707-project-member] [instance: c2b65119-77ff-437b-8f7e-cf6e83d907bb] Destroying instance {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1203.942348] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15bef96a-b40b-4826-9718-142e985d0c3b {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.948594] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-f82b88f7-0b97-44b7-913e-68fa9de6122f tempest-ServerActionsV293TestJSON-1711639707 tempest-ServerActionsV293TestJSON-1711639707-project-member] [instance: c2b65119-77ff-437b-8f7e-cf6e83d907bb] Unregistering the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1203.948816] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with 
opID=oslo.vmware-cb570b7d-b431-497e-b37e-b8a305817b96 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.012890] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-f82b88f7-0b97-44b7-913e-68fa9de6122f tempest-ServerActionsV293TestJSON-1711639707 tempest-ServerActionsV293TestJSON-1711639707-project-member] [instance: c2b65119-77ff-437b-8f7e-cf6e83d907bb] Unregistered the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1204.013160] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-f82b88f7-0b97-44b7-913e-68fa9de6122f tempest-ServerActionsV293TestJSON-1711639707 tempest-ServerActionsV293TestJSON-1711639707-project-member] [instance: c2b65119-77ff-437b-8f7e-cf6e83d907bb] Deleting contents of the VM from datastore datastore2 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1204.013355] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-f82b88f7-0b97-44b7-913e-68fa9de6122f tempest-ServerActionsV293TestJSON-1711639707 tempest-ServerActionsV293TestJSON-1711639707-project-member] Deleting the datastore file [datastore2] c2b65119-77ff-437b-8f7e-cf6e83d907bb {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1204.013626] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e5b934d7-1dda-4ee4-898a-8f4360bd2a84 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.020604] env[62070]: DEBUG oslo_vmware.api [None req-f82b88f7-0b97-44b7-913e-68fa9de6122f tempest-ServerActionsV293TestJSON-1711639707 tempest-ServerActionsV293TestJSON-1711639707-project-member] Waiting for the task: (returnval){ [ 1204.020604] env[62070]: value = "task-1122586" [ 1204.020604] env[62070]: _type = "Task" [ 1204.020604] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1204.028314] env[62070]: DEBUG oslo_vmware.api [None req-f82b88f7-0b97-44b7-913e-68fa9de6122f tempest-ServerActionsV293TestJSON-1711639707 tempest-ServerActionsV293TestJSON-1711639707-project-member] Task: {'id': task-1122586, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1204.336867] env[62070]: DEBUG oslo_concurrency.lockutils [None req-5252c2e8-d10d-4b89-9a29-70d17fb37dd6 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Lock "20c4fabc-fc9b-49c7-ab28-fa092ad66038" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.788s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1204.532105] env[62070]: DEBUG oslo_vmware.api [None req-f82b88f7-0b97-44b7-913e-68fa9de6122f tempest-ServerActionsV293TestJSON-1711639707 tempest-ServerActionsV293TestJSON-1711639707-project-member] Task: {'id': task-1122586, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.089096} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1204.532368] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-f82b88f7-0b97-44b7-913e-68fa9de6122f tempest-ServerActionsV293TestJSON-1711639707 tempest-ServerActionsV293TestJSON-1711639707-project-member] Deleted the datastore file {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1204.532571] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-f82b88f7-0b97-44b7-913e-68fa9de6122f tempest-ServerActionsV293TestJSON-1711639707 tempest-ServerActionsV293TestJSON-1711639707-project-member] [instance: c2b65119-77ff-437b-8f7e-cf6e83d907bb] Deleted contents of the VM from datastore datastore2 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1204.532783] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-f82b88f7-0b97-44b7-913e-68fa9de6122f tempest-ServerActionsV293TestJSON-1711639707 tempest-ServerActionsV293TestJSON-1711639707-project-member] [instance: c2b65119-77ff-437b-8f7e-cf6e83d907bb] Instance destroyed {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1204.559226] env[62070]: DEBUG oslo_concurrency.lockutils [None req-77b63e68-a9d7-45a0-aad8-27e66bee4823 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Acquiring lock "9a37cf1a-fd25-48b9-923d-75a95857101b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1204.559373] env[62070]: DEBUG oslo_concurrency.lockutils [None req-77b63e68-a9d7-45a0-aad8-27e66bee4823 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Lock "9a37cf1a-fd25-48b9-923d-75a95857101b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1204.559649] env[62070]: DEBUG oslo_concurrency.lockutils [None req-77b63e68-a9d7-45a0-aad8-27e66bee4823 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Acquiring lock "9a37cf1a-fd25-48b9-923d-75a95857101b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1204.559846] env[62070]: DEBUG oslo_concurrency.lockutils [None req-77b63e68-a9d7-45a0-aad8-27e66bee4823 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Lock "9a37cf1a-fd25-48b9-923d-75a95857101b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1204.560033] env[62070]: DEBUG oslo_concurrency.lockutils [None req-77b63e68-a9d7-45a0-aad8-27e66bee4823 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Lock "9a37cf1a-fd25-48b9-923d-75a95857101b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1204.562119] env[62070]: INFO nova.compute.manager [None 
req-77b63e68-a9d7-45a0-aad8-27e66bee4823 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 9a37cf1a-fd25-48b9-923d-75a95857101b] Terminating instance [ 1204.563858] env[62070]: DEBUG nova.compute.manager [None req-77b63e68-a9d7-45a0-aad8-27e66bee4823 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 9a37cf1a-fd25-48b9-923d-75a95857101b] Start destroying the instance on the hypervisor. {{(pid=62070) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1204.564069] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-77b63e68-a9d7-45a0-aad8-27e66bee4823 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 9a37cf1a-fd25-48b9-923d-75a95857101b] Destroying instance {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1204.564841] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-565859e8-4a84-4cd0-a904-425e2c84dce6 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.572061] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-77b63e68-a9d7-45a0-aad8-27e66bee4823 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 9a37cf1a-fd25-48b9-923d-75a95857101b] Powering off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1204.572293] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-962b526f-74c5-416b-ad4e-47bba071a13e {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.579399] env[62070]: DEBUG oslo_vmware.api [None req-77b63e68-a9d7-45a0-aad8-27e66bee4823 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Waiting for the task: (returnval){ [ 1204.579399] env[62070]: value = "task-1122587" [ 1204.579399] env[62070]: _type = "Task" [ 1204.579399] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1204.587092] env[62070]: DEBUG oslo_vmware.api [None req-77b63e68-a9d7-45a0-aad8-27e66bee4823 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Task: {'id': task-1122587, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1204.587954] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-f82b88f7-0b97-44b7-913e-68fa9de6122f tempest-ServerActionsV293TestJSON-1711639707 tempest-ServerActionsV293TestJSON-1711639707-project-member] [instance: c2b65119-77ff-437b-8f7e-cf6e83d907bb] Volume detach. 
Driver type: vmdk {{(pid=62070) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1204.588209] env[62070]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1db8236c-90d4-4283-a765-676c36fb5807 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.596037] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38ba20ff-a22c-44a5-8579-a6cb6b74d196 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.619928] env[62070]: ERROR nova.compute.manager [None req-f82b88f7-0b97-44b7-913e-68fa9de6122f tempest-ServerActionsV293TestJSON-1711639707 tempest-ServerActionsV293TestJSON-1711639707-project-member] [instance: c2b65119-77ff-437b-8f7e-cf6e83d907bb] Failed to detach volume b510da93-2b86-4e5d-9ac1-6bde27407e94 from /dev/sda: nova.exception.InstanceNotFound: Instance c2b65119-77ff-437b-8f7e-cf6e83d907bb could not be found. [ 1204.619928] env[62070]: ERROR nova.compute.manager [instance: c2b65119-77ff-437b-8f7e-cf6e83d907bb] Traceback (most recent call last): [ 1204.619928] env[62070]: ERROR nova.compute.manager [instance: c2b65119-77ff-437b-8f7e-cf6e83d907bb] File "/opt/stack/nova/nova/compute/manager.py", line 4142, in _do_rebuild_instance [ 1204.619928] env[62070]: ERROR nova.compute.manager [instance: c2b65119-77ff-437b-8f7e-cf6e83d907bb] self.driver.rebuild(**kwargs) [ 1204.619928] env[62070]: ERROR nova.compute.manager [instance: c2b65119-77ff-437b-8f7e-cf6e83d907bb] File "/opt/stack/nova/nova/virt/driver.py", line 493, in rebuild [ 1204.619928] env[62070]: ERROR nova.compute.manager [instance: c2b65119-77ff-437b-8f7e-cf6e83d907bb] raise NotImplementedError() [ 1204.619928] env[62070]: ERROR nova.compute.manager [instance: c2b65119-77ff-437b-8f7e-cf6e83d907bb] NotImplementedError [ 1204.619928] env[62070]: ERROR nova.compute.manager [instance: c2b65119-77ff-437b-8f7e-cf6e83d907bb] [ 1204.619928] env[62070]: ERROR nova.compute.manager [instance: c2b65119-77ff-437b-8f7e-cf6e83d907bb] During handling of the above exception, another exception occurred: [ 1204.619928] env[62070]: ERROR nova.compute.manager [instance: c2b65119-77ff-437b-8f7e-cf6e83d907bb] [ 1204.619928] env[62070]: ERROR nova.compute.manager [instance: c2b65119-77ff-437b-8f7e-cf6e83d907bb] Traceback (most recent call last): [ 1204.619928] env[62070]: ERROR nova.compute.manager [instance: c2b65119-77ff-437b-8f7e-cf6e83d907bb] File "/opt/stack/nova/nova/compute/manager.py", line 3565, in _detach_root_volume [ 1204.619928] env[62070]: ERROR nova.compute.manager [instance: c2b65119-77ff-437b-8f7e-cf6e83d907bb] self.driver.detach_volume(context, old_connection_info, [ 1204.619928] env[62070]: ERROR nova.compute.manager [instance: c2b65119-77ff-437b-8f7e-cf6e83d907bb] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 553, in detach_volume [ 1204.619928] env[62070]: ERROR nova.compute.manager [instance: c2b65119-77ff-437b-8f7e-cf6e83d907bb] return self._volumeops.detach_volume(connection_info, instance) [ 1204.619928] env[62070]: ERROR nova.compute.manager [instance: c2b65119-77ff-437b-8f7e-cf6e83d907bb] File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 649, in detach_volume [ 1204.619928] env[62070]: ERROR nova.compute.manager [instance: c2b65119-77ff-437b-8f7e-cf6e83d907bb] self._detach_volume_vmdk(connection_info, instance) [ 1204.619928] env[62070]: ERROR nova.compute.manager 
[instance: c2b65119-77ff-437b-8f7e-cf6e83d907bb] File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 569, in _detach_volume_vmdk [ 1204.619928] env[62070]: ERROR nova.compute.manager [instance: c2b65119-77ff-437b-8f7e-cf6e83d907bb] vm_ref = vm_util.get_vm_ref(self._session, instance) [ 1204.619928] env[62070]: ERROR nova.compute.manager [instance: c2b65119-77ff-437b-8f7e-cf6e83d907bb] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1135, in get_vm_ref [ 1204.619928] env[62070]: ERROR nova.compute.manager [instance: c2b65119-77ff-437b-8f7e-cf6e83d907bb] stable_ref.fetch_moref(session) [ 1204.619928] env[62070]: ERROR nova.compute.manager [instance: c2b65119-77ff-437b-8f7e-cf6e83d907bb] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1126, in fetch_moref [ 1204.619928] env[62070]: ERROR nova.compute.manager [instance: c2b65119-77ff-437b-8f7e-cf6e83d907bb] raise exception.InstanceNotFound(instance_id=self._uuid) [ 1204.619928] env[62070]: ERROR nova.compute.manager [instance: c2b65119-77ff-437b-8f7e-cf6e83d907bb] nova.exception.InstanceNotFound: Instance c2b65119-77ff-437b-8f7e-cf6e83d907bb could not be found. [ 1204.619928] env[62070]: ERROR nova.compute.manager [instance: c2b65119-77ff-437b-8f7e-cf6e83d907bb] [ 1204.747082] env[62070]: DEBUG nova.compute.utils [None req-f82b88f7-0b97-44b7-913e-68fa9de6122f tempest-ServerActionsV293TestJSON-1711639707 tempest-ServerActionsV293TestJSON-1711639707-project-member] [instance: c2b65119-77ff-437b-8f7e-cf6e83d907bb] Build of instance c2b65119-77ff-437b-8f7e-cf6e83d907bb aborted: Failed to rebuild volume backed instance. {{(pid=62070) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1204.749569] env[62070]: ERROR nova.compute.manager [None req-f82b88f7-0b97-44b7-913e-68fa9de6122f tempest-ServerActionsV293TestJSON-1711639707 tempest-ServerActionsV293TestJSON-1711639707-project-member] [instance: c2b65119-77ff-437b-8f7e-cf6e83d907bb] Setting instance vm_state to ERROR: nova.exception.BuildAbortException: Build of instance c2b65119-77ff-437b-8f7e-cf6e83d907bb aborted: Failed to rebuild volume backed instance. 
[ 1204.749569] env[62070]: ERROR nova.compute.manager [instance: c2b65119-77ff-437b-8f7e-cf6e83d907bb] Traceback (most recent call last): [ 1204.749569] env[62070]: ERROR nova.compute.manager [instance: c2b65119-77ff-437b-8f7e-cf6e83d907bb] File "/opt/stack/nova/nova/compute/manager.py", line 4142, in _do_rebuild_instance [ 1204.749569] env[62070]: ERROR nova.compute.manager [instance: c2b65119-77ff-437b-8f7e-cf6e83d907bb] self.driver.rebuild(**kwargs) [ 1204.749569] env[62070]: ERROR nova.compute.manager [instance: c2b65119-77ff-437b-8f7e-cf6e83d907bb] File "/opt/stack/nova/nova/virt/driver.py", line 493, in rebuild [ 1204.749569] env[62070]: ERROR nova.compute.manager [instance: c2b65119-77ff-437b-8f7e-cf6e83d907bb] raise NotImplementedError() [ 1204.749569] env[62070]: ERROR nova.compute.manager [instance: c2b65119-77ff-437b-8f7e-cf6e83d907bb] NotImplementedError [ 1204.749569] env[62070]: ERROR nova.compute.manager [instance: c2b65119-77ff-437b-8f7e-cf6e83d907bb] [ 1204.749569] env[62070]: ERROR nova.compute.manager [instance: c2b65119-77ff-437b-8f7e-cf6e83d907bb] During handling of the above exception, another exception occurred: [ 1204.749569] env[62070]: ERROR nova.compute.manager [instance: c2b65119-77ff-437b-8f7e-cf6e83d907bb] [ 1204.749569] env[62070]: ERROR nova.compute.manager [instance: c2b65119-77ff-437b-8f7e-cf6e83d907bb] Traceback (most recent call last): [ 1204.749569] env[62070]: ERROR nova.compute.manager [instance: c2b65119-77ff-437b-8f7e-cf6e83d907bb] File "/opt/stack/nova/nova/compute/manager.py", line 3600, in _rebuild_volume_backed_instance [ 1204.749569] env[62070]: ERROR nova.compute.manager [instance: c2b65119-77ff-437b-8f7e-cf6e83d907bb] self._detach_root_volume(context, instance, root_bdm) [ 1204.749569] env[62070]: ERROR nova.compute.manager [instance: c2b65119-77ff-437b-8f7e-cf6e83d907bb] File "/opt/stack/nova/nova/compute/manager.py", line 3579, in _detach_root_volume [ 1204.749569] env[62070]: ERROR nova.compute.manager [instance: c2b65119-77ff-437b-8f7e-cf6e83d907bb] with excutils.save_and_reraise_exception(): [ 1204.749569] env[62070]: ERROR nova.compute.manager [instance: c2b65119-77ff-437b-8f7e-cf6e83d907bb] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1204.749569] env[62070]: ERROR nova.compute.manager [instance: c2b65119-77ff-437b-8f7e-cf6e83d907bb] self.force_reraise() [ 1204.749569] env[62070]: ERROR nova.compute.manager [instance: c2b65119-77ff-437b-8f7e-cf6e83d907bb] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1204.749569] env[62070]: ERROR nova.compute.manager [instance: c2b65119-77ff-437b-8f7e-cf6e83d907bb] raise self.value [ 1204.749569] env[62070]: ERROR nova.compute.manager [instance: c2b65119-77ff-437b-8f7e-cf6e83d907bb] File "/opt/stack/nova/nova/compute/manager.py", line 3565, in _detach_root_volume [ 1204.749569] env[62070]: ERROR nova.compute.manager [instance: c2b65119-77ff-437b-8f7e-cf6e83d907bb] self.driver.detach_volume(context, old_connection_info, [ 1204.749569] env[62070]: ERROR nova.compute.manager [instance: c2b65119-77ff-437b-8f7e-cf6e83d907bb] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 553, in detach_volume [ 1204.749569] env[62070]: ERROR nova.compute.manager [instance: c2b65119-77ff-437b-8f7e-cf6e83d907bb] return self._volumeops.detach_volume(connection_info, instance) [ 1204.749569] env[62070]: ERROR nova.compute.manager [instance: c2b65119-77ff-437b-8f7e-cf6e83d907bb] File 
"/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 649, in detach_volume [ 1204.749569] env[62070]: ERROR nova.compute.manager [instance: c2b65119-77ff-437b-8f7e-cf6e83d907bb] self._detach_volume_vmdk(connection_info, instance) [ 1204.749569] env[62070]: ERROR nova.compute.manager [instance: c2b65119-77ff-437b-8f7e-cf6e83d907bb] File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 569, in _detach_volume_vmdk [ 1204.749569] env[62070]: ERROR nova.compute.manager [instance: c2b65119-77ff-437b-8f7e-cf6e83d907bb] vm_ref = vm_util.get_vm_ref(self._session, instance) [ 1204.749569] env[62070]: ERROR nova.compute.manager [instance: c2b65119-77ff-437b-8f7e-cf6e83d907bb] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1135, in get_vm_ref [ 1204.749569] env[62070]: ERROR nova.compute.manager [instance: c2b65119-77ff-437b-8f7e-cf6e83d907bb] stable_ref.fetch_moref(session) [ 1204.749569] env[62070]: ERROR nova.compute.manager [instance: c2b65119-77ff-437b-8f7e-cf6e83d907bb] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1126, in fetch_moref [ 1204.749569] env[62070]: ERROR nova.compute.manager [instance: c2b65119-77ff-437b-8f7e-cf6e83d907bb] raise exception.InstanceNotFound(instance_id=self._uuid) [ 1204.749569] env[62070]: ERROR nova.compute.manager [instance: c2b65119-77ff-437b-8f7e-cf6e83d907bb] nova.exception.InstanceNotFound: Instance c2b65119-77ff-437b-8f7e-cf6e83d907bb could not be found. [ 1204.749569] env[62070]: ERROR nova.compute.manager [instance: c2b65119-77ff-437b-8f7e-cf6e83d907bb] [ 1204.749569] env[62070]: ERROR nova.compute.manager [instance: c2b65119-77ff-437b-8f7e-cf6e83d907bb] During handling of the above exception, another exception occurred: [ 1204.749569] env[62070]: ERROR nova.compute.manager [instance: c2b65119-77ff-437b-8f7e-cf6e83d907bb] [ 1204.749569] env[62070]: ERROR nova.compute.manager [instance: c2b65119-77ff-437b-8f7e-cf6e83d907bb] Traceback (most recent call last): [ 1204.749569] env[62070]: ERROR nova.compute.manager [instance: c2b65119-77ff-437b-8f7e-cf6e83d907bb] File "/opt/stack/nova/nova/compute/manager.py", line 10865, in _error_out_instance_on_exception [ 1204.749569] env[62070]: ERROR nova.compute.manager [instance: c2b65119-77ff-437b-8f7e-cf6e83d907bb] yield [ 1204.749569] env[62070]: ERROR nova.compute.manager [instance: c2b65119-77ff-437b-8f7e-cf6e83d907bb] File "/opt/stack/nova/nova/compute/manager.py", line 3868, in rebuild_instance [ 1204.749569] env[62070]: ERROR nova.compute.manager [instance: c2b65119-77ff-437b-8f7e-cf6e83d907bb] self._do_rebuild_instance_with_claim( [ 1204.750724] env[62070]: ERROR nova.compute.manager [instance: c2b65119-77ff-437b-8f7e-cf6e83d907bb] File "/opt/stack/nova/nova/compute/manager.py", line 3954, in _do_rebuild_instance_with_claim [ 1204.750724] env[62070]: ERROR nova.compute.manager [instance: c2b65119-77ff-437b-8f7e-cf6e83d907bb] self._do_rebuild_instance( [ 1204.750724] env[62070]: ERROR nova.compute.manager [instance: c2b65119-77ff-437b-8f7e-cf6e83d907bb] File "/opt/stack/nova/nova/compute/manager.py", line 4146, in _do_rebuild_instance [ 1204.750724] env[62070]: ERROR nova.compute.manager [instance: c2b65119-77ff-437b-8f7e-cf6e83d907bb] self._rebuild_default_impl(**kwargs) [ 1204.750724] env[62070]: ERROR nova.compute.manager [instance: c2b65119-77ff-437b-8f7e-cf6e83d907bb] File "/opt/stack/nova/nova/compute/manager.py", line 3723, in _rebuild_default_impl [ 1204.750724] env[62070]: ERROR nova.compute.manager [instance: c2b65119-77ff-437b-8f7e-cf6e83d907bb] 
self._rebuild_volume_backed_instance( [ 1204.750724] env[62070]: ERROR nova.compute.manager [instance: c2b65119-77ff-437b-8f7e-cf6e83d907bb] File "/opt/stack/nova/nova/compute/manager.py", line 3615, in _rebuild_volume_backed_instance [ 1204.750724] env[62070]: ERROR nova.compute.manager [instance: c2b65119-77ff-437b-8f7e-cf6e83d907bb] raise exception.BuildAbortException( [ 1204.750724] env[62070]: ERROR nova.compute.manager [instance: c2b65119-77ff-437b-8f7e-cf6e83d907bb] nova.exception.BuildAbortException: Build of instance c2b65119-77ff-437b-8f7e-cf6e83d907bb aborted: Failed to rebuild volume backed instance. [ 1204.750724] env[62070]: ERROR nova.compute.manager [instance: c2b65119-77ff-437b-8f7e-cf6e83d907bb] [ 1205.089298] env[62070]: DEBUG oslo_vmware.api [None req-77b63e68-a9d7-45a0-aad8-27e66bee4823 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Task: {'id': task-1122587, 'name': PowerOffVM_Task, 'duration_secs': 0.202296} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1205.089728] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-77b63e68-a9d7-45a0-aad8-27e66bee4823 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 9a37cf1a-fd25-48b9-923d-75a95857101b] Powered off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1205.089728] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-77b63e68-a9d7-45a0-aad8-27e66bee4823 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 9a37cf1a-fd25-48b9-923d-75a95857101b] Unregistering the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1205.090273] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6674ec5f-3f11-44eb-a0be-cf13d1c6abc1 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1205.159878] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-77b63e68-a9d7-45a0-aad8-27e66bee4823 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 9a37cf1a-fd25-48b9-923d-75a95857101b] Unregistered the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1205.160114] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-77b63e68-a9d7-45a0-aad8-27e66bee4823 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 9a37cf1a-fd25-48b9-923d-75a95857101b] Deleting contents of the VM from datastore datastore1 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1205.160303] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-77b63e68-a9d7-45a0-aad8-27e66bee4823 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Deleting the datastore file [datastore1] 9a37cf1a-fd25-48b9-923d-75a95857101b {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1205.160611] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7cc28399-5f96-417d-9f48-d6a360e897a2 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1205.166578] env[62070]: DEBUG oslo_vmware.api [None req-77b63e68-a9d7-45a0-aad8-27e66bee4823 
tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Waiting for the task: (returnval){ [ 1205.166578] env[62070]: value = "task-1122589" [ 1205.166578] env[62070]: _type = "Task" [ 1205.166578] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1205.176211] env[62070]: DEBUG oslo_vmware.api [None req-77b63e68-a9d7-45a0-aad8-27e66bee4823 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Task: {'id': task-1122589, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1205.676924] env[62070]: DEBUG oslo_vmware.api [None req-77b63e68-a9d7-45a0-aad8-27e66bee4823 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Task: {'id': task-1122589, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.152919} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1205.677212] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-77b63e68-a9d7-45a0-aad8-27e66bee4823 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Deleted the datastore file {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1205.677427] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-77b63e68-a9d7-45a0-aad8-27e66bee4823 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 9a37cf1a-fd25-48b9-923d-75a95857101b] Deleted contents of the VM from datastore datastore1 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1205.677614] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-77b63e68-a9d7-45a0-aad8-27e66bee4823 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 9a37cf1a-fd25-48b9-923d-75a95857101b] Instance destroyed {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1205.677796] env[62070]: INFO nova.compute.manager [None req-77b63e68-a9d7-45a0-aad8-27e66bee4823 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] [instance: 9a37cf1a-fd25-48b9-923d-75a95857101b] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1205.678054] env[62070]: DEBUG oslo.service.loopingcall [None req-77b63e68-a9d7-45a0-aad8-27e66bee4823 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1205.678267] env[62070]: DEBUG nova.compute.manager [-] [instance: 9a37cf1a-fd25-48b9-923d-75a95857101b] Deallocating network for instance {{(pid=62070) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1205.678360] env[62070]: DEBUG nova.network.neutron [-] [instance: 9a37cf1a-fd25-48b9-923d-75a95857101b] deallocate_for_instance() {{(pid=62070) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1206.146863] env[62070]: DEBUG nova.compute.manager [req-1a9c5bdc-3398-4a9c-9a27-65e034529856 req-c4147e4b-0e9c-4d0a-9e4f-6fe280a8b968 service nova] [instance: 9a37cf1a-fd25-48b9-923d-75a95857101b] Received event network-vif-deleted-fe7b579a-99c3-40b9-a975-95ddca559b4d {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1206.147102] env[62070]: INFO nova.compute.manager [req-1a9c5bdc-3398-4a9c-9a27-65e034529856 req-c4147e4b-0e9c-4d0a-9e4f-6fe280a8b968 service nova] [instance: 9a37cf1a-fd25-48b9-923d-75a95857101b] Neutron deleted interface fe7b579a-99c3-40b9-a975-95ddca559b4d; detaching it from the instance and deleting it from the info cache [ 1206.147203] env[62070]: DEBUG nova.network.neutron [req-1a9c5bdc-3398-4a9c-9a27-65e034529856 req-c4147e4b-0e9c-4d0a-9e4f-6fe280a8b968 service nova] [instance: 9a37cf1a-fd25-48b9-923d-75a95857101b] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1206.624281] env[62070]: DEBUG nova.network.neutron [-] [instance: 9a37cf1a-fd25-48b9-923d-75a95857101b] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1206.634027] env[62070]: DEBUG oslo_concurrency.lockutils [None req-33ff662a-2d16-4f9a-81a9-6f5187eccab3 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Acquiring lock "229bb9ef-b8d8-40cb-a589-3aa280b904d7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1206.634027] env[62070]: DEBUG oslo_concurrency.lockutils [None req-33ff662a-2d16-4f9a-81a9-6f5187eccab3 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Lock "229bb9ef-b8d8-40cb-a589-3aa280b904d7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1206.651126] env[62070]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9dc467e2-70a8-47ac-b5d0-92612310ddad {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1206.660793] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38626397-986e-4242-a858-65a563e6df0e {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1206.684427] env[62070]: DEBUG nova.compute.manager [req-1a9c5bdc-3398-4a9c-9a27-65e034529856 req-c4147e4b-0e9c-4d0a-9e4f-6fe280a8b968 service nova] [instance: 9a37cf1a-fd25-48b9-923d-75a95857101b] Detach interface failed, 
port_id=fe7b579a-99c3-40b9-a975-95ddca559b4d, reason: Instance 9a37cf1a-fd25-48b9-923d-75a95857101b could not be found. {{(pid=62070) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1206.764022] env[62070]: DEBUG oslo_concurrency.lockutils [None req-f82b88f7-0b97-44b7-913e-68fa9de6122f tempest-ServerActionsV293TestJSON-1711639707 tempest-ServerActionsV293TestJSON-1711639707-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1206.764321] env[62070]: DEBUG oslo_concurrency.lockutils [None req-f82b88f7-0b97-44b7-913e-68fa9de6122f tempest-ServerActionsV293TestJSON-1711639707 tempest-ServerActionsV293TestJSON-1711639707-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1206.810130] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26b301ed-18df-4d8c-aad7-f388831e7836 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1206.817802] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce633746-fb18-4762-8e28-9e4cd2e69047 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1206.848166] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8473dce-e816-46ba-811b-e315e0999694 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1206.855436] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e47b0db4-abc0-4b0c-93db-520dee828456 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1206.868409] env[62070]: DEBUG nova.compute.provider_tree [None req-f82b88f7-0b97-44b7-913e-68fa9de6122f tempest-ServerActionsV293TestJSON-1711639707 tempest-ServerActionsV293TestJSON-1711639707-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1207.127303] env[62070]: INFO nova.compute.manager [-] [instance: 9a37cf1a-fd25-48b9-923d-75a95857101b] Took 1.45 seconds to deallocate network for instance. [ 1207.135561] env[62070]: DEBUG nova.compute.manager [None req-33ff662a-2d16-4f9a-81a9-6f5187eccab3 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] [instance: 229bb9ef-b8d8-40cb-a589-3aa280b904d7] Starting instance... 
{{(pid=62070) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1207.371748] env[62070]: DEBUG nova.scheduler.client.report [None req-f82b88f7-0b97-44b7-913e-68fa9de6122f tempest-ServerActionsV293TestJSON-1711639707 tempest-ServerActionsV293TestJSON-1711639707-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1207.634503] env[62070]: DEBUG oslo_concurrency.lockutils [None req-77b63e68-a9d7-45a0-aad8-27e66bee4823 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1207.657564] env[62070]: DEBUG oslo_concurrency.lockutils [None req-33ff662a-2d16-4f9a-81a9-6f5187eccab3 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1207.742540] env[62070]: DEBUG oslo_concurrency.lockutils [None req-bee5c30d-5818-43fe-a236-abfcb6dccd46 tempest-ServerActionsV293TestJSON-1711639707 tempest-ServerActionsV293TestJSON-1711639707-project-member] Acquiring lock "c2b65119-77ff-437b-8f7e-cf6e83d907bb" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1207.742805] env[62070]: DEBUG oslo_concurrency.lockutils [None req-bee5c30d-5818-43fe-a236-abfcb6dccd46 tempest-ServerActionsV293TestJSON-1711639707 tempest-ServerActionsV293TestJSON-1711639707-project-member] Lock "c2b65119-77ff-437b-8f7e-cf6e83d907bb" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1207.743042] env[62070]: DEBUG oslo_concurrency.lockutils [None req-bee5c30d-5818-43fe-a236-abfcb6dccd46 tempest-ServerActionsV293TestJSON-1711639707 tempest-ServerActionsV293TestJSON-1711639707-project-member] Acquiring lock "c2b65119-77ff-437b-8f7e-cf6e83d907bb-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1207.743239] env[62070]: DEBUG oslo_concurrency.lockutils [None req-bee5c30d-5818-43fe-a236-abfcb6dccd46 tempest-ServerActionsV293TestJSON-1711639707 tempest-ServerActionsV293TestJSON-1711639707-project-member] Lock "c2b65119-77ff-437b-8f7e-cf6e83d907bb-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 
1207.743418] env[62070]: DEBUG oslo_concurrency.lockutils [None req-bee5c30d-5818-43fe-a236-abfcb6dccd46 tempest-ServerActionsV293TestJSON-1711639707 tempest-ServerActionsV293TestJSON-1711639707-project-member] Lock "c2b65119-77ff-437b-8f7e-cf6e83d907bb-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1207.745654] env[62070]: INFO nova.compute.manager [None req-bee5c30d-5818-43fe-a236-abfcb6dccd46 tempest-ServerActionsV293TestJSON-1711639707 tempest-ServerActionsV293TestJSON-1711639707-project-member] [instance: c2b65119-77ff-437b-8f7e-cf6e83d907bb] Terminating instance [ 1207.747578] env[62070]: DEBUG nova.compute.manager [None req-bee5c30d-5818-43fe-a236-abfcb6dccd46 tempest-ServerActionsV293TestJSON-1711639707 tempest-ServerActionsV293TestJSON-1711639707-project-member] [instance: c2b65119-77ff-437b-8f7e-cf6e83d907bb] Start destroying the instance on the hypervisor. {{(pid=62070) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1207.747872] env[62070]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1b53d01e-9ef6-4bf8-ad34-68c2b3489e7c {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.757847] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b2326a6-f8b7-4d12-9e04-45da2d0ae8fe {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.785098] env[62070]: WARNING nova.virt.vmwareapi.driver [None req-bee5c30d-5818-43fe-a236-abfcb6dccd46 tempest-ServerActionsV293TestJSON-1711639707 tempest-ServerActionsV293TestJSON-1711639707-project-member] [instance: c2b65119-77ff-437b-8f7e-cf6e83d907bb] Instance does not exists. Proceeding to delete instance properties on datastore: nova.exception.InstanceNotFound: Instance c2b65119-77ff-437b-8f7e-cf6e83d907bb could not be found. [ 1207.785438] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-bee5c30d-5818-43fe-a236-abfcb6dccd46 tempest-ServerActionsV293TestJSON-1711639707 tempest-ServerActionsV293TestJSON-1711639707-project-member] [instance: c2b65119-77ff-437b-8f7e-cf6e83d907bb] Destroying instance {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1207.785857] env[62070]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b0eb3b33-d429-47f3-be5c-0a0cfd72f9fd {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.797277] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c495946-74e4-4f05-b200-33045511f913 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.829890] env[62070]: WARNING nova.virt.vmwareapi.vmops [None req-bee5c30d-5818-43fe-a236-abfcb6dccd46 tempest-ServerActionsV293TestJSON-1711639707 tempest-ServerActionsV293TestJSON-1711639707-project-member] [instance: c2b65119-77ff-437b-8f7e-cf6e83d907bb] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance c2b65119-77ff-437b-8f7e-cf6e83d907bb could not be found. 
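The two warnings just above show the delete path tolerating a VM that is already gone from vCenter: the UUID lookup raises InstanceNotFound, the driver logs it, and cleanup continues so the API-side delete still succeeds. A minimal sketch of that pattern, assuming a hypothetical find_vm_moref() in place of the driver's real UUID-to-moref lookup:

from oslo_log import log as logging

from nova import exception

LOG = logging.getLogger(__name__)


def destroy_missing_vm_tolerant(session, instance, find_vm_moref):
    """Destroy an instance even if its VM no longer exists on the backend."""
    try:
        # find_vm_moref is a stand-in for the UUID -> moref lookup that
        # raised InstanceNotFound in the traceback earlier in this log.
        vm_ref = find_vm_moref(session, instance.uuid)
    except exception.InstanceNotFound:
        LOG.warning("Instance %s does not exist on backend; proceeding "
                    "with datastore and network cleanup.", instance.uuid)
        vm_ref = None
    if vm_ref is not None:
        # Power off and unregister would happen here for a live VM.
        pass
    # Datastore file cleanup and network deallocation proceed either way,
    # which is why the log still reports "Instance destroyed" afterwards.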
[ 1207.830221] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-bee5c30d-5818-43fe-a236-abfcb6dccd46 tempest-ServerActionsV293TestJSON-1711639707 tempest-ServerActionsV293TestJSON-1711639707-project-member] [instance: c2b65119-77ff-437b-8f7e-cf6e83d907bb] Instance destroyed {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1207.830538] env[62070]: INFO nova.compute.manager [None req-bee5c30d-5818-43fe-a236-abfcb6dccd46 tempest-ServerActionsV293TestJSON-1711639707 tempest-ServerActionsV293TestJSON-1711639707-project-member] [instance: c2b65119-77ff-437b-8f7e-cf6e83d907bb] Took 0.08 seconds to destroy the instance on the hypervisor. [ 1207.830916] env[62070]: DEBUG oslo.service.loopingcall [None req-bee5c30d-5818-43fe-a236-abfcb6dccd46 tempest-ServerActionsV293TestJSON-1711639707 tempest-ServerActionsV293TestJSON-1711639707-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1207.831257] env[62070]: DEBUG nova.compute.manager [-] [instance: c2b65119-77ff-437b-8f7e-cf6e83d907bb] Deallocating network for instance {{(pid=62070) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1207.831407] env[62070]: DEBUG nova.network.neutron [-] [instance: c2b65119-77ff-437b-8f7e-cf6e83d907bb] deallocate_for_instance() {{(pid=62070) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1207.877141] env[62070]: DEBUG oslo_concurrency.lockutils [None req-f82b88f7-0b97-44b7-913e-68fa9de6122f tempest-ServerActionsV293TestJSON-1711639707 tempest-ServerActionsV293TestJSON-1711639707-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.113s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1207.877535] env[62070]: INFO nova.compute.manager [None req-f82b88f7-0b97-44b7-913e-68fa9de6122f tempest-ServerActionsV293TestJSON-1711639707 tempest-ServerActionsV293TestJSON-1711639707-project-member] [instance: c2b65119-77ff-437b-8f7e-cf6e83d907bb] Successfully reverted task state from rebuilding on failure for instance. 
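"Successfully reverted task state from rebuilding on failure" closes out the traceback at the top of this section: driver.rebuild() raised NotImplementedError, the fallback _rebuild_default_impl tried to detach the root volume of the volume-backed instance, the backend lookup raised InstanceNotFound, and the failure surfaced as BuildAbortException while the error handler rolled the task state back. The detach step in that traceback relies on oslo.utils' save_and_reraise_exception, which runs cleanup and then re-raises the original error with its traceback intact. A minimal sketch of the pattern (detach_on_backend and roll_back_attachment are illustrative stand-ins, not Nova's actual helpers):

from oslo_utils import excutils


def detach_root_volume(connection_info, instance,
                       detach_on_backend, roll_back_attachment):
    try:
        detach_on_backend(connection_info, instance)
    except Exception:
        # save_and_reraise_exception preserves the original exception and
        # traceback, runs the cleanup inside the with-block, then re-raises,
        # which is why InstanceNotFound is what the caller finally sees.
        with excutils.save_and_reraise_exception():
            roll_back_attachment(connection_info, instance)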
[ 1207.885546] env[62070]: DEBUG oslo_concurrency.lockutils [None req-77b63e68-a9d7-45a0-aad8-27e66bee4823 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.251s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1207.885815] env[62070]: DEBUG nova.objects.instance [None req-77b63e68-a9d7-45a0-aad8-27e66bee4823 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Lazy-loading 'resources' on Instance uuid 9a37cf1a-fd25-48b9-923d-75a95857101b {{(pid=62070) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1208.293254] env[62070]: DEBUG nova.compute.manager [req-4602de20-d232-49d6-a906-d4c9001d81e0 req-93e2bed2-9b1d-4be3-a2bb-2d82683229ff service nova] [instance: c2b65119-77ff-437b-8f7e-cf6e83d907bb] Received event network-vif-deleted-7e82bdd5-e879-4fcd-bb16-f0a1be82998b {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1208.293489] env[62070]: INFO nova.compute.manager [req-4602de20-d232-49d6-a906-d4c9001d81e0 req-93e2bed2-9b1d-4be3-a2bb-2d82683229ff service nova] [instance: c2b65119-77ff-437b-8f7e-cf6e83d907bb] Neutron deleted interface 7e82bdd5-e879-4fcd-bb16-f0a1be82998b; detaching it from the instance and deleting it from the info cache [ 1208.293657] env[62070]: DEBUG nova.network.neutron [req-4602de20-d232-49d6-a906-d4c9001d81e0 req-93e2bed2-9b1d-4be3-a2bb-2d82683229ff service nova] [instance: c2b65119-77ff-437b-8f7e-cf6e83d907bb] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1208.449394] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a97b5fec-5bd6-436d-aba4-11695e86840e {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1208.457737] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f94a9bd-f358-418e-9aa9-bc0abc86e775 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1208.493203] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a56f713-fe3e-468c-98ac-d264b90af58b {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1208.501712] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1de4da45-5923-4a72-926a-3696546308c4 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1208.514322] env[62070]: DEBUG nova.compute.provider_tree [None req-77b63e68-a9d7-45a0-aad8-27e66bee4823 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1208.771103] env[62070]: DEBUG nova.network.neutron [-] [instance: c2b65119-77ff-437b-8f7e-cf6e83d907bb] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1208.795809] 
env[62070]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d4efa52f-d146-439f-a394-02f0f3b4d761 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1208.808197] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1dc5c0b8-0aa5-40dd-b7ba-7e49fc97bfbe {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1208.830408] env[62070]: DEBUG nova.compute.manager [req-4602de20-d232-49d6-a906-d4c9001d81e0 req-93e2bed2-9b1d-4be3-a2bb-2d82683229ff service nova] [instance: c2b65119-77ff-437b-8f7e-cf6e83d907bb] Detach interface failed, port_id=7e82bdd5-e879-4fcd-bb16-f0a1be82998b, reason: Instance c2b65119-77ff-437b-8f7e-cf6e83d907bb could not be found. {{(pid=62070) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1209.017732] env[62070]: DEBUG nova.scheduler.client.report [None req-77b63e68-a9d7-45a0-aad8-27e66bee4823 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1209.273732] env[62070]: INFO nova.compute.manager [-] [instance: c2b65119-77ff-437b-8f7e-cf6e83d907bb] Took 1.44 seconds to deallocate network for instance. 
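The "Inventory has not changed for provider" entries mean the report client compared the inventory it was about to send to Placement with what the provider tree already records and skipped the update. The payload is a plain dict keyed by resource class, each entry carrying total, reserved, min_unit, max_unit, step_size and allocation_ratio. A simplified sketch of that comparison, using the exact figures printed in the log (not the report client's actual code):

def inventory_changed(current, proposed):
    """Return True if any resource class or any field differs."""
    if set(current) != set(proposed):
        return True
    fields = ('total', 'reserved', 'min_unit', 'max_unit',
              'step_size', 'allocation_ratio')
    return any(current[rc].get(f) != new.get(f)
               for rc, new in proposed.items() for f in fields)


reported = {
    'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16,
             'step_size': 1, 'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1,
                  'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0},
    'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169,
                'step_size': 1, 'allocation_ratio': 1.0},
}
# Identical data on both sides reproduces the "has not changed" outcome,
# so no inventory update is sent to Placement.
assert not inventory_changed(reported, dict(reported))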
[ 1209.529011] env[62070]: DEBUG oslo_concurrency.lockutils [None req-77b63e68-a9d7-45a0-aad8-27e66bee4823 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.643s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1209.532627] env[62070]: DEBUG oslo_concurrency.lockutils [None req-33ff662a-2d16-4f9a-81a9-6f5187eccab3 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.874s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1209.536663] env[62070]: INFO nova.compute.claims [None req-33ff662a-2d16-4f9a-81a9-6f5187eccab3 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] [instance: 229bb9ef-b8d8-40cb-a589-3aa280b904d7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1209.558437] env[62070]: INFO nova.scheduler.client.report [None req-77b63e68-a9d7-45a0-aad8-27e66bee4823 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Deleted allocations for instance 9a37cf1a-fd25-48b9-923d-75a95857101b [ 1209.818517] env[62070]: INFO nova.compute.manager [None req-bee5c30d-5818-43fe-a236-abfcb6dccd46 tempest-ServerActionsV293TestJSON-1711639707 tempest-ServerActionsV293TestJSON-1711639707-project-member] [instance: c2b65119-77ff-437b-8f7e-cf6e83d907bb] Took 0.54 seconds to detach 1 volumes for instance. [ 1209.822705] env[62070]: DEBUG nova.compute.manager [None req-bee5c30d-5818-43fe-a236-abfcb6dccd46 tempest-ServerActionsV293TestJSON-1711639707 tempest-ServerActionsV293TestJSON-1711639707-project-member] [instance: c2b65119-77ff-437b-8f7e-cf6e83d907bb] Deleting volume: b510da93-2b86-4e5d-9ac1-6bde27407e94 {{(pid=62070) _cleanup_volumes /opt/stack/nova/nova/compute/manager.py:3247}} [ 1210.068034] env[62070]: DEBUG oslo_concurrency.lockutils [None req-77b63e68-a9d7-45a0-aad8-27e66bee4823 tempest-ServerActionsTestJSON-67612943 tempest-ServerActionsTestJSON-67612943-project-member] Lock "9a37cf1a-fd25-48b9-923d-75a95857101b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.508s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1210.370107] env[62070]: DEBUG oslo_concurrency.lockutils [None req-bee5c30d-5818-43fe-a236-abfcb6dccd46 tempest-ServerActionsV293TestJSON-1711639707 tempest-ServerActionsV293TestJSON-1711639707-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1210.591770] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed6158b1-96e8-43b1-86a3-6f7df9c668d8 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.598768] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07d99d4f-f35c-4e08-b9f7-29688f2663d8 {{(pid=62070) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.629122] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bcd624e-9108-49f9-bc9d-467f3054d51d {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.636116] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2df52dd-373f-40ee-bca1-51b515f5321d {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.648816] env[62070]: DEBUG nova.compute.provider_tree [None req-33ff662a-2d16-4f9a-81a9-6f5187eccab3 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1211.151901] env[62070]: DEBUG nova.scheduler.client.report [None req-33ff662a-2d16-4f9a-81a9-6f5187eccab3 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1211.657497] env[62070]: DEBUG oslo_concurrency.lockutils [None req-33ff662a-2d16-4f9a-81a9-6f5187eccab3 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.126s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1211.658112] env[62070]: DEBUG nova.compute.manager [None req-33ff662a-2d16-4f9a-81a9-6f5187eccab3 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] [instance: 229bb9ef-b8d8-40cb-a589-3aa280b904d7] Start building networks asynchronously for instance. 
{{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1211.660811] env[62070]: DEBUG oslo_concurrency.lockutils [None req-bee5c30d-5818-43fe-a236-abfcb6dccd46 tempest-ServerActionsV293TestJSON-1711639707 tempest-ServerActionsV293TestJSON-1711639707-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.291s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1211.661382] env[62070]: DEBUG nova.objects.instance [None req-bee5c30d-5818-43fe-a236-abfcb6dccd46 tempest-ServerActionsV293TestJSON-1711639707 tempest-ServerActionsV293TestJSON-1711639707-project-member] Lazy-loading 'resources' on Instance uuid c2b65119-77ff-437b-8f7e-cf6e83d907bb {{(pid=62070) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1212.165633] env[62070]: DEBUG nova.compute.utils [None req-33ff662a-2d16-4f9a-81a9-6f5187eccab3 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Using /dev/sd instead of None {{(pid=62070) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1212.169856] env[62070]: DEBUG nova.compute.manager [None req-33ff662a-2d16-4f9a-81a9-6f5187eccab3 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] [instance: 229bb9ef-b8d8-40cb-a589-3aa280b904d7] Allocating IP information in the background. {{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1212.170039] env[62070]: DEBUG nova.network.neutron [None req-33ff662a-2d16-4f9a-81a9-6f5187eccab3 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] [instance: 229bb9ef-b8d8-40cb-a589-3aa280b904d7] allocate_for_instance() {{(pid=62070) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1212.219237] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6eb0a7c3-9d27-49cb-8c29-c8b5b91eadec {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.229139] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2f6680a-b261-48be-a0c5-109e0bcc2374 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.235443] env[62070]: DEBUG nova.policy [None req-33ff662a-2d16-4f9a-81a9-6f5187eccab3 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '234556dc76884adb8859102c456672f3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ca25fba006b740f2a86fe10e4abe9400', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62070) authorize /opt/stack/nova/nova/policy.py:201}} [ 1212.267202] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21bc8482-6b2d-4ae3-948e-5edf040f54f2 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.276442] env[62070]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62133d21-a7ee-4573-8657-f26cb0bcac32 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.290747] env[62070]: DEBUG nova.compute.provider_tree [None req-bee5c30d-5818-43fe-a236-abfcb6dccd46 tempest-ServerActionsV293TestJSON-1711639707 tempest-ServerActionsV293TestJSON-1711639707-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1212.635691] env[62070]: DEBUG nova.network.neutron [None req-33ff662a-2d16-4f9a-81a9-6f5187eccab3 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] [instance: 229bb9ef-b8d8-40cb-a589-3aa280b904d7] Successfully created port: d5bcacdd-b774-4af1-aa33-c5f4ec9198f2 {{(pid=62070) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1212.670439] env[62070]: DEBUG nova.compute.manager [None req-33ff662a-2d16-4f9a-81a9-6f5187eccab3 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] [instance: 229bb9ef-b8d8-40cb-a589-3aa280b904d7] Start building block device mappings for instance. {{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1212.794286] env[62070]: DEBUG nova.scheduler.client.report [None req-bee5c30d-5818-43fe-a236-abfcb6dccd46 tempest-ServerActionsV293TestJSON-1711639707 tempest-ServerActionsV293TestJSON-1711639707-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1213.299645] env[62070]: DEBUG oslo_concurrency.lockutils [None req-bee5c30d-5818-43fe-a236-abfcb6dccd46 tempest-ServerActionsV293TestJSON-1711639707 tempest-ServerActionsV293TestJSON-1711639707-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.639s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1213.680714] env[62070]: DEBUG nova.compute.manager [None req-33ff662a-2d16-4f9a-81a9-6f5187eccab3 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] [instance: 229bb9ef-b8d8-40cb-a589-3aa280b904d7] Start spawning the instance on the hypervisor. 
{{(pid=62070) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1213.707025] env[62070]: DEBUG nova.virt.hardware [None req-33ff662a-2d16-4f9a-81a9-6f5187eccab3 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T09:21:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T09:21:17Z,direct_url=,disk_format='vmdk',id=43ea607c-7ece-4601-9b11-75c6a16aa7dd,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9d42cb2bbadf40d6b35f237f71234611',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T09:21:18Z,virtual_size=,visibility=), allow threads: False {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1213.707180] env[62070]: DEBUG nova.virt.hardware [None req-33ff662a-2d16-4f9a-81a9-6f5187eccab3 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Flavor limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1213.707238] env[62070]: DEBUG nova.virt.hardware [None req-33ff662a-2d16-4f9a-81a9-6f5187eccab3 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Image limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1213.707410] env[62070]: DEBUG nova.virt.hardware [None req-33ff662a-2d16-4f9a-81a9-6f5187eccab3 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Flavor pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1213.707604] env[62070]: DEBUG nova.virt.hardware [None req-33ff662a-2d16-4f9a-81a9-6f5187eccab3 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Image pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1213.707766] env[62070]: DEBUG nova.virt.hardware [None req-33ff662a-2d16-4f9a-81a9-6f5187eccab3 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1213.707977] env[62070]: DEBUG nova.virt.hardware [None req-33ff662a-2d16-4f9a-81a9-6f5187eccab3 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1213.708159] env[62070]: DEBUG nova.virt.hardware [None req-33ff662a-2d16-4f9a-81a9-6f5187eccab3 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62070) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 1213.708335] env[62070]: DEBUG nova.virt.hardware [None req-33ff662a-2d16-4f9a-81a9-6f5187eccab3 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Got 1 possible topologies {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1213.708501] env[62070]: DEBUG nova.virt.hardware [None req-33ff662a-2d16-4f9a-81a9-6f5187eccab3 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1213.708678] env[62070]: DEBUG nova.virt.hardware [None req-33ff662a-2d16-4f9a-81a9-6f5187eccab3 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1213.709603] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4dd53d3e-7e49-4b67-913c-554a0ddf1b07 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.718600] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f2fd98f-a55c-4bb8-b0dc-e14f05253fd3 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.817953] env[62070]: DEBUG oslo_concurrency.lockutils [None req-bee5c30d-5818-43fe-a236-abfcb6dccd46 tempest-ServerActionsV293TestJSON-1711639707 tempest-ServerActionsV293TestJSON-1711639707-project-member] Lock "c2b65119-77ff-437b-8f7e-cf6e83d907bb" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.075s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1213.876419] env[62070]: DEBUG oslo_concurrency.lockutils [None req-b26a854f-3048-40e6-87b8-aaa80dfb8c88 tempest-ServersAaction247Test-491916567 tempest-ServersAaction247Test-491916567-project-member] Acquiring lock "3d7d7826-8690-4618-ac77-77e08afc6596" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1213.876563] env[62070]: DEBUG oslo_concurrency.lockutils [None req-b26a854f-3048-40e6-87b8-aaa80dfb8c88 tempest-ServersAaction247Test-491916567 tempest-ServersAaction247Test-491916567-project-member] Lock "3d7d7826-8690-4618-ac77-77e08afc6596" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1214.002653] env[62070]: DEBUG nova.compute.manager [req-a8e3fd38-c1a6-4bbc-b3f7-7a08540455a4 req-34f014c1-46ff-4817-88d8-4b5a05a96177 service nova] [instance: 229bb9ef-b8d8-40cb-a589-3aa280b904d7] Received event network-vif-plugged-d5bcacdd-b774-4af1-aa33-c5f4ec9198f2 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1214.002859] env[62070]: DEBUG oslo_concurrency.lockutils [req-a8e3fd38-c1a6-4bbc-b3f7-7a08540455a4 req-34f014c1-46ff-4817-88d8-4b5a05a96177 
service nova] Acquiring lock "229bb9ef-b8d8-40cb-a589-3aa280b904d7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1214.003148] env[62070]: DEBUG oslo_concurrency.lockutils [req-a8e3fd38-c1a6-4bbc-b3f7-7a08540455a4 req-34f014c1-46ff-4817-88d8-4b5a05a96177 service nova] Lock "229bb9ef-b8d8-40cb-a589-3aa280b904d7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1214.003264] env[62070]: DEBUG oslo_concurrency.lockutils [req-a8e3fd38-c1a6-4bbc-b3f7-7a08540455a4 req-34f014c1-46ff-4817-88d8-4b5a05a96177 service nova] Lock "229bb9ef-b8d8-40cb-a589-3aa280b904d7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1214.003429] env[62070]: DEBUG nova.compute.manager [req-a8e3fd38-c1a6-4bbc-b3f7-7a08540455a4 req-34f014c1-46ff-4817-88d8-4b5a05a96177 service nova] [instance: 229bb9ef-b8d8-40cb-a589-3aa280b904d7] No waiting events found dispatching network-vif-plugged-d5bcacdd-b774-4af1-aa33-c5f4ec9198f2 {{(pid=62070) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1214.003595] env[62070]: WARNING nova.compute.manager [req-a8e3fd38-c1a6-4bbc-b3f7-7a08540455a4 req-34f014c1-46ff-4817-88d8-4b5a05a96177 service nova] [instance: 229bb9ef-b8d8-40cb-a589-3aa280b904d7] Received unexpected event network-vif-plugged-d5bcacdd-b774-4af1-aa33-c5f4ec9198f2 for instance with vm_state building and task_state spawning. [ 1214.092423] env[62070]: DEBUG nova.network.neutron [None req-33ff662a-2d16-4f9a-81a9-6f5187eccab3 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] [instance: 229bb9ef-b8d8-40cb-a589-3aa280b904d7] Successfully updated port: d5bcacdd-b774-4af1-aa33-c5f4ec9198f2 {{(pid=62070) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1214.378945] env[62070]: DEBUG nova.compute.manager [None req-b26a854f-3048-40e6-87b8-aaa80dfb8c88 tempest-ServersAaction247Test-491916567 tempest-ServersAaction247Test-491916567-project-member] [instance: 3d7d7826-8690-4618-ac77-77e08afc6596] Starting instance... 
{{(pid=62070) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1214.595136] env[62070]: DEBUG oslo_concurrency.lockutils [None req-33ff662a-2d16-4f9a-81a9-6f5187eccab3 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Acquiring lock "refresh_cache-229bb9ef-b8d8-40cb-a589-3aa280b904d7" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1214.595312] env[62070]: DEBUG oslo_concurrency.lockutils [None req-33ff662a-2d16-4f9a-81a9-6f5187eccab3 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Acquired lock "refresh_cache-229bb9ef-b8d8-40cb-a589-3aa280b904d7" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1214.595725] env[62070]: DEBUG nova.network.neutron [None req-33ff662a-2d16-4f9a-81a9-6f5187eccab3 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] [instance: 229bb9ef-b8d8-40cb-a589-3aa280b904d7] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1214.899740] env[62070]: DEBUG oslo_concurrency.lockutils [None req-b26a854f-3048-40e6-87b8-aaa80dfb8c88 tempest-ServersAaction247Test-491916567 tempest-ServersAaction247Test-491916567-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1214.900046] env[62070]: DEBUG oslo_concurrency.lockutils [None req-b26a854f-3048-40e6-87b8-aaa80dfb8c88 tempest-ServersAaction247Test-491916567 tempest-ServersAaction247Test-491916567-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1214.901782] env[62070]: INFO nova.compute.claims [None req-b26a854f-3048-40e6-87b8-aaa80dfb8c88 tempest-ServersAaction247Test-491916567 tempest-ServersAaction247Test-491916567-project-member] [instance: 3d7d7826-8690-4618-ac77-77e08afc6596] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1215.130476] env[62070]: DEBUG nova.network.neutron [None req-33ff662a-2d16-4f9a-81a9-6f5187eccab3 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] [instance: 229bb9ef-b8d8-40cb-a589-3aa280b904d7] Instance cache missing network info. 
{{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1215.252893] env[62070]: DEBUG nova.network.neutron [None req-33ff662a-2d16-4f9a-81a9-6f5187eccab3 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] [instance: 229bb9ef-b8d8-40cb-a589-3aa280b904d7] Updating instance_info_cache with network_info: [{"id": "d5bcacdd-b774-4af1-aa33-c5f4ec9198f2", "address": "fa:16:3e:38:d5:88", "network": {"id": "b9ef8f6c-bbd6-409d-a591-ad584e5e028f", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-599171324-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ca25fba006b740f2a86fe10e4abe9400", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa8c2f93-f287-41b3-adb6-4942a7ea2a0b", "external-id": "nsx-vlan-transportzone-363", "segmentation_id": 363, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd5bcacdd-b7", "ovs_interfaceid": "d5bcacdd-b774-4af1-aa33-c5f4ec9198f2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1215.756157] env[62070]: DEBUG oslo_concurrency.lockutils [None req-33ff662a-2d16-4f9a-81a9-6f5187eccab3 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Releasing lock "refresh_cache-229bb9ef-b8d8-40cb-a589-3aa280b904d7" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1215.756450] env[62070]: DEBUG nova.compute.manager [None req-33ff662a-2d16-4f9a-81a9-6f5187eccab3 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] [instance: 229bb9ef-b8d8-40cb-a589-3aa280b904d7] Instance network_info: |[{"id": "d5bcacdd-b774-4af1-aa33-c5f4ec9198f2", "address": "fa:16:3e:38:d5:88", "network": {"id": "b9ef8f6c-bbd6-409d-a591-ad584e5e028f", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-599171324-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ca25fba006b740f2a86fe10e4abe9400", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa8c2f93-f287-41b3-adb6-4942a7ea2a0b", "external-id": "nsx-vlan-transportzone-363", "segmentation_id": 363, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd5bcacdd-b7", "ovs_interfaceid": "d5bcacdd-b774-4af1-aa33-c5f4ec9198f2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62070) 
_allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1215.756906] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-33ff662a-2d16-4f9a-81a9-6f5187eccab3 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] [instance: 229bb9ef-b8d8-40cb-a589-3aa280b904d7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:38:d5:88', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'aa8c2f93-f287-41b3-adb6-4942a7ea2a0b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd5bcacdd-b774-4af1-aa33-c5f4ec9198f2', 'vif_model': 'vmxnet3'}] {{(pid=62070) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1215.764588] env[62070]: DEBUG oslo.service.loopingcall [None req-33ff662a-2d16-4f9a-81a9-6f5187eccab3 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1215.764806] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 229bb9ef-b8d8-40cb-a589-3aa280b904d7] Creating VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1215.765042] env[62070]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8ea815ff-2bfa-4d26-81ae-362d7d41f0b1 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.784416] env[62070]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1215.784416] env[62070]: value = "task-1122591" [ 1215.784416] env[62070]: _type = "Task" [ 1215.784416] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1215.792025] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1122591, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1215.955675] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7756deaa-2043-469a-8735-92ba8fa8e7d2 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.963317] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24fd02c3-f02f-4257-90da-f101f1d6f4a4 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.995130] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c60b5d7-2286-4a73-8221-8f8c61a79035 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.003723] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3023fff2-689f-4178-b175-941a34b4b826 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.017061] env[62070]: DEBUG nova.compute.provider_tree [None req-b26a854f-3048-40e6-87b8-aaa80dfb8c88 tempest-ServersAaction247Test-491916567 tempest-ServersAaction247Test-491916567-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1216.028297] env[62070]: DEBUG nova.compute.manager [req-6c13522f-4b9e-42de-a0a2-2b13fa375b9c req-8cb7afa7-aa50-4ba2-ad11-00da1ab6bc52 service nova] [instance: 229bb9ef-b8d8-40cb-a589-3aa280b904d7] Received event network-changed-d5bcacdd-b774-4af1-aa33-c5f4ec9198f2 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1216.028388] env[62070]: DEBUG nova.compute.manager [req-6c13522f-4b9e-42de-a0a2-2b13fa375b9c req-8cb7afa7-aa50-4ba2-ad11-00da1ab6bc52 service nova] [instance: 229bb9ef-b8d8-40cb-a589-3aa280b904d7] Refreshing instance network info cache due to event network-changed-d5bcacdd-b774-4af1-aa33-c5f4ec9198f2. {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1216.028570] env[62070]: DEBUG oslo_concurrency.lockutils [req-6c13522f-4b9e-42de-a0a2-2b13fa375b9c req-8cb7afa7-aa50-4ba2-ad11-00da1ab6bc52 service nova] Acquiring lock "refresh_cache-229bb9ef-b8d8-40cb-a589-3aa280b904d7" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1216.028687] env[62070]: DEBUG oslo_concurrency.lockutils [req-6c13522f-4b9e-42de-a0a2-2b13fa375b9c req-8cb7afa7-aa50-4ba2-ad11-00da1ab6bc52 service nova] Acquired lock "refresh_cache-229bb9ef-b8d8-40cb-a589-3aa280b904d7" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1216.028856] env[62070]: DEBUG nova.network.neutron [req-6c13522f-4b9e-42de-a0a2-2b13fa375b9c req-8cb7afa7-aa50-4ba2-ad11-00da1ab6bc52 service nova] [instance: 229bb9ef-b8d8-40cb-a589-3aa280b904d7] Refreshing network info cache for port d5bcacdd-b774-4af1-aa33-c5f4ec9198f2 {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1216.294401] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1122591, 'name': CreateVM_Task, 'duration_secs': 0.319805} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1216.294589] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 229bb9ef-b8d8-40cb-a589-3aa280b904d7] Created VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1216.295278] env[62070]: DEBUG oslo_concurrency.lockutils [None req-33ff662a-2d16-4f9a-81a9-6f5187eccab3 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1216.295455] env[62070]: DEBUG oslo_concurrency.lockutils [None req-33ff662a-2d16-4f9a-81a9-6f5187eccab3 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Acquired lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1216.295784] env[62070]: DEBUG oslo_concurrency.lockutils [None req-33ff662a-2d16-4f9a-81a9-6f5187eccab3 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1216.296045] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c1132f9d-915d-4d0c-822d-afd06bb3038b {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.301127] env[62070]: DEBUG oslo_vmware.api [None req-33ff662a-2d16-4f9a-81a9-6f5187eccab3 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Waiting for the task: (returnval){ [ 1216.301127] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]52c65649-e31b-967d-1bcd-e637e6b583fb" [ 1216.301127] env[62070]: _type = "Task" [ 1216.301127] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1216.308777] env[62070]: DEBUG oslo_vmware.api [None req-33ff662a-2d16-4f9a-81a9-6f5187eccab3 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52c65649-e31b-967d-1bcd-e637e6b583fb, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1216.520335] env[62070]: DEBUG nova.scheduler.client.report [None req-b26a854f-3048-40e6-87b8-aaa80dfb8c88 tempest-ServersAaction247Test-491916567 tempest-ServersAaction247Test-491916567-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1216.706290] env[62070]: DEBUG nova.network.neutron [req-6c13522f-4b9e-42de-a0a2-2b13fa375b9c req-8cb7afa7-aa50-4ba2-ad11-00da1ab6bc52 service nova] [instance: 229bb9ef-b8d8-40cb-a589-3aa280b904d7] Updated VIF entry in instance network info cache for port d5bcacdd-b774-4af1-aa33-c5f4ec9198f2. {{(pid=62070) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1216.706655] env[62070]: DEBUG nova.network.neutron [req-6c13522f-4b9e-42de-a0a2-2b13fa375b9c req-8cb7afa7-aa50-4ba2-ad11-00da1ab6bc52 service nova] [instance: 229bb9ef-b8d8-40cb-a589-3aa280b904d7] Updating instance_info_cache with network_info: [{"id": "d5bcacdd-b774-4af1-aa33-c5f4ec9198f2", "address": "fa:16:3e:38:d5:88", "network": {"id": "b9ef8f6c-bbd6-409d-a591-ad584e5e028f", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-599171324-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ca25fba006b740f2a86fe10e4abe9400", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa8c2f93-f287-41b3-adb6-4942a7ea2a0b", "external-id": "nsx-vlan-transportzone-363", "segmentation_id": 363, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd5bcacdd-b7", "ovs_interfaceid": "d5bcacdd-b774-4af1-aa33-c5f4ec9198f2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1216.811095] env[62070]: DEBUG oslo_vmware.api [None req-33ff662a-2d16-4f9a-81a9-6f5187eccab3 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52c65649-e31b-967d-1bcd-e637e6b583fb, 'name': SearchDatastore_Task, 'duration_secs': 0.010254} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1216.811355] env[62070]: DEBUG oslo_concurrency.lockutils [None req-33ff662a-2d16-4f9a-81a9-6f5187eccab3 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Releasing lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1216.811595] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-33ff662a-2d16-4f9a-81a9-6f5187eccab3 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] [instance: 229bb9ef-b8d8-40cb-a589-3aa280b904d7] Processing image 43ea607c-7ece-4601-9b11-75c6a16aa7dd {{(pid=62070) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1216.811832] env[62070]: DEBUG oslo_concurrency.lockutils [None req-33ff662a-2d16-4f9a-81a9-6f5187eccab3 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1216.812015] env[62070]: DEBUG oslo_concurrency.lockutils [None req-33ff662a-2d16-4f9a-81a9-6f5187eccab3 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Acquired lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1216.812223] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-33ff662a-2d16-4f9a-81a9-6f5187eccab3 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1216.812481] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0ce65ea6-ab7c-497a-b2d5-1df22091e33a {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.820046] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-33ff662a-2d16-4f9a-81a9-6f5187eccab3 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1216.820220] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-33ff662a-2d16-4f9a-81a9-6f5187eccab3 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62070) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1216.820898] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f7b0692e-7abc-48f5-a1f1-d9b98889d4d1 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.825772] env[62070]: DEBUG oslo_vmware.api [None req-33ff662a-2d16-4f9a-81a9-6f5187eccab3 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Waiting for the task: (returnval){ [ 1216.825772] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]52ee168d-e993-5ddb-d1bc-62506d7e57ac" [ 1216.825772] env[62070]: _type = "Task" [ 1216.825772] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1216.832993] env[62070]: DEBUG oslo_vmware.api [None req-33ff662a-2d16-4f9a-81a9-6f5187eccab3 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52ee168d-e993-5ddb-d1bc-62506d7e57ac, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1217.025265] env[62070]: DEBUG oslo_concurrency.lockutils [None req-b26a854f-3048-40e6-87b8-aaa80dfb8c88 tempest-ServersAaction247Test-491916567 tempest-ServersAaction247Test-491916567-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.125s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1217.025847] env[62070]: DEBUG nova.compute.manager [None req-b26a854f-3048-40e6-87b8-aaa80dfb8c88 tempest-ServersAaction247Test-491916567 tempest-ServersAaction247Test-491916567-project-member] [instance: 3d7d7826-8690-4618-ac77-77e08afc6596] Start building networks asynchronously for instance. {{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1217.209602] env[62070]: DEBUG oslo_concurrency.lockutils [req-6c13522f-4b9e-42de-a0a2-2b13fa375b9c req-8cb7afa7-aa50-4ba2-ad11-00da1ab6bc52 service nova] Releasing lock "refresh_cache-229bb9ef-b8d8-40cb-a589-3aa280b904d7" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1217.336309] env[62070]: DEBUG oslo_vmware.api [None req-33ff662a-2d16-4f9a-81a9-6f5187eccab3 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52ee168d-e993-5ddb-d1bc-62506d7e57ac, 'name': SearchDatastore_Task, 'duration_secs': 0.009552} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1217.337106] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-91270be9-7a2f-4c02-ad1b-fc401dc864fd {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1217.342128] env[62070]: DEBUG oslo_vmware.api [None req-33ff662a-2d16-4f9a-81a9-6f5187eccab3 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Waiting for the task: (returnval){ [ 1217.342128] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]527ae5b6-e37d-40a3-854c-1c6d5a01033d" [ 1217.342128] env[62070]: _type = "Task" [ 1217.342128] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1217.349579] env[62070]: DEBUG oslo_vmware.api [None req-33ff662a-2d16-4f9a-81a9-6f5187eccab3 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]527ae5b6-e37d-40a3-854c-1c6d5a01033d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1217.530808] env[62070]: DEBUG nova.compute.utils [None req-b26a854f-3048-40e6-87b8-aaa80dfb8c88 tempest-ServersAaction247Test-491916567 tempest-ServersAaction247Test-491916567-project-member] Using /dev/sd instead of None {{(pid=62070) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1217.531951] env[62070]: DEBUG nova.compute.manager [None req-b26a854f-3048-40e6-87b8-aaa80dfb8c88 tempest-ServersAaction247Test-491916567 tempest-ServersAaction247Test-491916567-project-member] [instance: 3d7d7826-8690-4618-ac77-77e08afc6596] Not allocating networking since 'none' was specified. {{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1977}} [ 1217.852943] env[62070]: DEBUG oslo_vmware.api [None req-33ff662a-2d16-4f9a-81a9-6f5187eccab3 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]527ae5b6-e37d-40a3-854c-1c6d5a01033d, 'name': SearchDatastore_Task, 'duration_secs': 0.009415} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1217.853225] env[62070]: DEBUG oslo_concurrency.lockutils [None req-33ff662a-2d16-4f9a-81a9-6f5187eccab3 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Releasing lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1217.853478] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-33ff662a-2d16-4f9a-81a9-6f5187eccab3 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore1] 229bb9ef-b8d8-40cb-a589-3aa280b904d7/229bb9ef-b8d8-40cb-a589-3aa280b904d7.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1217.853732] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-422294dd-c0c5-487f-b0aa-ad25bc0c27e6 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1217.860765] env[62070]: DEBUG oslo_vmware.api [None req-33ff662a-2d16-4f9a-81a9-6f5187eccab3 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Waiting for the task: (returnval){ [ 1217.860765] env[62070]: value = "task-1122592" [ 1217.860765] env[62070]: _type = "Task" [ 1217.860765] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1217.869717] env[62070]: DEBUG oslo_vmware.api [None req-33ff662a-2d16-4f9a-81a9-6f5187eccab3 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Task: {'id': task-1122592, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1218.033781] env[62070]: DEBUG nova.compute.manager [None req-b26a854f-3048-40e6-87b8-aaa80dfb8c88 tempest-ServersAaction247Test-491916567 tempest-ServersAaction247Test-491916567-project-member] [instance: 3d7d7826-8690-4618-ac77-77e08afc6596] Start building block device mappings for instance. {{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1218.370848] env[62070]: DEBUG oslo_vmware.api [None req-33ff662a-2d16-4f9a-81a9-6f5187eccab3 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Task: {'id': task-1122592, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.470869} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1218.371084] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-33ff662a-2d16-4f9a-81a9-6f5187eccab3 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore1] 229bb9ef-b8d8-40cb-a589-3aa280b904d7/229bb9ef-b8d8-40cb-a589-3aa280b904d7.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 1218.371279] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-33ff662a-2d16-4f9a-81a9-6f5187eccab3 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] [instance: 229bb9ef-b8d8-40cb-a589-3aa280b904d7] Extending root virtual disk to 1048576 {{(pid=62070) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1218.371529] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1dd7a934-38d5-442a-9878-30eb7f9d8ccf {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.378057] env[62070]: DEBUG oslo_vmware.api [None req-33ff662a-2d16-4f9a-81a9-6f5187eccab3 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Waiting for the task: (returnval){ [ 1218.378057] env[62070]: value = "task-1122593" [ 1218.378057] env[62070]: _type = "Task" [ 1218.378057] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1218.385339] env[62070]: DEBUG oslo_vmware.api [None req-33ff662a-2d16-4f9a-81a9-6f5187eccab3 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Task: {'id': task-1122593, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1218.888334] env[62070]: DEBUG oslo_vmware.api [None req-33ff662a-2d16-4f9a-81a9-6f5187eccab3 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Task: {'id': task-1122593, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.399125} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1218.888728] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-33ff662a-2d16-4f9a-81a9-6f5187eccab3 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] [instance: 229bb9ef-b8d8-40cb-a589-3aa280b904d7] Extended root virtual disk {{(pid=62070) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1218.889516] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c33ea84d-39da-4da8-b1a4-0b1b2d523dd0 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.911626] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-33ff662a-2d16-4f9a-81a9-6f5187eccab3 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] [instance: 229bb9ef-b8d8-40cb-a589-3aa280b904d7] Reconfiguring VM instance instance-00000070 to attach disk [datastore1] 229bb9ef-b8d8-40cb-a589-3aa280b904d7/229bb9ef-b8d8-40cb-a589-3aa280b904d7.vmdk or device None with type sparse {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1218.911940] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c77d641e-0524-478e-b4f9-45cc3da1aaa8 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.931324] env[62070]: DEBUG oslo_vmware.api [None req-33ff662a-2d16-4f9a-81a9-6f5187eccab3 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Waiting for the task: (returnval){ [ 1218.931324] env[62070]: value = "task-1122594" [ 1218.931324] env[62070]: _type = "Task" [ 1218.931324] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1218.939313] env[62070]: DEBUG oslo_vmware.api [None req-33ff662a-2d16-4f9a-81a9-6f5187eccab3 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Task: {'id': task-1122594, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1219.043650] env[62070]: DEBUG nova.compute.manager [None req-b26a854f-3048-40e6-87b8-aaa80dfb8c88 tempest-ServersAaction247Test-491916567 tempest-ServersAaction247Test-491916567-project-member] [instance: 3d7d7826-8690-4618-ac77-77e08afc6596] Start spawning the instance on the hypervisor. 
{{(pid=62070) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1219.069936] env[62070]: DEBUG nova.virt.hardware [None req-b26a854f-3048-40e6-87b8-aaa80dfb8c88 tempest-ServersAaction247Test-491916567 tempest-ServersAaction247Test-491916567-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T09:21:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T09:21:17Z,direct_url=,disk_format='vmdk',id=43ea607c-7ece-4601-9b11-75c6a16aa7dd,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9d42cb2bbadf40d6b35f237f71234611',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T09:21:18Z,virtual_size=,visibility=), allow threads: False {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1219.070225] env[62070]: DEBUG nova.virt.hardware [None req-b26a854f-3048-40e6-87b8-aaa80dfb8c88 tempest-ServersAaction247Test-491916567 tempest-ServersAaction247Test-491916567-project-member] Flavor limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1219.070362] env[62070]: DEBUG nova.virt.hardware [None req-b26a854f-3048-40e6-87b8-aaa80dfb8c88 tempest-ServersAaction247Test-491916567 tempest-ServersAaction247Test-491916567-project-member] Image limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1219.070549] env[62070]: DEBUG nova.virt.hardware [None req-b26a854f-3048-40e6-87b8-aaa80dfb8c88 tempest-ServersAaction247Test-491916567 tempest-ServersAaction247Test-491916567-project-member] Flavor pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1219.070736] env[62070]: DEBUG nova.virt.hardware [None req-b26a854f-3048-40e6-87b8-aaa80dfb8c88 tempest-ServersAaction247Test-491916567 tempest-ServersAaction247Test-491916567-project-member] Image pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1219.070892] env[62070]: DEBUG nova.virt.hardware [None req-b26a854f-3048-40e6-87b8-aaa80dfb8c88 tempest-ServersAaction247Test-491916567 tempest-ServersAaction247Test-491916567-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1219.071137] env[62070]: DEBUG nova.virt.hardware [None req-b26a854f-3048-40e6-87b8-aaa80dfb8c88 tempest-ServersAaction247Test-491916567 tempest-ServersAaction247Test-491916567-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1219.071313] env[62070]: DEBUG nova.virt.hardware [None req-b26a854f-3048-40e6-87b8-aaa80dfb8c88 tempest-ServersAaction247Test-491916567 tempest-ServersAaction247Test-491916567-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1219.071487] env[62070]: DEBUG nova.virt.hardware [None 
req-b26a854f-3048-40e6-87b8-aaa80dfb8c88 tempest-ServersAaction247Test-491916567 tempest-ServersAaction247Test-491916567-project-member] Got 1 possible topologies {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1219.071658] env[62070]: DEBUG nova.virt.hardware [None req-b26a854f-3048-40e6-87b8-aaa80dfb8c88 tempest-ServersAaction247Test-491916567 tempest-ServersAaction247Test-491916567-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1219.071840] env[62070]: DEBUG nova.virt.hardware [None req-b26a854f-3048-40e6-87b8-aaa80dfb8c88 tempest-ServersAaction247Test-491916567 tempest-ServersAaction247Test-491916567-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1219.072714] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f5e50f3-8f5e-40f8-89f1-e4070e87ecd3 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1219.080567] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b63a946-0b6a-4173-8a30-13068aeecfc1 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1219.095162] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-b26a854f-3048-40e6-87b8-aaa80dfb8c88 tempest-ServersAaction247Test-491916567 tempest-ServersAaction247Test-491916567-project-member] [instance: 3d7d7826-8690-4618-ac77-77e08afc6596] Instance VIF info [] {{(pid=62070) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1219.100520] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-b26a854f-3048-40e6-87b8-aaa80dfb8c88 tempest-ServersAaction247Test-491916567 tempest-ServersAaction247Test-491916567-project-member] Creating folder: Project (764913dd24334b7781edc2ca94073989). Parent ref: group-v245319. {{(pid=62070) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1219.100778] env[62070]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-99f317a8-fa03-479a-87aa-5fc69de30d40 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1219.110739] env[62070]: INFO nova.virt.vmwareapi.vm_util [None req-b26a854f-3048-40e6-87b8-aaa80dfb8c88 tempest-ServersAaction247Test-491916567 tempest-ServersAaction247Test-491916567-project-member] Created folder: Project (764913dd24334b7781edc2ca94073989) in parent group-v245319. [ 1219.110936] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-b26a854f-3048-40e6-87b8-aaa80dfb8c88 tempest-ServersAaction247Test-491916567 tempest-ServersAaction247Test-491916567-project-member] Creating folder: Instances. Parent ref: group-v245534. 
{{(pid=62070) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1219.111201] env[62070]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a33c44e3-3caa-46b3-8891-35c20b5791de {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1219.119893] env[62070]: INFO nova.virt.vmwareapi.vm_util [None req-b26a854f-3048-40e6-87b8-aaa80dfb8c88 tempest-ServersAaction247Test-491916567 tempest-ServersAaction247Test-491916567-project-member] Created folder: Instances in parent group-v245534. [ 1219.120175] env[62070]: DEBUG oslo.service.loopingcall [None req-b26a854f-3048-40e6-87b8-aaa80dfb8c88 tempest-ServersAaction247Test-491916567 tempest-ServersAaction247Test-491916567-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1219.120376] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3d7d7826-8690-4618-ac77-77e08afc6596] Creating VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1219.120612] env[62070]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d9ff9e4a-8c74-4c81-9187-ba977dd6c2c5 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1219.137439] env[62070]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1219.137439] env[62070]: value = "task-1122597" [ 1219.137439] env[62070]: _type = "Task" [ 1219.137439] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1219.144852] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1122597, 'name': CreateVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1219.441858] env[62070]: DEBUG oslo_vmware.api [None req-33ff662a-2d16-4f9a-81a9-6f5187eccab3 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Task: {'id': task-1122594, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1219.647030] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1122597, 'name': CreateVM_Task, 'duration_secs': 0.473385} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1219.647225] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3d7d7826-8690-4618-ac77-77e08afc6596] Created VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1219.647678] env[62070]: DEBUG oslo_concurrency.lockutils [None req-b26a854f-3048-40e6-87b8-aaa80dfb8c88 tempest-ServersAaction247Test-491916567 tempest-ServersAaction247Test-491916567-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1219.647862] env[62070]: DEBUG oslo_concurrency.lockutils [None req-b26a854f-3048-40e6-87b8-aaa80dfb8c88 tempest-ServersAaction247Test-491916567 tempest-ServersAaction247Test-491916567-project-member] Acquired lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1219.648215] env[62070]: DEBUG oslo_concurrency.lockutils [None req-b26a854f-3048-40e6-87b8-aaa80dfb8c88 tempest-ServersAaction247Test-491916567 tempest-ServersAaction247Test-491916567-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1219.648482] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-40e1f2c7-a588-4139-baea-0b111c9a440d {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1219.652946] env[62070]: DEBUG oslo_vmware.api [None req-b26a854f-3048-40e6-87b8-aaa80dfb8c88 tempest-ServersAaction247Test-491916567 tempest-ServersAaction247Test-491916567-project-member] Waiting for the task: (returnval){ [ 1219.652946] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]52af68dd-c2a0-114e-2417-187e807abe05" [ 1219.652946] env[62070]: _type = "Task" [ 1219.652946] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1219.660331] env[62070]: DEBUG oslo_vmware.api [None req-b26a854f-3048-40e6-87b8-aaa80dfb8c88 tempest-ServersAaction247Test-491916567 tempest-ServersAaction247Test-491916567-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52af68dd-c2a0-114e-2417-187e807abe05, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1219.942189] env[62070]: DEBUG oslo_vmware.api [None req-33ff662a-2d16-4f9a-81a9-6f5187eccab3 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Task: {'id': task-1122594, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1220.164096] env[62070]: DEBUG oslo_vmware.api [None req-b26a854f-3048-40e6-87b8-aaa80dfb8c88 tempest-ServersAaction247Test-491916567 tempest-ServersAaction247Test-491916567-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52af68dd-c2a0-114e-2417-187e807abe05, 'name': SearchDatastore_Task, 'duration_secs': 0.009566} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1220.164547] env[62070]: DEBUG oslo_concurrency.lockutils [None req-b26a854f-3048-40e6-87b8-aaa80dfb8c88 tempest-ServersAaction247Test-491916567 tempest-ServersAaction247Test-491916567-project-member] Releasing lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1220.164632] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-b26a854f-3048-40e6-87b8-aaa80dfb8c88 tempest-ServersAaction247Test-491916567 tempest-ServersAaction247Test-491916567-project-member] [instance: 3d7d7826-8690-4618-ac77-77e08afc6596] Processing image 43ea607c-7ece-4601-9b11-75c6a16aa7dd {{(pid=62070) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1220.164867] env[62070]: DEBUG oslo_concurrency.lockutils [None req-b26a854f-3048-40e6-87b8-aaa80dfb8c88 tempest-ServersAaction247Test-491916567 tempest-ServersAaction247Test-491916567-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1220.165031] env[62070]: DEBUG oslo_concurrency.lockutils [None req-b26a854f-3048-40e6-87b8-aaa80dfb8c88 tempest-ServersAaction247Test-491916567 tempest-ServersAaction247Test-491916567-project-member] Acquired lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1220.165232] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-b26a854f-3048-40e6-87b8-aaa80dfb8c88 tempest-ServersAaction247Test-491916567 tempest-ServersAaction247Test-491916567-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1220.165490] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-bb9f723a-c0f7-4ce3-8dd6-244b42a90f88 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1220.173910] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-b26a854f-3048-40e6-87b8-aaa80dfb8c88 tempest-ServersAaction247Test-491916567 tempest-ServersAaction247Test-491916567-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1220.174098] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-b26a854f-3048-40e6-87b8-aaa80dfb8c88 tempest-ServersAaction247Test-491916567 tempest-ServersAaction247Test-491916567-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62070) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1220.174784] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-47b31bb1-f0fb-4c07-84a7-13aebdb12be0 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1220.180351] env[62070]: DEBUG oslo_vmware.api [None req-b26a854f-3048-40e6-87b8-aaa80dfb8c88 tempest-ServersAaction247Test-491916567 tempest-ServersAaction247Test-491916567-project-member] Waiting for the task: (returnval){ [ 1220.180351] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]52a20da2-01e8-c593-f6e5-e3b8e3141a00" [ 1220.180351] env[62070]: _type = "Task" [ 1220.180351] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1220.187655] env[62070]: DEBUG oslo_vmware.api [None req-b26a854f-3048-40e6-87b8-aaa80dfb8c88 tempest-ServersAaction247Test-491916567 tempest-ServersAaction247Test-491916567-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52a20da2-01e8-c593-f6e5-e3b8e3141a00, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1220.442251] env[62070]: DEBUG oslo_vmware.api [None req-33ff662a-2d16-4f9a-81a9-6f5187eccab3 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Task: {'id': task-1122594, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1220.690529] env[62070]: DEBUG oslo_vmware.api [None req-b26a854f-3048-40e6-87b8-aaa80dfb8c88 tempest-ServersAaction247Test-491916567 tempest-ServersAaction247Test-491916567-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52a20da2-01e8-c593-f6e5-e3b8e3141a00, 'name': SearchDatastore_Task, 'duration_secs': 0.008833} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1220.691332] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-241021bb-5470-4e4b-afaa-d45ad0fadd6c {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1220.696294] env[62070]: DEBUG oslo_vmware.api [None req-b26a854f-3048-40e6-87b8-aaa80dfb8c88 tempest-ServersAaction247Test-491916567 tempest-ServersAaction247Test-491916567-project-member] Waiting for the task: (returnval){ [ 1220.696294] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]52c6ccd7-499c-2976-da32-f53df9e2b49f" [ 1220.696294] env[62070]: _type = "Task" [ 1220.696294] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1220.704121] env[62070]: DEBUG oslo_vmware.api [None req-b26a854f-3048-40e6-87b8-aaa80dfb8c88 tempest-ServersAaction247Test-491916567 tempest-ServersAaction247Test-491916567-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52c6ccd7-499c-2976-da32-f53df9e2b49f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1220.943244] env[62070]: DEBUG oslo_vmware.api [None req-33ff662a-2d16-4f9a-81a9-6f5187eccab3 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Task: {'id': task-1122594, 'name': ReconfigVM_Task, 'duration_secs': 1.704298} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1220.943496] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-33ff662a-2d16-4f9a-81a9-6f5187eccab3 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] [instance: 229bb9ef-b8d8-40cb-a589-3aa280b904d7] Reconfigured VM instance instance-00000070 to attach disk [datastore1] 229bb9ef-b8d8-40cb-a589-3aa280b904d7/229bb9ef-b8d8-40cb-a589-3aa280b904d7.vmdk or device None with type sparse {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1220.944140] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-28cd6193-0cc8-454d-a607-48cdbdaf2451 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1220.952927] env[62070]: DEBUG oslo_vmware.api [None req-33ff662a-2d16-4f9a-81a9-6f5187eccab3 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Waiting for the task: (returnval){ [ 1220.952927] env[62070]: value = "task-1122598" [ 1220.952927] env[62070]: _type = "Task" [ 1220.952927] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1220.960208] env[62070]: DEBUG oslo_vmware.api [None req-33ff662a-2d16-4f9a-81a9-6f5187eccab3 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Task: {'id': task-1122598, 'name': Rename_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1221.207985] env[62070]: DEBUG oslo_vmware.api [None req-b26a854f-3048-40e6-87b8-aaa80dfb8c88 tempest-ServersAaction247Test-491916567 tempest-ServersAaction247Test-491916567-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52c6ccd7-499c-2976-da32-f53df9e2b49f, 'name': SearchDatastore_Task, 'duration_secs': 0.010523} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1221.208401] env[62070]: DEBUG oslo_concurrency.lockutils [None req-b26a854f-3048-40e6-87b8-aaa80dfb8c88 tempest-ServersAaction247Test-491916567 tempest-ServersAaction247Test-491916567-project-member] Releasing lock "[datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1221.208739] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-b26a854f-3048-40e6-87b8-aaa80dfb8c88 tempest-ServersAaction247Test-491916567 tempest-ServersAaction247Test-491916567-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore1] 3d7d7826-8690-4618-ac77-77e08afc6596/3d7d7826-8690-4618-ac77-77e08afc6596.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1221.209076] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-dbceb000-4e79-44f8-8b9e-d859b683ec9e {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.218474] env[62070]: DEBUG oslo_vmware.api [None req-b26a854f-3048-40e6-87b8-aaa80dfb8c88 tempest-ServersAaction247Test-491916567 tempest-ServersAaction247Test-491916567-project-member] Waiting for the task: (returnval){ [ 1221.218474] env[62070]: value = "task-1122599" [ 1221.218474] env[62070]: _type = "Task" [ 1221.218474] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1221.226355] env[62070]: DEBUG oslo_vmware.api [None req-b26a854f-3048-40e6-87b8-aaa80dfb8c88 tempest-ServersAaction247Test-491916567 tempest-ServersAaction247Test-491916567-project-member] Task: {'id': task-1122599, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1221.463126] env[62070]: DEBUG oslo_vmware.api [None req-33ff662a-2d16-4f9a-81a9-6f5187eccab3 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Task: {'id': task-1122598, 'name': Rename_Task, 'duration_secs': 0.146275} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1221.463359] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-33ff662a-2d16-4f9a-81a9-6f5187eccab3 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] [instance: 229bb9ef-b8d8-40cb-a589-3aa280b904d7] Powering on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1221.463624] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c515bec8-1492-4b55-982b-f1f9ec18565f {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.471741] env[62070]: DEBUG oslo_vmware.api [None req-33ff662a-2d16-4f9a-81a9-6f5187eccab3 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Waiting for the task: (returnval){ [ 1221.471741] env[62070]: value = "task-1122600" [ 1221.471741] env[62070]: _type = "Task" [ 1221.471741] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1221.484693] env[62070]: DEBUG oslo_vmware.api [None req-33ff662a-2d16-4f9a-81a9-6f5187eccab3 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Task: {'id': task-1122600, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1221.728023] env[62070]: DEBUG oslo_vmware.api [None req-b26a854f-3048-40e6-87b8-aaa80dfb8c88 tempest-ServersAaction247Test-491916567 tempest-ServersAaction247Test-491916567-project-member] Task: {'id': task-1122599, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.450134} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1221.728289] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-b26a854f-3048-40e6-87b8-aaa80dfb8c88 tempest-ServersAaction247Test-491916567 tempest-ServersAaction247Test-491916567-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore1] 3d7d7826-8690-4618-ac77-77e08afc6596/3d7d7826-8690-4618-ac77-77e08afc6596.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 1221.728508] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-b26a854f-3048-40e6-87b8-aaa80dfb8c88 tempest-ServersAaction247Test-491916567 tempest-ServersAaction247Test-491916567-project-member] [instance: 3d7d7826-8690-4618-ac77-77e08afc6596] Extending root virtual disk to 1048576 {{(pid=62070) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1221.728757] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-55940802-a7e0-40c3-babe-5e9035a5b11d {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.735491] env[62070]: DEBUG oslo_vmware.api [None req-b26a854f-3048-40e6-87b8-aaa80dfb8c88 tempest-ServersAaction247Test-491916567 tempest-ServersAaction247Test-491916567-project-member] Waiting for the task: (returnval){ [ 1221.735491] env[62070]: value = "task-1122601" [ 1221.735491] env[62070]: _type = "Task" [ 1221.735491] env[62070]: } to complete. 
{{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1221.743402] env[62070]: DEBUG oslo_vmware.api [None req-b26a854f-3048-40e6-87b8-aaa80dfb8c88 tempest-ServersAaction247Test-491916567 tempest-ServersAaction247Test-491916567-project-member] Task: {'id': task-1122601, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1221.982515] env[62070]: DEBUG oslo_vmware.api [None req-33ff662a-2d16-4f9a-81a9-6f5187eccab3 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Task: {'id': task-1122600, 'name': PowerOnVM_Task, 'duration_secs': 0.474987} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1221.982767] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-33ff662a-2d16-4f9a-81a9-6f5187eccab3 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] [instance: 229bb9ef-b8d8-40cb-a589-3aa280b904d7] Powered on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1221.983058] env[62070]: INFO nova.compute.manager [None req-33ff662a-2d16-4f9a-81a9-6f5187eccab3 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] [instance: 229bb9ef-b8d8-40cb-a589-3aa280b904d7] Took 8.30 seconds to spawn the instance on the hypervisor. [ 1221.983292] env[62070]: DEBUG nova.compute.manager [None req-33ff662a-2d16-4f9a-81a9-6f5187eccab3 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] [instance: 229bb9ef-b8d8-40cb-a589-3aa280b904d7] Checking state {{(pid=62070) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1221.984080] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bca6773-67a0-494e-a2ef-12f3d3db7ebb {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.244369] env[62070]: DEBUG oslo_vmware.api [None req-b26a854f-3048-40e6-87b8-aaa80dfb8c88 tempest-ServersAaction247Test-491916567 tempest-ServersAaction247Test-491916567-project-member] Task: {'id': task-1122601, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067577} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1222.244625] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-b26a854f-3048-40e6-87b8-aaa80dfb8c88 tempest-ServersAaction247Test-491916567 tempest-ServersAaction247Test-491916567-project-member] [instance: 3d7d7826-8690-4618-ac77-77e08afc6596] Extended root virtual disk {{(pid=62070) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1222.245366] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3b4a6e7-adc3-48db-9fcf-9f2e945738b3 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.264109] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-b26a854f-3048-40e6-87b8-aaa80dfb8c88 tempest-ServersAaction247Test-491916567 tempest-ServersAaction247Test-491916567-project-member] [instance: 3d7d7826-8690-4618-ac77-77e08afc6596] Reconfiguring VM instance instance-00000071 to attach disk [datastore1] 3d7d7826-8690-4618-ac77-77e08afc6596/3d7d7826-8690-4618-ac77-77e08afc6596.vmdk or device None with type sparse {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1222.264347] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8ec020fe-e888-4450-a4e1-40629b852f6c {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.282109] env[62070]: DEBUG oslo_vmware.api [None req-b26a854f-3048-40e6-87b8-aaa80dfb8c88 tempest-ServersAaction247Test-491916567 tempest-ServersAaction247Test-491916567-project-member] Waiting for the task: (returnval){ [ 1222.282109] env[62070]: value = "task-1122602" [ 1222.282109] env[62070]: _type = "Task" [ 1222.282109] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1222.290347] env[62070]: DEBUG oslo_vmware.api [None req-b26a854f-3048-40e6-87b8-aaa80dfb8c88 tempest-ServersAaction247Test-491916567 tempest-ServersAaction247Test-491916567-project-member] Task: {'id': task-1122602, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1222.500715] env[62070]: INFO nova.compute.manager [None req-33ff662a-2d16-4f9a-81a9-6f5187eccab3 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] [instance: 229bb9ef-b8d8-40cb-a589-3aa280b904d7] Took 14.86 seconds to build instance. [ 1222.792793] env[62070]: DEBUG oslo_vmware.api [None req-b26a854f-3048-40e6-87b8-aaa80dfb8c88 tempest-ServersAaction247Test-491916567 tempest-ServersAaction247Test-491916567-project-member] Task: {'id': task-1122602, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1223.002656] env[62070]: DEBUG oslo_concurrency.lockutils [None req-33ff662a-2d16-4f9a-81a9-6f5187eccab3 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Lock "229bb9ef-b8d8-40cb-a589-3aa280b904d7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 16.369s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1223.212880] env[62070]: DEBUG nova.compute.manager [req-0526fc53-3979-41c3-baf5-01bc75b223c2 req-7a45d946-e703-444e-91b7-ce6975fd08a9 service nova] [instance: 229bb9ef-b8d8-40cb-a589-3aa280b904d7] Received event network-changed-d5bcacdd-b774-4af1-aa33-c5f4ec9198f2 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1223.213097] env[62070]: DEBUG nova.compute.manager [req-0526fc53-3979-41c3-baf5-01bc75b223c2 req-7a45d946-e703-444e-91b7-ce6975fd08a9 service nova] [instance: 229bb9ef-b8d8-40cb-a589-3aa280b904d7] Refreshing instance network info cache due to event network-changed-d5bcacdd-b774-4af1-aa33-c5f4ec9198f2. {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1223.213335] env[62070]: DEBUG oslo_concurrency.lockutils [req-0526fc53-3979-41c3-baf5-01bc75b223c2 req-7a45d946-e703-444e-91b7-ce6975fd08a9 service nova] Acquiring lock "refresh_cache-229bb9ef-b8d8-40cb-a589-3aa280b904d7" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1223.213594] env[62070]: DEBUG oslo_concurrency.lockutils [req-0526fc53-3979-41c3-baf5-01bc75b223c2 req-7a45d946-e703-444e-91b7-ce6975fd08a9 service nova] Acquired lock "refresh_cache-229bb9ef-b8d8-40cb-a589-3aa280b904d7" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1223.213666] env[62070]: DEBUG nova.network.neutron [req-0526fc53-3979-41c3-baf5-01bc75b223c2 req-7a45d946-e703-444e-91b7-ce6975fd08a9 service nova] [instance: 229bb9ef-b8d8-40cb-a589-3aa280b904d7] Refreshing network info cache for port d5bcacdd-b774-4af1-aa33-c5f4ec9198f2 {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1223.293192] env[62070]: DEBUG oslo_vmware.api [None req-b26a854f-3048-40e6-87b8-aaa80dfb8c88 tempest-ServersAaction247Test-491916567 tempest-ServersAaction247Test-491916567-project-member] Task: {'id': task-1122602, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1223.793053] env[62070]: DEBUG oslo_vmware.api [None req-b26a854f-3048-40e6-87b8-aaa80dfb8c88 tempest-ServersAaction247Test-491916567 tempest-ServersAaction247Test-491916567-project-member] Task: {'id': task-1122602, 'name': ReconfigVM_Task, 'duration_secs': 1.235986} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1223.793208] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-b26a854f-3048-40e6-87b8-aaa80dfb8c88 tempest-ServersAaction247Test-491916567 tempest-ServersAaction247Test-491916567-project-member] [instance: 3d7d7826-8690-4618-ac77-77e08afc6596] Reconfigured VM instance instance-00000071 to attach disk [datastore1] 3d7d7826-8690-4618-ac77-77e08afc6596/3d7d7826-8690-4618-ac77-77e08afc6596.vmdk or device None with type sparse {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1223.793866] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c2baa35c-7fd2-4ad7-a154-e973d7463ebf {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1223.799467] env[62070]: DEBUG oslo_vmware.api [None req-b26a854f-3048-40e6-87b8-aaa80dfb8c88 tempest-ServersAaction247Test-491916567 tempest-ServersAaction247Test-491916567-project-member] Waiting for the task: (returnval){ [ 1223.799467] env[62070]: value = "task-1122603" [ 1223.799467] env[62070]: _type = "Task" [ 1223.799467] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1223.806980] env[62070]: DEBUG oslo_vmware.api [None req-b26a854f-3048-40e6-87b8-aaa80dfb8c88 tempest-ServersAaction247Test-491916567 tempest-ServersAaction247Test-491916567-project-member] Task: {'id': task-1122603, 'name': Rename_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1223.939311] env[62070]: DEBUG nova.network.neutron [req-0526fc53-3979-41c3-baf5-01bc75b223c2 req-7a45d946-e703-444e-91b7-ce6975fd08a9 service nova] [instance: 229bb9ef-b8d8-40cb-a589-3aa280b904d7] Updated VIF entry in instance network info cache for port d5bcacdd-b774-4af1-aa33-c5f4ec9198f2. 
{{(pid=62070) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1223.939721] env[62070]: DEBUG nova.network.neutron [req-0526fc53-3979-41c3-baf5-01bc75b223c2 req-7a45d946-e703-444e-91b7-ce6975fd08a9 service nova] [instance: 229bb9ef-b8d8-40cb-a589-3aa280b904d7] Updating instance_info_cache with network_info: [{"id": "d5bcacdd-b774-4af1-aa33-c5f4ec9198f2", "address": "fa:16:3e:38:d5:88", "network": {"id": "b9ef8f6c-bbd6-409d-a591-ad584e5e028f", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-599171324-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.228", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ca25fba006b740f2a86fe10e4abe9400", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa8c2f93-f287-41b3-adb6-4942a7ea2a0b", "external-id": "nsx-vlan-transportzone-363", "segmentation_id": 363, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd5bcacdd-b7", "ovs_interfaceid": "d5bcacdd-b774-4af1-aa33-c5f4ec9198f2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1224.312480] env[62070]: DEBUG oslo_vmware.api [None req-b26a854f-3048-40e6-87b8-aaa80dfb8c88 tempest-ServersAaction247Test-491916567 tempest-ServersAaction247Test-491916567-project-member] Task: {'id': task-1122603, 'name': Rename_Task, 'duration_secs': 0.119336} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1224.312777] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-b26a854f-3048-40e6-87b8-aaa80dfb8c88 tempest-ServersAaction247Test-491916567 tempest-ServersAaction247Test-491916567-project-member] [instance: 3d7d7826-8690-4618-ac77-77e08afc6596] Powering on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1224.313060] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-39e4bf34-3ab1-4a49-984c-1053ad1b60bb {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1224.319189] env[62070]: DEBUG oslo_vmware.api [None req-b26a854f-3048-40e6-87b8-aaa80dfb8c88 tempest-ServersAaction247Test-491916567 tempest-ServersAaction247Test-491916567-project-member] Waiting for the task: (returnval){ [ 1224.319189] env[62070]: value = "task-1122604" [ 1224.319189] env[62070]: _type = "Task" [ 1224.319189] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1224.326885] env[62070]: DEBUG oslo_vmware.api [None req-b26a854f-3048-40e6-87b8-aaa80dfb8c88 tempest-ServersAaction247Test-491916567 tempest-ServersAaction247Test-491916567-project-member] Task: {'id': task-1122604, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1224.442887] env[62070]: DEBUG oslo_concurrency.lockutils [req-0526fc53-3979-41c3-baf5-01bc75b223c2 req-7a45d946-e703-444e-91b7-ce6975fd08a9 service nova] Releasing lock "refresh_cache-229bb9ef-b8d8-40cb-a589-3aa280b904d7" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1224.828961] env[62070]: DEBUG oslo_vmware.api [None req-b26a854f-3048-40e6-87b8-aaa80dfb8c88 tempest-ServersAaction247Test-491916567 tempest-ServersAaction247Test-491916567-project-member] Task: {'id': task-1122604, 'name': PowerOnVM_Task, 'duration_secs': 0.462676} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1224.829271] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-b26a854f-3048-40e6-87b8-aaa80dfb8c88 tempest-ServersAaction247Test-491916567 tempest-ServersAaction247Test-491916567-project-member] [instance: 3d7d7826-8690-4618-ac77-77e08afc6596] Powered on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1224.829481] env[62070]: INFO nova.compute.manager [None req-b26a854f-3048-40e6-87b8-aaa80dfb8c88 tempest-ServersAaction247Test-491916567 tempest-ServersAaction247Test-491916567-project-member] [instance: 3d7d7826-8690-4618-ac77-77e08afc6596] Took 5.79 seconds to spawn the instance on the hypervisor. [ 1224.829695] env[62070]: DEBUG nova.compute.manager [None req-b26a854f-3048-40e6-87b8-aaa80dfb8c88 tempest-ServersAaction247Test-491916567 tempest-ServersAaction247Test-491916567-project-member] [instance: 3d7d7826-8690-4618-ac77-77e08afc6596] Checking state {{(pid=62070) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1224.830451] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95cc3a9a-c32d-4739-86f3-cec9fa605acf {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1225.347624] env[62070]: INFO nova.compute.manager [None req-b26a854f-3048-40e6-87b8-aaa80dfb8c88 tempest-ServersAaction247Test-491916567 tempest-ServersAaction247Test-491916567-project-member] [instance: 3d7d7826-8690-4618-ac77-77e08afc6596] Took 10.46 seconds to build instance. 
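The task entries above (ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task, PowerOnVM_Task, each polled until "completed successfully") follow oslo.vmware's invoke-then-poll pattern: start an asynchronous vSphere task through the session, then let the session poll it, which is what produces the repeated "progress is N%" lines. A minimal, illustrative sketch of that pattern is below; the host, credentials, and the way a VM reference is picked are placeholders for illustration only, not values taken from this log.

    # Hedged sketch of the oslo.vmware task workflow visible in the log above.
    from oslo_vmware import api
    from oslo_vmware import vim_util

    # Create an API session; 10 = api_retry_count, 0.5 = task_poll_interval in seconds.
    # Host and credentials are placeholders.
    session = api.VMwareAPISession('vc.example.com', 'user', 'password', 10, 0.5)

    # Retrieve a few VirtualMachine managed objects and pick one (illustrative only;
    # assumes at least one VM is visible to this account).
    vms = session.invoke_api(vim_util, 'get_objects', session.vim, 'VirtualMachine', 10)
    vm_ref = vms.objects[0].obj

    # Kick off an asynchronous vSphere task, then block until it finishes.
    # wait_for_task() is the poller that logs the "Task: {...} progress is N%" lines.
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    task_info = session.wait_for_task(task)
    print(task_info.state)  # expected to be 'success' once the task completes

The same skeleton covers every task seen in this section; only the method name ('ExtendVirtualDisk_Task', 'ReconfigVM_Task', 'Rename_Task', ...) and its arguments change.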
[ 1225.849216] env[62070]: DEBUG nova.compute.manager [None req-7ff85566-4968-41a1-b363-b7a69066965b tempest-ServersAaction247Test-491916567 tempest-ServersAaction247Test-491916567-project-member] [instance: 3d7d7826-8690-4618-ac77-77e08afc6596] Checking state {{(pid=62070) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1225.849908] env[62070]: DEBUG oslo_concurrency.lockutils [None req-b26a854f-3048-40e6-87b8-aaa80dfb8c88 tempest-ServersAaction247Test-491916567 tempest-ServersAaction247Test-491916567-project-member] Lock "3d7d7826-8690-4618-ac77-77e08afc6596" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 11.973s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1225.851098] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c39d71ca-53f2-4d10-be17-caa1194797b7 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1225.907461] env[62070]: DEBUG oslo_concurrency.lockutils [None req-0b9359b3-9879-4e3d-b000-0060bf3a36e7 tempest-ServersAaction247Test-491916567 tempest-ServersAaction247Test-491916567-project-member] Acquiring lock "3d7d7826-8690-4618-ac77-77e08afc6596" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1225.907743] env[62070]: DEBUG oslo_concurrency.lockutils [None req-0b9359b3-9879-4e3d-b000-0060bf3a36e7 tempest-ServersAaction247Test-491916567 tempest-ServersAaction247Test-491916567-project-member] Lock "3d7d7826-8690-4618-ac77-77e08afc6596" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1225.907956] env[62070]: DEBUG oslo_concurrency.lockutils [None req-0b9359b3-9879-4e3d-b000-0060bf3a36e7 tempest-ServersAaction247Test-491916567 tempest-ServersAaction247Test-491916567-project-member] Acquiring lock "3d7d7826-8690-4618-ac77-77e08afc6596-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1225.908160] env[62070]: DEBUG oslo_concurrency.lockutils [None req-0b9359b3-9879-4e3d-b000-0060bf3a36e7 tempest-ServersAaction247Test-491916567 tempest-ServersAaction247Test-491916567-project-member] Lock "3d7d7826-8690-4618-ac77-77e08afc6596-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1225.908332] env[62070]: DEBUG oslo_concurrency.lockutils [None req-0b9359b3-9879-4e3d-b000-0060bf3a36e7 tempest-ServersAaction247Test-491916567 tempest-ServersAaction247Test-491916567-project-member] Lock "3d7d7826-8690-4618-ac77-77e08afc6596-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1225.910325] env[62070]: INFO nova.compute.manager [None req-0b9359b3-9879-4e3d-b000-0060bf3a36e7 tempest-ServersAaction247Test-491916567 
tempest-ServersAaction247Test-491916567-project-member] [instance: 3d7d7826-8690-4618-ac77-77e08afc6596] Terminating instance [ 1225.911989] env[62070]: DEBUG oslo_concurrency.lockutils [None req-0b9359b3-9879-4e3d-b000-0060bf3a36e7 tempest-ServersAaction247Test-491916567 tempest-ServersAaction247Test-491916567-project-member] Acquiring lock "refresh_cache-3d7d7826-8690-4618-ac77-77e08afc6596" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1225.912165] env[62070]: DEBUG oslo_concurrency.lockutils [None req-0b9359b3-9879-4e3d-b000-0060bf3a36e7 tempest-ServersAaction247Test-491916567 tempest-ServersAaction247Test-491916567-project-member] Acquired lock "refresh_cache-3d7d7826-8690-4618-ac77-77e08afc6596" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1225.912334] env[62070]: DEBUG nova.network.neutron [None req-0b9359b3-9879-4e3d-b000-0060bf3a36e7 tempest-ServersAaction247Test-491916567 tempest-ServersAaction247Test-491916567-project-member] [instance: 3d7d7826-8690-4618-ac77-77e08afc6596] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1226.364362] env[62070]: INFO nova.compute.manager [None req-7ff85566-4968-41a1-b363-b7a69066965b tempest-ServersAaction247Test-491916567 tempest-ServersAaction247Test-491916567-project-member] [instance: 3d7d7826-8690-4618-ac77-77e08afc6596] instance snapshotting [ 1226.365070] env[62070]: DEBUG nova.objects.instance [None req-7ff85566-4968-41a1-b363-b7a69066965b tempest-ServersAaction247Test-491916567 tempest-ServersAaction247Test-491916567-project-member] Lazy-loading 'flavor' on Instance uuid 3d7d7826-8690-4618-ac77-77e08afc6596 {{(pid=62070) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1226.429976] env[62070]: DEBUG nova.network.neutron [None req-0b9359b3-9879-4e3d-b000-0060bf3a36e7 tempest-ServersAaction247Test-491916567 tempest-ServersAaction247Test-491916567-project-member] [instance: 3d7d7826-8690-4618-ac77-77e08afc6596] Instance cache missing network info. 
{{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1226.487719] env[62070]: DEBUG nova.network.neutron [None req-0b9359b3-9879-4e3d-b000-0060bf3a36e7 tempest-ServersAaction247Test-491916567 tempest-ServersAaction247Test-491916567-project-member] [instance: 3d7d7826-8690-4618-ac77-77e08afc6596] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1226.870427] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bebc29d1-2b12-46a4-94db-624604dcd5df {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1226.886659] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-969e72f9-d4c1-411f-b19b-09d9c10eb7c0 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1226.990875] env[62070]: DEBUG oslo_concurrency.lockutils [None req-0b9359b3-9879-4e3d-b000-0060bf3a36e7 tempest-ServersAaction247Test-491916567 tempest-ServersAaction247Test-491916567-project-member] Releasing lock "refresh_cache-3d7d7826-8690-4618-ac77-77e08afc6596" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1226.991349] env[62070]: DEBUG nova.compute.manager [None req-0b9359b3-9879-4e3d-b000-0060bf3a36e7 tempest-ServersAaction247Test-491916567 tempest-ServersAaction247Test-491916567-project-member] [instance: 3d7d7826-8690-4618-ac77-77e08afc6596] Start destroying the instance on the hypervisor. {{(pid=62070) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1226.991573] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-0b9359b3-9879-4e3d-b000-0060bf3a36e7 tempest-ServersAaction247Test-491916567 tempest-ServersAaction247Test-491916567-project-member] [instance: 3d7d7826-8690-4618-ac77-77e08afc6596] Destroying instance {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1226.992481] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b9b02c8-bf27-4187-b7a2-8719a29b5667 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1226.999486] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-0b9359b3-9879-4e3d-b000-0060bf3a36e7 tempest-ServersAaction247Test-491916567 tempest-ServersAaction247Test-491916567-project-member] [instance: 3d7d7826-8690-4618-ac77-77e08afc6596] Powering off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1226.999735] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-bcca8ba0-cb66-4184-a3e1-2b4aa05d9575 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.005369] env[62070]: DEBUG oslo_vmware.api [None req-0b9359b3-9879-4e3d-b000-0060bf3a36e7 tempest-ServersAaction247Test-491916567 tempest-ServersAaction247Test-491916567-project-member] Waiting for the task: (returnval){ [ 1227.005369] env[62070]: value = "task-1122605" [ 1227.005369] env[62070]: _type = "Task" [ 1227.005369] env[62070]: } to complete. 
{{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1227.013687] env[62070]: DEBUG oslo_vmware.api [None req-0b9359b3-9879-4e3d-b000-0060bf3a36e7 tempest-ServersAaction247Test-491916567 tempest-ServersAaction247Test-491916567-project-member] Task: {'id': task-1122605, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1227.395887] env[62070]: DEBUG nova.compute.manager [None req-7ff85566-4968-41a1-b363-b7a69066965b tempest-ServersAaction247Test-491916567 tempest-ServersAaction247Test-491916567-project-member] [instance: 3d7d7826-8690-4618-ac77-77e08afc6596] Instance disappeared during snapshot {{(pid=62070) _snapshot_instance /opt/stack/nova/nova/compute/manager.py:4494}} [ 1227.515404] env[62070]: DEBUG oslo_vmware.api [None req-0b9359b3-9879-4e3d-b000-0060bf3a36e7 tempest-ServersAaction247Test-491916567 tempest-ServersAaction247Test-491916567-project-member] Task: {'id': task-1122605, 'name': PowerOffVM_Task, 'duration_secs': 0.267802} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1227.516268] env[62070]: DEBUG nova.compute.manager [None req-7ff85566-4968-41a1-b363-b7a69066965b tempest-ServersAaction247Test-491916567 tempest-ServersAaction247Test-491916567-project-member] [instance: 3d7d7826-8690-4618-ac77-77e08afc6596] Found 0 images (rotation: 2) {{(pid=62070) _rotate_backups /opt/stack/nova/nova/compute/manager.py:4554}} [ 1227.517517] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-0b9359b3-9879-4e3d-b000-0060bf3a36e7 tempest-ServersAaction247Test-491916567 tempest-ServersAaction247Test-491916567-project-member] [instance: 3d7d7826-8690-4618-ac77-77e08afc6596] Powered off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1227.517517] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-0b9359b3-9879-4e3d-b000-0060bf3a36e7 tempest-ServersAaction247Test-491916567 tempest-ServersAaction247Test-491916567-project-member] [instance: 3d7d7826-8690-4618-ac77-77e08afc6596] Unregistering the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1227.518532] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-89d5807a-559c-46c7-9706-2643630a48c0 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.542338] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-0b9359b3-9879-4e3d-b000-0060bf3a36e7 tempest-ServersAaction247Test-491916567 tempest-ServersAaction247Test-491916567-project-member] [instance: 3d7d7826-8690-4618-ac77-77e08afc6596] Unregistered the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1227.542557] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-0b9359b3-9879-4e3d-b000-0060bf3a36e7 tempest-ServersAaction247Test-491916567 tempest-ServersAaction247Test-491916567-project-member] [instance: 3d7d7826-8690-4618-ac77-77e08afc6596] Deleting contents of the VM from datastore datastore1 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1227.542740] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-0b9359b3-9879-4e3d-b000-0060bf3a36e7 tempest-ServersAaction247Test-491916567 tempest-ServersAaction247Test-491916567-project-member] Deleting the datastore file [datastore1] 
3d7d7826-8690-4618-ac77-77e08afc6596 {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1227.542993] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a997ed48-d040-4b11-88c3-51f2cf5f21c6 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.549386] env[62070]: DEBUG oslo_vmware.api [None req-0b9359b3-9879-4e3d-b000-0060bf3a36e7 tempest-ServersAaction247Test-491916567 tempest-ServersAaction247Test-491916567-project-member] Waiting for the task: (returnval){ [ 1227.549386] env[62070]: value = "task-1122607" [ 1227.549386] env[62070]: _type = "Task" [ 1227.549386] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1227.556830] env[62070]: DEBUG oslo_vmware.api [None req-0b9359b3-9879-4e3d-b000-0060bf3a36e7 tempest-ServersAaction247Test-491916567 tempest-ServersAaction247Test-491916567-project-member] Task: {'id': task-1122607, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1228.058898] env[62070]: DEBUG oslo_vmware.api [None req-0b9359b3-9879-4e3d-b000-0060bf3a36e7 tempest-ServersAaction247Test-491916567 tempest-ServersAaction247Test-491916567-project-member] Task: {'id': task-1122607, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.397664} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1228.059201] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-0b9359b3-9879-4e3d-b000-0060bf3a36e7 tempest-ServersAaction247Test-491916567 tempest-ServersAaction247Test-491916567-project-member] Deleted the datastore file {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1228.059392] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-0b9359b3-9879-4e3d-b000-0060bf3a36e7 tempest-ServersAaction247Test-491916567 tempest-ServersAaction247Test-491916567-project-member] [instance: 3d7d7826-8690-4618-ac77-77e08afc6596] Deleted contents of the VM from datastore datastore1 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1228.059571] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-0b9359b3-9879-4e3d-b000-0060bf3a36e7 tempest-ServersAaction247Test-491916567 tempest-ServersAaction247Test-491916567-project-member] [instance: 3d7d7826-8690-4618-ac77-77e08afc6596] Instance destroyed {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1228.059758] env[62070]: INFO nova.compute.manager [None req-0b9359b3-9879-4e3d-b000-0060bf3a36e7 tempest-ServersAaction247Test-491916567 tempest-ServersAaction247Test-491916567-project-member] [instance: 3d7d7826-8690-4618-ac77-77e08afc6596] Took 1.07 seconds to destroy the instance on the hypervisor. [ 1228.059991] env[62070]: DEBUG oslo.service.loopingcall [None req-0b9359b3-9879-4e3d-b000-0060bf3a36e7 tempest-ServersAaction247Test-491916567 tempest-ServersAaction247Test-491916567-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1228.060203] env[62070]: DEBUG nova.compute.manager [-] [instance: 3d7d7826-8690-4618-ac77-77e08afc6596] Deallocating network for instance {{(pid=62070) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1228.060298] env[62070]: DEBUG nova.network.neutron [-] [instance: 3d7d7826-8690-4618-ac77-77e08afc6596] deallocate_for_instance() {{(pid=62070) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1228.075268] env[62070]: DEBUG nova.network.neutron [-] [instance: 3d7d7826-8690-4618-ac77-77e08afc6596] Instance cache missing network info. {{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1228.577700] env[62070]: DEBUG nova.network.neutron [-] [instance: 3d7d7826-8690-4618-ac77-77e08afc6596] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1229.079878] env[62070]: INFO nova.compute.manager [-] [instance: 3d7d7826-8690-4618-ac77-77e08afc6596] Took 1.02 seconds to deallocate network for instance. [ 1229.586373] env[62070]: DEBUG oslo_concurrency.lockutils [None req-0b9359b3-9879-4e3d-b000-0060bf3a36e7 tempest-ServersAaction247Test-491916567 tempest-ServersAaction247Test-491916567-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1229.586811] env[62070]: DEBUG oslo_concurrency.lockutils [None req-0b9359b3-9879-4e3d-b000-0060bf3a36e7 tempest-ServersAaction247Test-491916567 tempest-ServersAaction247Test-491916567-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1229.586865] env[62070]: DEBUG nova.objects.instance [None req-0b9359b3-9879-4e3d-b000-0060bf3a36e7 tempest-ServersAaction247Test-491916567 tempest-ServersAaction247Test-491916567-project-member] Lazy-loading 'resources' on Instance uuid 3d7d7826-8690-4618-ac77-77e08afc6596 {{(pid=62070) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1230.145706] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8909783c-9b0a-4c9e-b363-54cb877cfe51 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1230.153310] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1302695e-8e3f-4404-8c61-f1f054c5d8f0 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1230.182256] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4f102cc-e464-4552-bb54-b3c55e1d487e {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1230.189445] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04ca6951-b0db-4be5-94a9-6d99885e6f0b {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1230.202529] env[62070]: DEBUG nova.compute.provider_tree [None 
req-0b9359b3-9879-4e3d-b000-0060bf3a36e7 tempest-ServersAaction247Test-491916567 tempest-ServersAaction247Test-491916567-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1230.349898] env[62070]: DEBUG oslo_concurrency.lockutils [None req-5e12495c-062b-4eb1-b5e4-9786a5502ef4 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Acquiring lock "689ba5a3-2253-4dc1-a47b-db152f86abd3" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1230.350102] env[62070]: DEBUG oslo_concurrency.lockutils [None req-5e12495c-062b-4eb1-b5e4-9786a5502ef4 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Lock "689ba5a3-2253-4dc1-a47b-db152f86abd3" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1230.705717] env[62070]: DEBUG nova.scheduler.client.report [None req-0b9359b3-9879-4e3d-b000-0060bf3a36e7 tempest-ServersAaction247Test-491916567 tempest-ServersAaction247Test-491916567-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1230.853191] env[62070]: DEBUG nova.compute.utils [None req-5e12495c-062b-4eb1-b5e4-9786a5502ef4 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Using /dev/sd instead of None {{(pid=62070) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1231.210586] env[62070]: DEBUG oslo_concurrency.lockutils [None req-0b9359b3-9879-4e3d-b000-0060bf3a36e7 tempest-ServersAaction247Test-491916567 tempest-ServersAaction247Test-491916567-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.624s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1231.230609] env[62070]: INFO nova.scheduler.client.report [None req-0b9359b3-9879-4e3d-b000-0060bf3a36e7 tempest-ServersAaction247Test-491916567 tempest-ServersAaction247Test-491916567-project-member] Deleted allocations for instance 3d7d7826-8690-4618-ac77-77e08afc6596 [ 1231.356660] env[62070]: DEBUG oslo_concurrency.lockutils [None req-5e12495c-062b-4eb1-b5e4-9786a5502ef4 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Lock "689ba5a3-2253-4dc1-a47b-db152f86abd3" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.006s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1231.738339] env[62070]: DEBUG oslo_concurrency.lockutils [None req-0b9359b3-9879-4e3d-b000-0060bf3a36e7 
tempest-ServersAaction247Test-491916567 tempest-ServersAaction247Test-491916567-project-member] Lock "3d7d7826-8690-4618-ac77-77e08afc6596" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.830s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1232.508438] env[62070]: DEBUG oslo_concurrency.lockutils [None req-5e12495c-062b-4eb1-b5e4-9786a5502ef4 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Acquiring lock "689ba5a3-2253-4dc1-a47b-db152f86abd3" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1232.508694] env[62070]: DEBUG oslo_concurrency.lockutils [None req-5e12495c-062b-4eb1-b5e4-9786a5502ef4 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Lock "689ba5a3-2253-4dc1-a47b-db152f86abd3" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1232.508940] env[62070]: INFO nova.compute.manager [None req-5e12495c-062b-4eb1-b5e4-9786a5502ef4 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] [instance: 689ba5a3-2253-4dc1-a47b-db152f86abd3] Attaching volume 2a41b841-57c5-41b3-900a-b550e6db4174 to /dev/sdb [ 1232.540568] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96303890-9696-4a3c-b96b-eeed7e127a33 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1232.547669] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73801bc4-5ff5-40e8-a3e1-f9aec6d6453a {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1232.560385] env[62070]: DEBUG nova.virt.block_device [None req-5e12495c-062b-4eb1-b5e4-9786a5502ef4 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] [instance: 689ba5a3-2253-4dc1-a47b-db152f86abd3] Updating existing volume attachment record: eb6f9820-3fe3-4d9d-8f58-91eaebc15227 {{(pid=62070) _volume_attach /opt/stack/nova/nova/virt/block_device.py:679}} [ 1233.458994] env[62070]: DEBUG oslo_concurrency.lockutils [None req-068f41bc-f287-481f-8ce8-2a6d531e680f tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Acquiring lock "2502f29e-be96-497a-b98c-61e4d0a4e817" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1233.459278] env[62070]: DEBUG oslo_concurrency.lockutils [None req-068f41bc-f287-481f-8ce8-2a6d531e680f tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Lock "2502f29e-be96-497a-b98c-61e4d0a4e817" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1233.624960] env[62070]: DEBUG oslo_concurrency.lockutils [None 
req-74ddd37a-0b6e-4990-a59e-be80e23caec0 tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Acquiring lock "db83dddb-25dd-44ba-9046-53ba09b4db92" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1233.625284] env[62070]: DEBUG oslo_concurrency.lockutils [None req-74ddd37a-0b6e-4990-a59e-be80e23caec0 tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Lock "db83dddb-25dd-44ba-9046-53ba09b4db92" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1233.961692] env[62070]: DEBUG nova.compute.manager [None req-068f41bc-f287-481f-8ce8-2a6d531e680f tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] [instance: 2502f29e-be96-497a-b98c-61e4d0a4e817] Starting instance... {{(pid=62070) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1234.127948] env[62070]: DEBUG nova.compute.manager [None req-74ddd37a-0b6e-4990-a59e-be80e23caec0 tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] [instance: db83dddb-25dd-44ba-9046-53ba09b4db92] Starting instance... {{(pid=62070) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1234.482944] env[62070]: DEBUG oslo_concurrency.lockutils [None req-068f41bc-f287-481f-8ce8-2a6d531e680f tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1234.483236] env[62070]: DEBUG oslo_concurrency.lockutils [None req-068f41bc-f287-481f-8ce8-2a6d531e680f tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1234.484717] env[62070]: INFO nova.compute.claims [None req-068f41bc-f287-481f-8ce8-2a6d531e680f tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] [instance: 2502f29e-be96-497a-b98c-61e4d0a4e817] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1234.646128] env[62070]: DEBUG oslo_concurrency.lockutils [None req-74ddd37a-0b6e-4990-a59e-be80e23caec0 tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1235.550101] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c8d822d-bab9-4a44-acce-09bbff44d451 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.557782] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e5bced2-2ed3-42e2-a51b-06160e70b52a {{(pid=62070) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.586316] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94d0d01d-58ee-4d83-9638-f77de868241b {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.593189] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-257a2299-c265-4906-9828-1969164bc311 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.605635] env[62070]: DEBUG nova.compute.provider_tree [None req-068f41bc-f287-481f-8ce8-2a6d531e680f tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1236.108409] env[62070]: DEBUG nova.scheduler.client.report [None req-068f41bc-f287-481f-8ce8-2a6d531e680f tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1236.614337] env[62070]: DEBUG oslo_concurrency.lockutils [None req-068f41bc-f287-481f-8ce8-2a6d531e680f tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.131s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1236.614842] env[62070]: DEBUG nova.compute.manager [None req-068f41bc-f287-481f-8ce8-2a6d531e680f tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] [instance: 2502f29e-be96-497a-b98c-61e4d0a4e817] Start building networks asynchronously for instance. 
{{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1236.617693] env[62070]: DEBUG oslo_concurrency.lockutils [None req-74ddd37a-0b6e-4990-a59e-be80e23caec0 tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.971s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1236.619083] env[62070]: INFO nova.compute.claims [None req-74ddd37a-0b6e-4990-a59e-be80e23caec0 tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] [instance: db83dddb-25dd-44ba-9046-53ba09b4db92] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1237.107358] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-5e12495c-062b-4eb1-b5e4-9786a5502ef4 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] [instance: 689ba5a3-2253-4dc1-a47b-db152f86abd3] Volume attach. Driver type: vmdk {{(pid=62070) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1237.107604] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-5e12495c-062b-4eb1-b5e4-9786a5502ef4 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] [instance: 689ba5a3-2253-4dc1-a47b-db152f86abd3] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-245537', 'volume_id': '2a41b841-57c5-41b3-900a-b550e6db4174', 'name': 'volume-2a41b841-57c5-41b3-900a-b550e6db4174', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '689ba5a3-2253-4dc1-a47b-db152f86abd3', 'attached_at': '', 'detached_at': '', 'volume_id': '2a41b841-57c5-41b3-900a-b550e6db4174', 'serial': '2a41b841-57c5-41b3-900a-b550e6db4174'} {{(pid=62070) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1237.108554] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3262788-5fb8-48cf-92a2-e2f320fdb2bb {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.126217] env[62070]: DEBUG nova.compute.utils [None req-068f41bc-f287-481f-8ce8-2a6d531e680f tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Using /dev/sd instead of None {{(pid=62070) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1237.129639] env[62070]: DEBUG nova.compute.manager [None req-068f41bc-f287-481f-8ce8-2a6d531e680f tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] [instance: 2502f29e-be96-497a-b98c-61e4d0a4e817] Not allocating networking since 'none' was specified. 
{{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1977}} [ 1237.130370] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed2e74a1-d043-466b-bd31-6c300525a4ad {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.154733] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-5e12495c-062b-4eb1-b5e4-9786a5502ef4 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] [instance: 689ba5a3-2253-4dc1-a47b-db152f86abd3] Reconfiguring VM instance instance-0000006f to attach disk [datastore2] volume-2a41b841-57c5-41b3-900a-b550e6db4174/volume-2a41b841-57c5-41b3-900a-b550e6db4174.vmdk or device None with type thin {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1237.155971] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-948993da-6574-4110-969b-3ca309e2f098 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.174831] env[62070]: DEBUG oslo_vmware.api [None req-5e12495c-062b-4eb1-b5e4-9786a5502ef4 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Waiting for the task: (returnval){ [ 1237.174831] env[62070]: value = "task-1122610" [ 1237.174831] env[62070]: _type = "Task" [ 1237.174831] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1237.183024] env[62070]: DEBUG oslo_vmware.api [None req-5e12495c-062b-4eb1-b5e4-9786a5502ef4 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Task: {'id': task-1122610, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1237.630986] env[62070]: DEBUG nova.compute.manager [None req-068f41bc-f287-481f-8ce8-2a6d531e680f tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] [instance: 2502f29e-be96-497a-b98c-61e4d0a4e817] Start building block device mappings for instance. {{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1237.684239] env[62070]: DEBUG oslo_vmware.api [None req-5e12495c-062b-4eb1-b5e4-9786a5502ef4 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Task: {'id': task-1122610, 'name': ReconfigVM_Task, 'duration_secs': 0.32906} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1237.686646] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-5e12495c-062b-4eb1-b5e4-9786a5502ef4 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] [instance: 689ba5a3-2253-4dc1-a47b-db152f86abd3] Reconfigured VM instance instance-0000006f to attach disk [datastore2] volume-2a41b841-57c5-41b3-900a-b550e6db4174/volume-2a41b841-57c5-41b3-900a-b550e6db4174.vmdk or device None with type thin {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1237.691692] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-09616227-030b-4f74-b058-1011810c1e86 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.702877] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e5ba060-23d7-4ea1-9121-adf4e178fd82 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.711871] env[62070]: DEBUG oslo_vmware.api [None req-5e12495c-062b-4eb1-b5e4-9786a5502ef4 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Waiting for the task: (returnval){ [ 1237.711871] env[62070]: value = "task-1122611" [ 1237.711871] env[62070]: _type = "Task" [ 1237.711871] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1237.712794] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee217fc3-06f3-4568-8f1d-32e2fe0ee41b {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.723655] env[62070]: DEBUG oslo_vmware.api [None req-5e12495c-062b-4eb1-b5e4-9786a5502ef4 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Task: {'id': task-1122611, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1237.750399] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9400e285-2457-431c-b0bc-34033b738b54 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.757416] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a691df1-2294-4357-9132-b5783ea30027 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.770787] env[62070]: DEBUG nova.compute.provider_tree [None req-74ddd37a-0b6e-4990-a59e-be80e23caec0 tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1238.225745] env[62070]: DEBUG oslo_vmware.api [None req-5e12495c-062b-4eb1-b5e4-9786a5502ef4 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Task: {'id': task-1122611, 'name': ReconfigVM_Task, 'duration_secs': 0.128916} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1238.226065] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-5e12495c-062b-4eb1-b5e4-9786a5502ef4 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] [instance: 689ba5a3-2253-4dc1-a47b-db152f86abd3] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-245537', 'volume_id': '2a41b841-57c5-41b3-900a-b550e6db4174', 'name': 'volume-2a41b841-57c5-41b3-900a-b550e6db4174', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '689ba5a3-2253-4dc1-a47b-db152f86abd3', 'attached_at': '', 'detached_at': '', 'volume_id': '2a41b841-57c5-41b3-900a-b550e6db4174', 'serial': '2a41b841-57c5-41b3-900a-b550e6db4174'} {{(pid=62070) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1238.273526] env[62070]: DEBUG nova.scheduler.client.report [None req-74ddd37a-0b6e-4990-a59e-be80e23caec0 tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1238.600257] env[62070]: DEBUG oslo_service.periodic_task [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62070) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1238.600496] env[62070]: DEBUG oslo_service.periodic_task [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62070) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1238.642080] env[62070]: DEBUG nova.compute.manager [None req-068f41bc-f287-481f-8ce8-2a6d531e680f tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] [instance: 2502f29e-be96-497a-b98c-61e4d0a4e817] Start spawning the instance on the hypervisor. 
{{(pid=62070) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1238.667769] env[62070]: DEBUG nova.virt.hardware [None req-068f41bc-f287-481f-8ce8-2a6d531e680f tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T09:21:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T09:21:17Z,direct_url=,disk_format='vmdk',id=43ea607c-7ece-4601-9b11-75c6a16aa7dd,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9d42cb2bbadf40d6b35f237f71234611',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T09:21:18Z,virtual_size=,visibility=), allow threads: False {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1238.668061] env[62070]: DEBUG nova.virt.hardware [None req-068f41bc-f287-481f-8ce8-2a6d531e680f tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Flavor limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1238.668235] env[62070]: DEBUG nova.virt.hardware [None req-068f41bc-f287-481f-8ce8-2a6d531e680f tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Image limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1238.668425] env[62070]: DEBUG nova.virt.hardware [None req-068f41bc-f287-481f-8ce8-2a6d531e680f tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Flavor pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1238.668575] env[62070]: DEBUG nova.virt.hardware [None req-068f41bc-f287-481f-8ce8-2a6d531e680f tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Image pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1238.668727] env[62070]: DEBUG nova.virt.hardware [None req-068f41bc-f287-481f-8ce8-2a6d531e680f tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1238.668935] env[62070]: DEBUG nova.virt.hardware [None req-068f41bc-f287-481f-8ce8-2a6d531e680f tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1238.669124] env[62070]: DEBUG nova.virt.hardware [None req-068f41bc-f287-481f-8ce8-2a6d531e680f tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1238.669307] env[62070]: DEBUG nova.virt.hardware [None req-068f41bc-f287-481f-8ce8-2a6d531e680f 
tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Got 1 possible topologies {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1238.669476] env[62070]: DEBUG nova.virt.hardware [None req-068f41bc-f287-481f-8ce8-2a6d531e680f tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1238.669656] env[62070]: DEBUG nova.virt.hardware [None req-068f41bc-f287-481f-8ce8-2a6d531e680f tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1238.670846] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-465900ec-711b-48c6-8ca0-fa1ca459a1b9 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1238.678429] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e105d279-64cc-419c-a14a-68f5ea97e334 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1238.691459] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-068f41bc-f287-481f-8ce8-2a6d531e680f tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] [instance: 2502f29e-be96-497a-b98c-61e4d0a4e817] Instance VIF info [] {{(pid=62070) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1238.696771] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-068f41bc-f287-481f-8ce8-2a6d531e680f tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Creating folder: Project (b884500b9a84405d89d76b544cc13598). Parent ref: group-v245319. {{(pid=62070) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1238.697036] env[62070]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d8260abd-3e69-4e7a-9d6d-44a8c7faba6d {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1238.706659] env[62070]: INFO nova.virt.vmwareapi.vm_util [None req-068f41bc-f287-481f-8ce8-2a6d531e680f tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Created folder: Project (b884500b9a84405d89d76b544cc13598) in parent group-v245319. [ 1238.706843] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-068f41bc-f287-481f-8ce8-2a6d531e680f tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Creating folder: Instances. Parent ref: group-v245538. {{(pid=62070) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1238.707065] env[62070]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-bd3bfc0b-1e9c-4298-8ea6-8bdf8c449523 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1238.715111] env[62070]: INFO nova.virt.vmwareapi.vm_util [None req-068f41bc-f287-481f-8ce8-2a6d531e680f tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Created folder: Instances in parent group-v245538. 
[ 1238.715335] env[62070]: DEBUG oslo.service.loopingcall [None req-068f41bc-f287-481f-8ce8-2a6d531e680f tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1238.715515] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2502f29e-be96-497a-b98c-61e4d0a4e817] Creating VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1238.715695] env[62070]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-379e242d-88c3-491e-9bf6-d3d3cb66704b {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1238.731153] env[62070]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1238.731153] env[62070]: value = "task-1122614" [ 1238.731153] env[62070]: _type = "Task" [ 1238.731153] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1238.737761] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1122614, 'name': CreateVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1238.778155] env[62070]: DEBUG oslo_concurrency.lockutils [None req-74ddd37a-0b6e-4990-a59e-be80e23caec0 tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.161s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1238.778685] env[62070]: DEBUG nova.compute.manager [None req-74ddd37a-0b6e-4990-a59e-be80e23caec0 tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] [instance: db83dddb-25dd-44ba-9046-53ba09b4db92] Start building networks asynchronously for instance. {{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1239.105100] env[62070]: DEBUG oslo_service.periodic_task [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62070) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1239.105264] env[62070]: DEBUG nova.compute.manager [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Starting heal instance info cache {{(pid=62070) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}} [ 1239.105371] env[62070]: DEBUG nova.compute.manager [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Rebuilding the list of instances to heal {{(pid=62070) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1239.240789] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1122614, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1239.264195] env[62070]: DEBUG nova.objects.instance [None req-5e12495c-062b-4eb1-b5e4-9786a5502ef4 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Lazy-loading 'flavor' on Instance uuid 689ba5a3-2253-4dc1-a47b-db152f86abd3 {{(pid=62070) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1239.284672] env[62070]: DEBUG nova.compute.utils [None req-74ddd37a-0b6e-4990-a59e-be80e23caec0 tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Using /dev/sd instead of None {{(pid=62070) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1239.285962] env[62070]: DEBUG nova.compute.manager [None req-74ddd37a-0b6e-4990-a59e-be80e23caec0 tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] [instance: db83dddb-25dd-44ba-9046-53ba09b4db92] Not allocating networking since 'none' was specified. {{(pid=62070) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1977}} [ 1239.608613] env[62070]: DEBUG nova.compute.manager [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] [instance: 2502f29e-be96-497a-b98c-61e4d0a4e817] Skipping network cache update for instance because it is Building. {{(pid=62070) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 1239.608808] env[62070]: DEBUG nova.compute.manager [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] [instance: db83dddb-25dd-44ba-9046-53ba09b4db92] Skipping network cache update for instance because it is Building. {{(pid=62070) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 1239.634521] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Acquiring lock "refresh_cache-689ba5a3-2253-4dc1-a47b-db152f86abd3" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1239.634660] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Acquired lock "refresh_cache-689ba5a3-2253-4dc1-a47b-db152f86abd3" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1239.634798] env[62070]: DEBUG nova.network.neutron [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] [instance: 689ba5a3-2253-4dc1-a47b-db152f86abd3] Forcefully refreshing network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2004}} [ 1239.634965] env[62070]: DEBUG nova.objects.instance [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Lazy-loading 'info_cache' on Instance uuid 689ba5a3-2253-4dc1-a47b-db152f86abd3 {{(pid=62070) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1239.741165] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1122614, 'name': CreateVM_Task, 'duration_secs': 0.805736} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1239.741513] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2502f29e-be96-497a-b98c-61e4d0a4e817] Created VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1239.741728] env[62070]: DEBUG oslo_concurrency.lockutils [None req-068f41bc-f287-481f-8ce8-2a6d531e680f tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1239.741894] env[62070]: DEBUG oslo_concurrency.lockutils [None req-068f41bc-f287-481f-8ce8-2a6d531e680f tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Acquired lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1239.742237] env[62070]: DEBUG oslo_concurrency.lockutils [None req-068f41bc-f287-481f-8ce8-2a6d531e680f tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1239.742483] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ecda8395-6ec8-4ecf-bbe4-c4d7ca213387 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.746538] env[62070]: DEBUG oslo_vmware.api [None req-068f41bc-f287-481f-8ce8-2a6d531e680f tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Waiting for the task: (returnval){ [ 1239.746538] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]521c3052-3fe8-9ad9-2197-62e2fb5d2264" [ 1239.746538] env[62070]: _type = "Task" [ 1239.746538] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1239.753703] env[62070]: DEBUG oslo_vmware.api [None req-068f41bc-f287-481f-8ce8-2a6d531e680f tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]521c3052-3fe8-9ad9-2197-62e2fb5d2264, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1239.769181] env[62070]: DEBUG oslo_concurrency.lockutils [None req-5e12495c-062b-4eb1-b5e4-9786a5502ef4 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Lock "689ba5a3-2253-4dc1-a47b-db152f86abd3" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.260s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1239.787753] env[62070]: DEBUG nova.compute.manager [None req-74ddd37a-0b6e-4990-a59e-be80e23caec0 tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] [instance: db83dddb-25dd-44ba-9046-53ba09b4db92] Start building block device mappings for instance. 
{{(pid=62070) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1239.921598] env[62070]: DEBUG oslo_concurrency.lockutils [None req-fc01132b-c011-4cfb-b5b0-b2b63b48e83b tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Acquiring lock "689ba5a3-2253-4dc1-a47b-db152f86abd3" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1239.921824] env[62070]: DEBUG oslo_concurrency.lockutils [None req-fc01132b-c011-4cfb-b5b0-b2b63b48e83b tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Lock "689ba5a3-2253-4dc1-a47b-db152f86abd3" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1240.256942] env[62070]: DEBUG oslo_vmware.api [None req-068f41bc-f287-481f-8ce8-2a6d531e680f tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]521c3052-3fe8-9ad9-2197-62e2fb5d2264, 'name': SearchDatastore_Task, 'duration_secs': 0.009273} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1240.257205] env[62070]: DEBUG oslo_concurrency.lockutils [None req-068f41bc-f287-481f-8ce8-2a6d531e680f tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Releasing lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1240.257446] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-068f41bc-f287-481f-8ce8-2a6d531e680f tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] [instance: 2502f29e-be96-497a-b98c-61e4d0a4e817] Processing image 43ea607c-7ece-4601-9b11-75c6a16aa7dd {{(pid=62070) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1240.257682] env[62070]: DEBUG oslo_concurrency.lockutils [None req-068f41bc-f287-481f-8ce8-2a6d531e680f tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1240.257833] env[62070]: DEBUG oslo_concurrency.lockutils [None req-068f41bc-f287-481f-8ce8-2a6d531e680f tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Acquired lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1240.258055] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-068f41bc-f287-481f-8ce8-2a6d531e680f tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1240.258317] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-484cd604-b3f6-45eb-aaa9-cec9f524c861 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.266381] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-068f41bc-f287-481f-8ce8-2a6d531e680f tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1240.266553] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-068f41bc-f287-481f-8ce8-2a6d531e680f tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62070) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1240.267228] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2fee3492-5d4d-4de4-8bcf-3d7f6cc75ad6 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.272095] env[62070]: DEBUG oslo_vmware.api [None req-068f41bc-f287-481f-8ce8-2a6d531e680f tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Waiting for the task: (returnval){ [ 1240.272095] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]5207be7b-a572-12e7-2b3c-bc2b1a65e354" [ 1240.272095] env[62070]: _type = "Task" [ 1240.272095] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1240.279722] env[62070]: DEBUG oslo_vmware.api [None req-068f41bc-f287-481f-8ce8-2a6d531e680f tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]5207be7b-a572-12e7-2b3c-bc2b1a65e354, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1240.424452] env[62070]: INFO nova.compute.manager [None req-fc01132b-c011-4cfb-b5b0-b2b63b48e83b tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] [instance: 689ba5a3-2253-4dc1-a47b-db152f86abd3] Detaching volume 2a41b841-57c5-41b3-900a-b550e6db4174 [ 1240.453757] env[62070]: INFO nova.virt.block_device [None req-fc01132b-c011-4cfb-b5b0-b2b63b48e83b tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] [instance: 689ba5a3-2253-4dc1-a47b-db152f86abd3] Attempting to driver detach volume 2a41b841-57c5-41b3-900a-b550e6db4174 from mountpoint /dev/sdb [ 1240.454012] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-fc01132b-c011-4cfb-b5b0-b2b63b48e83b tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] [instance: 689ba5a3-2253-4dc1-a47b-db152f86abd3] Volume detach. 
Driver type: vmdk {{(pid=62070) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1240.454216] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-fc01132b-c011-4cfb-b5b0-b2b63b48e83b tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] [instance: 689ba5a3-2253-4dc1-a47b-db152f86abd3] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-245537', 'volume_id': '2a41b841-57c5-41b3-900a-b550e6db4174', 'name': 'volume-2a41b841-57c5-41b3-900a-b550e6db4174', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '689ba5a3-2253-4dc1-a47b-db152f86abd3', 'attached_at': '', 'detached_at': '', 'volume_id': '2a41b841-57c5-41b3-900a-b550e6db4174', 'serial': '2a41b841-57c5-41b3-900a-b550e6db4174'} {{(pid=62070) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1240.455114] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-640cb0b3-f5ca-462b-9b0b-052c9530ea8e {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.476562] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-681449b8-12d8-4b8b-a7d9-bf0d832ed379 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.483386] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3042338-83c0-40fa-82d6-e78b9d29630b {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.503027] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81443635-b9e0-4ce6-b951-5f111459cd51 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.517277] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-fc01132b-c011-4cfb-b5b0-b2b63b48e83b tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] The volume has not been displaced from its original location: [datastore2] volume-2a41b841-57c5-41b3-900a-b550e6db4174/volume-2a41b841-57c5-41b3-900a-b550e6db4174.vmdk. No consolidation needed. 
{{(pid=62070) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1240.522575] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-fc01132b-c011-4cfb-b5b0-b2b63b48e83b tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] [instance: 689ba5a3-2253-4dc1-a47b-db152f86abd3] Reconfiguring VM instance instance-0000006f to detach disk 2001 {{(pid=62070) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1240.522882] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5193727d-ec10-4c4e-8696-4e04addb7f1a {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.540048] env[62070]: DEBUG oslo_vmware.api [None req-fc01132b-c011-4cfb-b5b0-b2b63b48e83b tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Waiting for the task: (returnval){ [ 1240.540048] env[62070]: value = "task-1122615" [ 1240.540048] env[62070]: _type = "Task" [ 1240.540048] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1240.547465] env[62070]: DEBUG oslo_vmware.api [None req-fc01132b-c011-4cfb-b5b0-b2b63b48e83b tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Task: {'id': task-1122615, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1240.781987] env[62070]: DEBUG oslo_vmware.api [None req-068f41bc-f287-481f-8ce8-2a6d531e680f tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]5207be7b-a572-12e7-2b3c-bc2b1a65e354, 'name': SearchDatastore_Task, 'duration_secs': 0.008495} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1240.782749] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-439e0a0c-bcf5-4ffc-b4dc-d7bde8646ac3 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.787317] env[62070]: DEBUG oslo_vmware.api [None req-068f41bc-f287-481f-8ce8-2a6d531e680f tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Waiting for the task: (returnval){ [ 1240.787317] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]52240264-39e1-5a53-5431-45ef3b55ed99" [ 1240.787317] env[62070]: _type = "Task" [ 1240.787317] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1240.794431] env[62070]: DEBUG oslo_vmware.api [None req-068f41bc-f287-481f-8ce8-2a6d531e680f tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52240264-39e1-5a53-5431-45ef3b55ed99, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1240.795476] env[62070]: DEBUG nova.compute.manager [None req-74ddd37a-0b6e-4990-a59e-be80e23caec0 tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] [instance: db83dddb-25dd-44ba-9046-53ba09b4db92] Start spawning the instance on the hypervisor. {{(pid=62070) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1240.815218] env[62070]: DEBUG nova.virt.hardware [None req-74ddd37a-0b6e-4990-a59e-be80e23caec0 tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T09:21:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T09:21:17Z,direct_url=,disk_format='vmdk',id=43ea607c-7ece-4601-9b11-75c6a16aa7dd,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9d42cb2bbadf40d6b35f237f71234611',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T09:21:18Z,virtual_size=,visibility=), allow threads: False {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1240.815475] env[62070]: DEBUG nova.virt.hardware [None req-74ddd37a-0b6e-4990-a59e-be80e23caec0 tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Flavor limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1240.815636] env[62070]: DEBUG nova.virt.hardware [None req-74ddd37a-0b6e-4990-a59e-be80e23caec0 tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Image limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1240.815821] env[62070]: DEBUG nova.virt.hardware [None req-74ddd37a-0b6e-4990-a59e-be80e23caec0 tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Flavor pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1240.815975] env[62070]: DEBUG nova.virt.hardware [None req-74ddd37a-0b6e-4990-a59e-be80e23caec0 tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Image pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1240.816143] env[62070]: DEBUG nova.virt.hardware [None req-74ddd37a-0b6e-4990-a59e-be80e23caec0 tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1240.816358] env[62070]: DEBUG nova.virt.hardware [None req-74ddd37a-0b6e-4990-a59e-be80e23caec0 tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1240.816523] env[62070]: DEBUG nova.virt.hardware 
[None req-74ddd37a-0b6e-4990-a59e-be80e23caec0 tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1240.816697] env[62070]: DEBUG nova.virt.hardware [None req-74ddd37a-0b6e-4990-a59e-be80e23caec0 tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Got 1 possible topologies {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1240.816865] env[62070]: DEBUG nova.virt.hardware [None req-74ddd37a-0b6e-4990-a59e-be80e23caec0 tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1240.817061] env[62070]: DEBUG nova.virt.hardware [None req-74ddd37a-0b6e-4990-a59e-be80e23caec0 tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1240.817827] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7631dff5-51c5-4ae4-9cc4-b90bc257bc58 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.824845] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7701d79-d67e-4fc4-92e8-0e657d4ba899 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.838835] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-74ddd37a-0b6e-4990-a59e-be80e23caec0 tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] [instance: db83dddb-25dd-44ba-9046-53ba09b4db92] Instance VIF info [] {{(pid=62070) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1240.844826] env[62070]: DEBUG oslo.service.loopingcall [None req-74ddd37a-0b6e-4990-a59e-be80e23caec0 tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1240.845077] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: db83dddb-25dd-44ba-9046-53ba09b4db92] Creating VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1240.845284] env[62070]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-572959cb-42c0-4e50-ae55-a4389bd438ed {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.861308] env[62070]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1240.861308] env[62070]: value = "task-1122616" [ 1240.861308] env[62070]: _type = "Task" [ 1240.861308] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1240.868500] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1122616, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1241.051354] env[62070]: DEBUG oslo_vmware.api [None req-fc01132b-c011-4cfb-b5b0-b2b63b48e83b tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Task: {'id': task-1122615, 'name': ReconfigVM_Task, 'duration_secs': 0.210708} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1241.051620] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-fc01132b-c011-4cfb-b5b0-b2b63b48e83b tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] [instance: 689ba5a3-2253-4dc1-a47b-db152f86abd3] Reconfigured VM instance instance-0000006f to detach disk 2001 {{(pid=62070) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1241.056754] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d7cf4f99-26c0-4de4-9524-ade7b49524af {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.072446] env[62070]: DEBUG oslo_vmware.api [None req-fc01132b-c011-4cfb-b5b0-b2b63b48e83b tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Waiting for the task: (returnval){ [ 1241.072446] env[62070]: value = "task-1122617" [ 1241.072446] env[62070]: _type = "Task" [ 1241.072446] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1241.083220] env[62070]: DEBUG oslo_vmware.api [None req-fc01132b-c011-4cfb-b5b0-b2b63b48e83b tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Task: {'id': task-1122617, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1241.297193] env[62070]: DEBUG oslo_vmware.api [None req-068f41bc-f287-481f-8ce8-2a6d531e680f tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52240264-39e1-5a53-5431-45ef3b55ed99, 'name': SearchDatastore_Task, 'duration_secs': 0.008661} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1241.299590] env[62070]: DEBUG oslo_concurrency.lockutils [None req-068f41bc-f287-481f-8ce8-2a6d531e680f tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Releasing lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1241.299881] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-068f41bc-f287-481f-8ce8-2a6d531e680f tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore2] 2502f29e-be96-497a-b98c-61e4d0a4e817/2502f29e-be96-497a-b98c-61e4d0a4e817.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1241.300169] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c8b20606-054b-4fe3-a630-459a9664192c {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.306316] env[62070]: DEBUG oslo_vmware.api [None req-068f41bc-f287-481f-8ce8-2a6d531e680f tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Waiting for the task: (returnval){ [ 1241.306316] env[62070]: value = "task-1122618" [ 1241.306316] env[62070]: _type = "Task" [ 1241.306316] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1241.313723] env[62070]: DEBUG oslo_vmware.api [None req-068f41bc-f287-481f-8ce8-2a6d531e680f tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Task: {'id': task-1122618, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1241.372054] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1122616, 'name': CreateVM_Task, 'duration_secs': 0.24342} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1241.373036] env[62070]: DEBUG nova.network.neutron [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] [instance: 689ba5a3-2253-4dc1-a47b-db152f86abd3] Updating instance_info_cache with network_info: [{"id": "f7325230-82ee-4433-865b-d8d7ced03602", "address": "fa:16:3e:c2:5b:0e", "network": {"id": "6ea9aade-1b40-4ce8-a502-14ff09a4ab40", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1617295069-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.218", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "735d24ccc5614660a5b34d77af648f94", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5b8af79a-31d5-4d78-93d7-3919aa1d9186", "external-id": "nsx-vlan-transportzone-324", "segmentation_id": 324, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf7325230-82", "ovs_interfaceid": "f7325230-82ee-4433-865b-d8d7ced03602", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1241.374160] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: db83dddb-25dd-44ba-9046-53ba09b4db92] Created VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1241.374814] env[62070]: DEBUG oslo_concurrency.lockutils [None req-74ddd37a-0b6e-4990-a59e-be80e23caec0 tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1241.375064] env[62070]: DEBUG oslo_concurrency.lockutils [None req-74ddd37a-0b6e-4990-a59e-be80e23caec0 tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Acquired lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1241.375415] env[62070]: DEBUG oslo_concurrency.lockutils [None req-74ddd37a-0b6e-4990-a59e-be80e23caec0 tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1241.375696] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-48f9c2f6-7969-4f78-ae41-dd0c28098f66 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.380212] env[62070]: DEBUG oslo_vmware.api [None req-74ddd37a-0b6e-4990-a59e-be80e23caec0 tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Waiting for 
the task: (returnval){ [ 1241.380212] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]52cc9b81-5657-7807-f942-e8568f82f6b2" [ 1241.380212] env[62070]: _type = "Task" [ 1241.380212] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1241.388623] env[62070]: DEBUG oslo_vmware.api [None req-74ddd37a-0b6e-4990-a59e-be80e23caec0 tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52cc9b81-5657-7807-f942-e8568f82f6b2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1241.583943] env[62070]: DEBUG oslo_vmware.api [None req-fc01132b-c011-4cfb-b5b0-b2b63b48e83b tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Task: {'id': task-1122617, 'name': ReconfigVM_Task, 'duration_secs': 0.1374} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1241.584312] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-fc01132b-c011-4cfb-b5b0-b2b63b48e83b tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] [instance: 689ba5a3-2253-4dc1-a47b-db152f86abd3] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-245537', 'volume_id': '2a41b841-57c5-41b3-900a-b550e6db4174', 'name': 'volume-2a41b841-57c5-41b3-900a-b550e6db4174', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '689ba5a3-2253-4dc1-a47b-db152f86abd3', 'attached_at': '', 'detached_at': '', 'volume_id': '2a41b841-57c5-41b3-900a-b550e6db4174', 'serial': '2a41b841-57c5-41b3-900a-b550e6db4174'} {{(pid=62070) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1241.816114] env[62070]: DEBUG oslo_vmware.api [None req-068f41bc-f287-481f-8ce8-2a6d531e680f tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Task: {'id': task-1122618, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.418355} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1241.816478] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-068f41bc-f287-481f-8ce8-2a6d531e680f tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore2] 2502f29e-be96-497a-b98c-61e4d0a4e817/2502f29e-be96-497a-b98c-61e4d0a4e817.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 1241.816574] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-068f41bc-f287-481f-8ce8-2a6d531e680f tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] [instance: 2502f29e-be96-497a-b98c-61e4d0a4e817] Extending root virtual disk to 1048576 {{(pid=62070) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1241.816819] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a9cf5255-dc1d-4b24-8adc-2953aed2f8ff {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.822964] env[62070]: DEBUG oslo_vmware.api [None req-068f41bc-f287-481f-8ce8-2a6d531e680f tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Waiting for the task: (returnval){ [ 1241.822964] env[62070]: value = "task-1122619" [ 1241.822964] env[62070]: _type = "Task" [ 1241.822964] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1241.830481] env[62070]: DEBUG oslo_vmware.api [None req-068f41bc-f287-481f-8ce8-2a6d531e680f tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Task: {'id': task-1122619, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1241.875441] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Releasing lock "refresh_cache-689ba5a3-2253-4dc1-a47b-db152f86abd3" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1241.876530] env[62070]: DEBUG nova.compute.manager [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] [instance: 689ba5a3-2253-4dc1-a47b-db152f86abd3] Updated the network info_cache for instance {{(pid=62070) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9999}} [ 1241.876530] env[62070]: DEBUG oslo_service.periodic_task [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62070) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1241.876530] env[62070]: DEBUG oslo_service.periodic_task [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62070) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1241.876530] env[62070]: DEBUG oslo_service.periodic_task [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62070) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1241.876530] env[62070]: DEBUG oslo_service.periodic_task [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62070) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1241.876530] env[62070]: DEBUG oslo_service.periodic_task [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62070) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1241.876837] env[62070]: DEBUG oslo_service.periodic_task [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62070) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1241.876837] env[62070]: DEBUG nova.compute.manager [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62070) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}} [ 1241.876837] env[62070]: DEBUG oslo_service.periodic_task [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62070) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1241.891448] env[62070]: DEBUG oslo_vmware.api [None req-74ddd37a-0b6e-4990-a59e-be80e23caec0 tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52cc9b81-5657-7807-f942-e8568f82f6b2, 'name': SearchDatastore_Task, 'duration_secs': 0.007949} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1241.891728] env[62070]: DEBUG oslo_concurrency.lockutils [None req-74ddd37a-0b6e-4990-a59e-be80e23caec0 tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Releasing lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1241.891949] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-74ddd37a-0b6e-4990-a59e-be80e23caec0 tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] [instance: db83dddb-25dd-44ba-9046-53ba09b4db92] Processing image 43ea607c-7ece-4601-9b11-75c6a16aa7dd {{(pid=62070) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1241.892258] env[62070]: DEBUG oslo_concurrency.lockutils [None req-74ddd37a-0b6e-4990-a59e-be80e23caec0 tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1241.892424] env[62070]: DEBUG oslo_concurrency.lockutils [None req-74ddd37a-0b6e-4990-a59e-be80e23caec0 tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Acquired lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1241.892602] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-74ddd37a-0b6e-4990-a59e-be80e23caec0 tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1241.892853] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f7d1d143-26c5-4713-b942-8d0b689d9d54 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.901221] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-74ddd37a-0b6e-4990-a59e-be80e23caec0 tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1241.901402] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-74ddd37a-0b6e-4990-a59e-be80e23caec0 tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62070) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1241.902109] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9f508db1-662f-47b1-a849-2183f17c92fa {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.906620] env[62070]: DEBUG oslo_vmware.api [None req-74ddd37a-0b6e-4990-a59e-be80e23caec0 tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Waiting for the task: (returnval){ [ 1241.906620] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]52ddc9fc-12d6-5d6d-dd55-9919a2411bed" [ 1241.906620] env[62070]: _type = "Task" [ 1241.906620] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1241.913741] env[62070]: DEBUG oslo_vmware.api [None req-74ddd37a-0b6e-4990-a59e-be80e23caec0 tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52ddc9fc-12d6-5d6d-dd55-9919a2411bed, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1242.126291] env[62070]: DEBUG nova.objects.instance [None req-fc01132b-c011-4cfb-b5b0-b2b63b48e83b tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Lazy-loading 'flavor' on Instance uuid 689ba5a3-2253-4dc1-a47b-db152f86abd3 {{(pid=62070) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1242.332249] env[62070]: DEBUG oslo_vmware.api [None req-068f41bc-f287-481f-8ce8-2a6d531e680f tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Task: {'id': task-1122619, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.058479} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1242.332515] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-068f41bc-f287-481f-8ce8-2a6d531e680f tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] [instance: 2502f29e-be96-497a-b98c-61e4d0a4e817] Extended root virtual disk {{(pid=62070) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1242.333289] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e33dd57-9c2a-4362-bc3a-ba0ac8be2953 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1242.351923] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-068f41bc-f287-481f-8ce8-2a6d531e680f tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] [instance: 2502f29e-be96-497a-b98c-61e4d0a4e817] Reconfiguring VM instance instance-00000072 to attach disk [datastore2] 2502f29e-be96-497a-b98c-61e4d0a4e817/2502f29e-be96-497a-b98c-61e4d0a4e817.vmdk or device None with type sparse {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1242.352155] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0b01c13b-5f06-4d4b-9235-94a78de9a333 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1242.370317] env[62070]: DEBUG oslo_vmware.api [None req-068f41bc-f287-481f-8ce8-2a6d531e680f tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Waiting for the task: (returnval){ [ 1242.370317] env[62070]: value = "task-1122620" [ 1242.370317] env[62070]: _type = "Task" [ 1242.370317] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1242.377558] env[62070]: DEBUG oslo_vmware.api [None req-068f41bc-f287-481f-8ce8-2a6d531e680f tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Task: {'id': task-1122620, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1242.379412] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1242.379608] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1242.379770] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1242.379959] env[62070]: DEBUG nova.compute.resource_tracker [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62070) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1242.380666] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22a9fee0-2073-40ed-83d6-b8fd0ded5c3d {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1242.387251] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5733c9b2-e433-4aea-8b51-235b1a55402c {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1242.401471] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8dee1109-c617-4662-8bec-f60763309f39 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1242.410576] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d0faf9d-8f91-449a-b487-b2308ae57fb5 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1242.418633] env[62070]: DEBUG oslo_vmware.api [None req-74ddd37a-0b6e-4990-a59e-be80e23caec0 tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52ddc9fc-12d6-5d6d-dd55-9919a2411bed, 'name': SearchDatastore_Task, 'duration_secs': 0.01937} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1242.443480] env[62070]: DEBUG nova.compute.resource_tracker [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181063MB free_disk=169GB free_vcpus=48 pci_devices=None {{(pid=62070) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1242.443613] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1242.443788] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1242.445282] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ed6b8b1a-57b9-4463-9e2c-601183a7bbf5 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1242.450247] env[62070]: DEBUG oslo_vmware.api [None req-74ddd37a-0b6e-4990-a59e-be80e23caec0 tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Waiting for the task: (returnval){ [ 1242.450247] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]52fcfcd1-862c-55bb-5cbb-f164ca4986c0" [ 1242.450247] env[62070]: _type = "Task" [ 1242.450247] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1242.457204] env[62070]: DEBUG oslo_vmware.api [None req-74ddd37a-0b6e-4990-a59e-be80e23caec0 tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52fcfcd1-862c-55bb-5cbb-f164ca4986c0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1242.880043] env[62070]: DEBUG oslo_vmware.api [None req-068f41bc-f287-481f-8ce8-2a6d531e680f tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Task: {'id': task-1122620, 'name': ReconfigVM_Task, 'duration_secs': 0.266334} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1242.880313] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-068f41bc-f287-481f-8ce8-2a6d531e680f tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] [instance: 2502f29e-be96-497a-b98c-61e4d0a4e817] Reconfigured VM instance instance-00000072 to attach disk [datastore2] 2502f29e-be96-497a-b98c-61e4d0a4e817/2502f29e-be96-497a-b98c-61e4d0a4e817.vmdk or device None with type sparse {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1242.880906] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-636a591f-cc1f-43e7-aa73-1ff5b524b3fc {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1242.887377] env[62070]: DEBUG oslo_vmware.api [None req-068f41bc-f287-481f-8ce8-2a6d531e680f tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Waiting for the task: (returnval){ [ 1242.887377] env[62070]: value = "task-1122621" [ 1242.887377] env[62070]: _type = "Task" [ 1242.887377] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1242.894715] env[62070]: DEBUG oslo_vmware.api [None req-068f41bc-f287-481f-8ce8-2a6d531e680f tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Task: {'id': task-1122621, 'name': Rename_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1242.959867] env[62070]: DEBUG oslo_vmware.api [None req-74ddd37a-0b6e-4990-a59e-be80e23caec0 tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52fcfcd1-862c-55bb-5cbb-f164ca4986c0, 'name': SearchDatastore_Task, 'duration_secs': 0.0089} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1242.960658] env[62070]: DEBUG oslo_concurrency.lockutils [None req-74ddd37a-0b6e-4990-a59e-be80e23caec0 tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Releasing lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1242.961157] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-74ddd37a-0b6e-4990-a59e-be80e23caec0 tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore2] db83dddb-25dd-44ba-9046-53ba09b4db92/db83dddb-25dd-44ba-9046-53ba09b4db92.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1242.961268] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-dc1f4ebf-a932-45a0-b97a-b4d049a5673b {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1242.967686] env[62070]: DEBUG oslo_vmware.api [None req-74ddd37a-0b6e-4990-a59e-be80e23caec0 tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Waiting for the task: (returnval){ [ 1242.967686] env[62070]: value = "task-1122622" [ 1242.967686] env[62070]: _type = "Task" [ 1242.967686] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1242.975374] env[62070]: DEBUG oslo_vmware.api [None req-74ddd37a-0b6e-4990-a59e-be80e23caec0 tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Task: {'id': task-1122622, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1243.133700] env[62070]: DEBUG oslo_concurrency.lockutils [None req-fc01132b-c011-4cfb-b5b0-b2b63b48e83b tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Lock "689ba5a3-2253-4dc1-a47b-db152f86abd3" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.212s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1243.397701] env[62070]: DEBUG oslo_vmware.api [None req-068f41bc-f287-481f-8ce8-2a6d531e680f tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Task: {'id': task-1122621, 'name': Rename_Task, 'duration_secs': 0.119999} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1243.398016] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-068f41bc-f287-481f-8ce8-2a6d531e680f tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] [instance: 2502f29e-be96-497a-b98c-61e4d0a4e817] Powering on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1243.398362] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-57064b87-a118-4196-9f8b-6b0bdeb12d0d {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.404908] env[62070]: DEBUG oslo_vmware.api [None req-068f41bc-f287-481f-8ce8-2a6d531e680f tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Waiting for the task: (returnval){ [ 1243.404908] env[62070]: value = "task-1122623" [ 1243.404908] env[62070]: _type = "Task" [ 1243.404908] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1243.412653] env[62070]: DEBUG oslo_vmware.api [None req-068f41bc-f287-481f-8ce8-2a6d531e680f tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Task: {'id': task-1122623, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1243.474314] env[62070]: DEBUG nova.compute.resource_tracker [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Instance 689ba5a3-2253-4dc1-a47b-db152f86abd3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62070) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1243.474469] env[62070]: DEBUG nova.compute.resource_tracker [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Instance 229bb9ef-b8d8-40cb-a589-3aa280b904d7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62070) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1243.474594] env[62070]: DEBUG nova.compute.resource_tracker [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Instance 2502f29e-be96-497a-b98c-61e4d0a4e817 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62070) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1243.474715] env[62070]: DEBUG nova.compute.resource_tracker [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Instance db83dddb-25dd-44ba-9046-53ba09b4db92 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62070) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1243.474893] env[62070]: DEBUG nova.compute.resource_tracker [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Total usable vcpus: 48, total allocated vcpus: 4 {{(pid=62070) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1243.475040] env[62070]: DEBUG nova.compute.resource_tracker [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1280MB phys_disk=200GB used_disk=4GB total_vcpus=48 used_vcpus=4 pci_stats=[] {{(pid=62070) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1243.480353] env[62070]: DEBUG oslo_vmware.api [None req-74ddd37a-0b6e-4990-a59e-be80e23caec0 tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Task: {'id': task-1122622, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.452518} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1243.480596] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-74ddd37a-0b6e-4990-a59e-be80e23caec0 tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore2] db83dddb-25dd-44ba-9046-53ba09b4db92/db83dddb-25dd-44ba-9046-53ba09b4db92.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 1243.480809] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-74ddd37a-0b6e-4990-a59e-be80e23caec0 tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] [instance: db83dddb-25dd-44ba-9046-53ba09b4db92] Extending root virtual disk to 1048576 {{(pid=62070) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1243.481069] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ceedf619-a25f-43de-9d69-e64612a08571 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.487163] env[62070]: DEBUG oslo_vmware.api [None req-74ddd37a-0b6e-4990-a59e-be80e23caec0 tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Waiting for the task: (returnval){ [ 1243.487163] env[62070]: value = "task-1122624" [ 1243.487163] env[62070]: _type = "Task" [ 1243.487163] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1243.496512] env[62070]: DEBUG oslo_vmware.api [None req-74ddd37a-0b6e-4990-a59e-be80e23caec0 tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Task: {'id': task-1122624, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1243.535921] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63ff5192-f0d4-482a-b2b3-7211e8d17df7 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.543261] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1daf114-e3aa-4bb7-b63c-8ad37f5a50b7 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.573470] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81b758e4-5834-4f5c-8d3a-74af39eb4ffd {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.580599] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f50fca4a-466a-40e0-9fe7-2d46b1540a15 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.593374] env[62070]: DEBUG nova.compute.provider_tree [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1243.917360] env[62070]: DEBUG oslo_vmware.api [None req-068f41bc-f287-481f-8ce8-2a6d531e680f tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Task: {'id': task-1122623, 'name': PowerOnVM_Task, 'duration_secs': 0.42858} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1243.917687] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-068f41bc-f287-481f-8ce8-2a6d531e680f tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] [instance: 2502f29e-be96-497a-b98c-61e4d0a4e817] Powered on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1243.917983] env[62070]: INFO nova.compute.manager [None req-068f41bc-f287-481f-8ce8-2a6d531e680f tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] [instance: 2502f29e-be96-497a-b98c-61e4d0a4e817] Took 5.28 seconds to spawn the instance on the hypervisor. [ 1243.918253] env[62070]: DEBUG nova.compute.manager [None req-068f41bc-f287-481f-8ce8-2a6d531e680f tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] [instance: 2502f29e-be96-497a-b98c-61e4d0a4e817] Checking state {{(pid=62070) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1243.919325] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfaf3ca3-933e-4946-9b6c-377ee769c7f1 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.996244] env[62070]: DEBUG oslo_vmware.api [None req-74ddd37a-0b6e-4990-a59e-be80e23caec0 tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Task: {'id': task-1122624, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.061968} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1243.996490] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-74ddd37a-0b6e-4990-a59e-be80e23caec0 tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] [instance: db83dddb-25dd-44ba-9046-53ba09b4db92] Extended root virtual disk {{(pid=62070) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1243.997238] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cee1dced-e778-4c19-8b21-f14cf0178c3e {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1244.017416] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-74ddd37a-0b6e-4990-a59e-be80e23caec0 tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] [instance: db83dddb-25dd-44ba-9046-53ba09b4db92] Reconfiguring VM instance instance-00000073 to attach disk [datastore2] db83dddb-25dd-44ba-9046-53ba09b4db92/db83dddb-25dd-44ba-9046-53ba09b4db92.vmdk or device None with type sparse {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1244.017649] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5e2f602b-f92c-4434-9fdf-5a3eda28521f {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1244.036473] env[62070]: DEBUG oslo_vmware.api [None req-74ddd37a-0b6e-4990-a59e-be80e23caec0 tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Waiting for the task: (returnval){ [ 1244.036473] env[62070]: value = "task-1122625" [ 1244.036473] env[62070]: _type = "Task" [ 1244.036473] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1244.043612] env[62070]: DEBUG oslo_vmware.api [None req-74ddd37a-0b6e-4990-a59e-be80e23caec0 tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Task: {'id': task-1122625, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1244.096655] env[62070]: DEBUG nova.scheduler.client.report [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1244.175041] env[62070]: DEBUG oslo_concurrency.lockutils [None req-91434deb-f672-4a1a-8379-320038c15ef5 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Acquiring lock "689ba5a3-2253-4dc1-a47b-db152f86abd3" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1244.175287] env[62070]: DEBUG oslo_concurrency.lockutils [None req-91434deb-f672-4a1a-8379-320038c15ef5 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Lock "689ba5a3-2253-4dc1-a47b-db152f86abd3" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1244.175500] env[62070]: DEBUG oslo_concurrency.lockutils [None req-91434deb-f672-4a1a-8379-320038c15ef5 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Acquiring lock "689ba5a3-2253-4dc1-a47b-db152f86abd3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1244.175689] env[62070]: DEBUG oslo_concurrency.lockutils [None req-91434deb-f672-4a1a-8379-320038c15ef5 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Lock "689ba5a3-2253-4dc1-a47b-db152f86abd3-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1244.175864] env[62070]: DEBUG oslo_concurrency.lockutils [None req-91434deb-f672-4a1a-8379-320038c15ef5 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Lock "689ba5a3-2253-4dc1-a47b-db152f86abd3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1244.178226] env[62070]: INFO nova.compute.manager [None req-91434deb-f672-4a1a-8379-320038c15ef5 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] [instance: 689ba5a3-2253-4dc1-a47b-db152f86abd3] Terminating instance [ 1244.179964] env[62070]: DEBUG nova.compute.manager [None req-91434deb-f672-4a1a-8379-320038c15ef5 tempest-AttachVolumeNegativeTest-303800157 
tempest-AttachVolumeNegativeTest-303800157-project-member] [instance: 689ba5a3-2253-4dc1-a47b-db152f86abd3] Start destroying the instance on the hypervisor. {{(pid=62070) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1244.180190] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-91434deb-f672-4a1a-8379-320038c15ef5 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] [instance: 689ba5a3-2253-4dc1-a47b-db152f86abd3] Destroying instance {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1244.181036] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-269c0c64-fcbd-4451-b744-9a20a9af2cb5 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1244.188159] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-91434deb-f672-4a1a-8379-320038c15ef5 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] [instance: 689ba5a3-2253-4dc1-a47b-db152f86abd3] Powering off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1244.188392] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a36617b5-8bd7-40a4-99e5-52dc046ba68e {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1244.194011] env[62070]: DEBUG oslo_vmware.api [None req-91434deb-f672-4a1a-8379-320038c15ef5 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Waiting for the task: (returnval){ [ 1244.194011] env[62070]: value = "task-1122626" [ 1244.194011] env[62070]: _type = "Task" [ 1244.194011] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1244.201384] env[62070]: DEBUG oslo_vmware.api [None req-91434deb-f672-4a1a-8379-320038c15ef5 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Task: {'id': task-1122626, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1244.436087] env[62070]: INFO nova.compute.manager [None req-068f41bc-f287-481f-8ce8-2a6d531e680f tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] [instance: 2502f29e-be96-497a-b98c-61e4d0a4e817] Took 9.97 seconds to build instance. [ 1244.546217] env[62070]: DEBUG oslo_vmware.api [None req-74ddd37a-0b6e-4990-a59e-be80e23caec0 tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Task: {'id': task-1122625, 'name': ReconfigVM_Task, 'duration_secs': 0.30285} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1244.546535] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-74ddd37a-0b6e-4990-a59e-be80e23caec0 tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] [instance: db83dddb-25dd-44ba-9046-53ba09b4db92] Reconfigured VM instance instance-00000073 to attach disk [datastore2] db83dddb-25dd-44ba-9046-53ba09b4db92/db83dddb-25dd-44ba-9046-53ba09b4db92.vmdk or device None with type sparse {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1244.547211] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d0858d7b-65f4-46e9-8e3b-20f5824a6a66 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1244.554840] env[62070]: DEBUG oslo_vmware.api [None req-74ddd37a-0b6e-4990-a59e-be80e23caec0 tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Waiting for the task: (returnval){ [ 1244.554840] env[62070]: value = "task-1122627" [ 1244.554840] env[62070]: _type = "Task" [ 1244.554840] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1244.563335] env[62070]: DEBUG oslo_vmware.api [None req-74ddd37a-0b6e-4990-a59e-be80e23caec0 tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Task: {'id': task-1122627, 'name': Rename_Task} progress is 5%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1244.601775] env[62070]: DEBUG nova.compute.resource_tracker [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62070) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1244.601994] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.158s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1244.703587] env[62070]: DEBUG oslo_vmware.api [None req-91434deb-f672-4a1a-8379-320038c15ef5 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Task: {'id': task-1122626, 'name': PowerOffVM_Task, 'duration_secs': 0.438889} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1244.703841] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-91434deb-f672-4a1a-8379-320038c15ef5 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] [instance: 689ba5a3-2253-4dc1-a47b-db152f86abd3] Powered off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1244.704091] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-91434deb-f672-4a1a-8379-320038c15ef5 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] [instance: 689ba5a3-2253-4dc1-a47b-db152f86abd3] Unregistering the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1244.704380] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-73610c00-35b4-476c-a735-aba43e21b672 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1244.777162] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-91434deb-f672-4a1a-8379-320038c15ef5 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] [instance: 689ba5a3-2253-4dc1-a47b-db152f86abd3] Unregistered the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1244.777416] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-91434deb-f672-4a1a-8379-320038c15ef5 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] [instance: 689ba5a3-2253-4dc1-a47b-db152f86abd3] Deleting contents of the VM from datastore datastore2 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1244.777605] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-91434deb-f672-4a1a-8379-320038c15ef5 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Deleting the datastore file [datastore2] 689ba5a3-2253-4dc1-a47b-db152f86abd3 {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1244.777897] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3cfd4304-7051-4acb-b4db-016d3c3b3df0 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1244.785819] env[62070]: DEBUG oslo_vmware.api [None req-91434deb-f672-4a1a-8379-320038c15ef5 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Waiting for the task: (returnval){ [ 1244.785819] env[62070]: value = "task-1122629" [ 1244.785819] env[62070]: _type = "Task" [ 1244.785819] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1244.792892] env[62070]: DEBUG oslo_vmware.api [None req-91434deb-f672-4a1a-8379-320038c15ef5 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Task: {'id': task-1122629, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1244.937728] env[62070]: DEBUG oslo_concurrency.lockutils [None req-068f41bc-f287-481f-8ce8-2a6d531e680f tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Lock "2502f29e-be96-497a-b98c-61e4d0a4e817" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 11.478s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1245.064677] env[62070]: DEBUG oslo_vmware.api [None req-74ddd37a-0b6e-4990-a59e-be80e23caec0 tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Task: {'id': task-1122627, 'name': Rename_Task, 'duration_secs': 0.130363} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1245.064950] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-74ddd37a-0b6e-4990-a59e-be80e23caec0 tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] [instance: db83dddb-25dd-44ba-9046-53ba09b4db92] Powering on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1245.065209] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fcd03eed-6fcd-4289-9723-6d97501dc07e {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1245.072044] env[62070]: DEBUG oslo_vmware.api [None req-74ddd37a-0b6e-4990-a59e-be80e23caec0 tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Waiting for the task: (returnval){ [ 1245.072044] env[62070]: value = "task-1122630" [ 1245.072044] env[62070]: _type = "Task" [ 1245.072044] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1245.079939] env[62070]: DEBUG oslo_vmware.api [None req-74ddd37a-0b6e-4990-a59e-be80e23caec0 tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Task: {'id': task-1122630, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1245.295508] env[62070]: DEBUG oslo_vmware.api [None req-91434deb-f672-4a1a-8379-320038c15ef5 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Task: {'id': task-1122629, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.128722} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1245.295508] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-91434deb-f672-4a1a-8379-320038c15ef5 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Deleted the datastore file {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1245.295711] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-91434deb-f672-4a1a-8379-320038c15ef5 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] [instance: 689ba5a3-2253-4dc1-a47b-db152f86abd3] Deleted contents of the VM from datastore datastore2 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1245.295711] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-91434deb-f672-4a1a-8379-320038c15ef5 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] [instance: 689ba5a3-2253-4dc1-a47b-db152f86abd3] Instance destroyed {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1245.295884] env[62070]: INFO nova.compute.manager [None req-91434deb-f672-4a1a-8379-320038c15ef5 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] [instance: 689ba5a3-2253-4dc1-a47b-db152f86abd3] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1245.296149] env[62070]: DEBUG oslo.service.loopingcall [None req-91434deb-f672-4a1a-8379-320038c15ef5 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1245.296390] env[62070]: DEBUG nova.compute.manager [-] [instance: 689ba5a3-2253-4dc1-a47b-db152f86abd3] Deallocating network for instance {{(pid=62070) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1245.296507] env[62070]: DEBUG nova.network.neutron [-] [instance: 689ba5a3-2253-4dc1-a47b-db152f86abd3] deallocate_for_instance() {{(pid=62070) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1245.581621] env[62070]: DEBUG oslo_vmware.api [None req-74ddd37a-0b6e-4990-a59e-be80e23caec0 tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Task: {'id': task-1122630, 'name': PowerOnVM_Task, 'duration_secs': 0.430879} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1245.581919] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-74ddd37a-0b6e-4990-a59e-be80e23caec0 tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] [instance: db83dddb-25dd-44ba-9046-53ba09b4db92] Powered on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1245.582151] env[62070]: INFO nova.compute.manager [None req-74ddd37a-0b6e-4990-a59e-be80e23caec0 tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] [instance: db83dddb-25dd-44ba-9046-53ba09b4db92] Took 4.79 seconds to spawn the instance on the hypervisor. 
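
The ReconfigVM_Task, Rename_Task, PowerOnVM_Task and CopyVirtualDisk_Task records above all follow the same invoke-then-poll shape driven through oslo.vmware: the asynchronous vSphere *_Task call returns a task reference, and wait_for_task() then polls it (the "_poll_task ... progress is N%" lines) until it reports success or error. Below is a minimal illustrative sketch of that pattern, not Nova's actual code; `session` is assumed to be an existing oslo_vmware.api.VMwareAPISession and `vm_ref` a VirtualMachine managed-object reference (both placeholders):

    def power_on(session, vm_ref):
        """Start a VM and block until the vSphere task completes."""
        # The *_Task vSphere calls are asynchronous and return a task
        # managed-object reference rather than a result.
        task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
        # wait_for_task() polls the task state on the server (the
        # "_poll_task ... progress is N%" entries in this log) and returns
        # the final task info on success, raising on a task error.
        return session.wait_for_task(task)
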
[ 1245.582361] env[62070]: DEBUG nova.compute.manager [None req-74ddd37a-0b6e-4990-a59e-be80e23caec0 tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] [instance: db83dddb-25dd-44ba-9046-53ba09b4db92] Checking state {{(pid=62070) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1245.583129] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd1accd3-71b0-4c2d-9eef-f95c1ef42cf8 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1245.714230] env[62070]: DEBUG nova.compute.manager [req-55010b08-f9bc-4b39-a5e2-aff581eba773 req-3ccf81c9-c9b1-44c0-b571-010c4252b98e service nova] [instance: 689ba5a3-2253-4dc1-a47b-db152f86abd3] Received event network-vif-deleted-f7325230-82ee-4433-865b-d8d7ced03602 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1245.714438] env[62070]: INFO nova.compute.manager [req-55010b08-f9bc-4b39-a5e2-aff581eba773 req-3ccf81c9-c9b1-44c0-b571-010c4252b98e service nova] [instance: 689ba5a3-2253-4dc1-a47b-db152f86abd3] Neutron deleted interface f7325230-82ee-4433-865b-d8d7ced03602; detaching it from the instance and deleting it from the info cache [ 1245.714626] env[62070]: DEBUG nova.network.neutron [req-55010b08-f9bc-4b39-a5e2-aff581eba773 req-3ccf81c9-c9b1-44c0-b571-010c4252b98e service nova] [instance: 689ba5a3-2253-4dc1-a47b-db152f86abd3] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1246.098751] env[62070]: INFO nova.compute.manager [None req-74ddd37a-0b6e-4990-a59e-be80e23caec0 tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] [instance: db83dddb-25dd-44ba-9046-53ba09b4db92] Took 11.47 seconds to build instance. [ 1246.193192] env[62070]: DEBUG nova.network.neutron [-] [instance: 689ba5a3-2253-4dc1-a47b-db152f86abd3] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1246.217645] env[62070]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d2237571-debf-48c3-812e-9872836fed6c {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1246.227150] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db423777-ed7a-45cd-ba9a-e16cacd896f1 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1246.253883] env[62070]: DEBUG nova.compute.manager [req-55010b08-f9bc-4b39-a5e2-aff581eba773 req-3ccf81c9-c9b1-44c0-b571-010c4252b98e service nova] [instance: 689ba5a3-2253-4dc1-a47b-db152f86abd3] Detach interface failed, port_id=f7325230-82ee-4433-865b-d8d7ced03602, reason: Instance 689ba5a3-2253-4dc1-a47b-db152f86abd3 could not be found. 
{{(pid=62070) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1246.486402] env[62070]: INFO nova.compute.manager [None req-9f7f3755-81f8-4349-aa7b-1037ac03323e tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] [instance: db83dddb-25dd-44ba-9046-53ba09b4db92] Rebuilding instance [ 1246.533518] env[62070]: DEBUG nova.compute.manager [None req-9f7f3755-81f8-4349-aa7b-1037ac03323e tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] [instance: db83dddb-25dd-44ba-9046-53ba09b4db92] Checking state {{(pid=62070) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1246.534492] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c735b009-cba1-4bd2-9704-c2609f1a5607 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1246.600977] env[62070]: DEBUG oslo_concurrency.lockutils [None req-74ddd37a-0b6e-4990-a59e-be80e23caec0 tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Lock "db83dddb-25dd-44ba-9046-53ba09b4db92" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 12.975s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1246.695730] env[62070]: INFO nova.compute.manager [-] [instance: 689ba5a3-2253-4dc1-a47b-db152f86abd3] Took 1.40 seconds to deallocate network for instance. [ 1247.046103] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-9f7f3755-81f8-4349-aa7b-1037ac03323e tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] [instance: db83dddb-25dd-44ba-9046-53ba09b4db92] Powering off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1247.046539] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-74377cc7-ecca-4921-8393-5c267c010e0e {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1247.055354] env[62070]: DEBUG oslo_vmware.api [None req-9f7f3755-81f8-4349-aa7b-1037ac03323e tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Waiting for the task: (returnval){ [ 1247.055354] env[62070]: value = "task-1122631" [ 1247.055354] env[62070]: _type = "Task" [ 1247.055354] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1247.066550] env[62070]: DEBUG oslo_vmware.api [None req-9f7f3755-81f8-4349-aa7b-1037ac03323e tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Task: {'id': task-1122631, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1247.203403] env[62070]: DEBUG oslo_concurrency.lockutils [None req-91434deb-f672-4a1a-8379-320038c15ef5 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1247.203838] env[62070]: DEBUG oslo_concurrency.lockutils [None req-91434deb-f672-4a1a-8379-320038c15ef5 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1247.204233] env[62070]: DEBUG nova.objects.instance [None req-91434deb-f672-4a1a-8379-320038c15ef5 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Lazy-loading 'resources' on Instance uuid 689ba5a3-2253-4dc1-a47b-db152f86abd3 {{(pid=62070) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1247.565203] env[62070]: DEBUG oslo_vmware.api [None req-9f7f3755-81f8-4349-aa7b-1037ac03323e tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Task: {'id': task-1122631, 'name': PowerOffVM_Task, 'duration_secs': 0.182672} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1247.565465] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-9f7f3755-81f8-4349-aa7b-1037ac03323e tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] [instance: db83dddb-25dd-44ba-9046-53ba09b4db92] Powered off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1247.565685] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-9f7f3755-81f8-4349-aa7b-1037ac03323e tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] [instance: db83dddb-25dd-44ba-9046-53ba09b4db92] Destroying instance {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1247.566444] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03fafddc-2a09-4586-b267-0111a2c6ed4e {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1247.572792] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-9f7f3755-81f8-4349-aa7b-1037ac03323e tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] [instance: db83dddb-25dd-44ba-9046-53ba09b4db92] Unregistering the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1247.573025] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-53f3470d-6e1f-45d4-8519-4d5483779d47 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1247.595827] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-9f7f3755-81f8-4349-aa7b-1037ac03323e tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] [instance: db83dddb-25dd-44ba-9046-53ba09b4db92] Unregistered the VM {{(pid=62070) 
_destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1247.596054] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-9f7f3755-81f8-4349-aa7b-1037ac03323e tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] [instance: db83dddb-25dd-44ba-9046-53ba09b4db92] Deleting contents of the VM from datastore datastore2 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1247.596242] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-9f7f3755-81f8-4349-aa7b-1037ac03323e tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Deleting the datastore file [datastore2] db83dddb-25dd-44ba-9046-53ba09b4db92 {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1247.596483] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6f0b18b0-d30d-44d7-88a8-ff8f7cb98e80 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1247.603158] env[62070]: DEBUG oslo_vmware.api [None req-9f7f3755-81f8-4349-aa7b-1037ac03323e tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Waiting for the task: (returnval){ [ 1247.603158] env[62070]: value = "task-1122633" [ 1247.603158] env[62070]: _type = "Task" [ 1247.603158] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1247.611645] env[62070]: DEBUG oslo_vmware.api [None req-9f7f3755-81f8-4349-aa7b-1037ac03323e tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Task: {'id': task-1122633, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1247.765799] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7215aaf5-6cc1-4017-acd1-85dbf7738364 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1247.773043] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd4136ad-4b8a-4e8b-8b37-150b86e53c00 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1247.801883] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21d096fb-a2e8-4a54-afa8-5c97ea0d1416 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1247.809289] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d898049e-9d2b-47bf-bb3f-161001bd67eb {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1247.821943] env[62070]: DEBUG nova.compute.provider_tree [None req-91434deb-f672-4a1a-8379-320038c15ef5 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1248.112500] env[62070]: DEBUG oslo_vmware.api [None req-9f7f3755-81f8-4349-aa7b-1037ac03323e tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Task: {'id': task-1122633, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.097736} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1248.112756] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-9f7f3755-81f8-4349-aa7b-1037ac03323e tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Deleted the datastore file {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1248.112945] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-9f7f3755-81f8-4349-aa7b-1037ac03323e tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] [instance: db83dddb-25dd-44ba-9046-53ba09b4db92] Deleted contents of the VM from datastore datastore2 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1248.113147] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-9f7f3755-81f8-4349-aa7b-1037ac03323e tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] [instance: db83dddb-25dd-44ba-9046-53ba09b4db92] Instance destroyed {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1248.324680] env[62070]: DEBUG nova.scheduler.client.report [None req-91434deb-f672-4a1a-8379-320038c15ef5 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1248.829747] env[62070]: DEBUG oslo_concurrency.lockutils [None req-91434deb-f672-4a1a-8379-320038c15ef5 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.626s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1248.847396] env[62070]: INFO nova.scheduler.client.report [None req-91434deb-f672-4a1a-8379-320038c15ef5 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Deleted allocations for instance 689ba5a3-2253-4dc1-a47b-db152f86abd3 [ 1249.144333] env[62070]: DEBUG nova.virt.hardware [None req-9f7f3755-81f8-4349-aa7b-1037ac03323e tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T09:21:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta 
ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-03T09:21:17Z,direct_url=,disk_format='vmdk',id=43ea607c-7ece-4601-9b11-75c6a16aa7dd,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9d42cb2bbadf40d6b35f237f71234611',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-03T09:21:18Z,virtual_size=,visibility=), allow threads: False {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1249.144605] env[62070]: DEBUG nova.virt.hardware [None req-9f7f3755-81f8-4349-aa7b-1037ac03323e tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Flavor limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1249.144769] env[62070]: DEBUG nova.virt.hardware [None req-9f7f3755-81f8-4349-aa7b-1037ac03323e tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Image limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1249.144957] env[62070]: DEBUG nova.virt.hardware [None req-9f7f3755-81f8-4349-aa7b-1037ac03323e tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Flavor pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1249.145472] env[62070]: DEBUG nova.virt.hardware [None req-9f7f3755-81f8-4349-aa7b-1037ac03323e tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Image pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1249.145667] env[62070]: DEBUG nova.virt.hardware [None req-9f7f3755-81f8-4349-aa7b-1037ac03323e tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1249.145888] env[62070]: DEBUG nova.virt.hardware [None req-9f7f3755-81f8-4349-aa7b-1037ac03323e tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1249.146073] env[62070]: DEBUG nova.virt.hardware [None req-9f7f3755-81f8-4349-aa7b-1037ac03323e tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1249.146249] env[62070]: DEBUG nova.virt.hardware [None req-9f7f3755-81f8-4349-aa7b-1037ac03323e tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Got 1 possible topologies {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1249.146416] env[62070]: DEBUG nova.virt.hardware [None req-9f7f3755-81f8-4349-aa7b-1037ac03323e tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1249.146592] env[62070]: DEBUG nova.virt.hardware [None req-9f7f3755-81f8-4349-aa7b-1037ac03323e 
tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1249.147758] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78f62f61-3c5d-4e97-be80-7a3c39392433 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.155489] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-554e51ba-7a47-4c9c-b95b-dab175f62385 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.168648] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-9f7f3755-81f8-4349-aa7b-1037ac03323e tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] [instance: db83dddb-25dd-44ba-9046-53ba09b4db92] Instance VIF info [] {{(pid=62070) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1249.174124] env[62070]: DEBUG oslo.service.loopingcall [None req-9f7f3755-81f8-4349-aa7b-1037ac03323e tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1249.174368] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: db83dddb-25dd-44ba-9046-53ba09b4db92] Creating VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1249.174579] env[62070]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a60747f6-1d57-47f9-b16a-9c8111422d15 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.190204] env[62070]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1249.190204] env[62070]: value = "task-1122634" [ 1249.190204] env[62070]: _type = "Task" [ 1249.190204] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1249.197429] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1122634, 'name': CreateVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1249.355399] env[62070]: DEBUG oslo_concurrency.lockutils [None req-91434deb-f672-4a1a-8379-320038c15ef5 tempest-AttachVolumeNegativeTest-303800157 tempest-AttachVolumeNegativeTest-303800157-project-member] Lock "689ba5a3-2253-4dc1-a47b-db152f86abd3" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.180s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1249.700586] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1122634, 'name': CreateVM_Task, 'duration_secs': 0.31582} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1249.700914] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: db83dddb-25dd-44ba-9046-53ba09b4db92] Created VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1249.701163] env[62070]: DEBUG oslo_concurrency.lockutils [None req-9f7f3755-81f8-4349-aa7b-1037ac03323e tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1249.701347] env[62070]: DEBUG oslo_concurrency.lockutils [None req-9f7f3755-81f8-4349-aa7b-1037ac03323e tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Acquired lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1249.701680] env[62070]: DEBUG oslo_concurrency.lockutils [None req-9f7f3755-81f8-4349-aa7b-1037ac03323e tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1249.701930] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bce489d1-6df7-4056-9250-15b54717bcd0 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.706283] env[62070]: DEBUG oslo_vmware.api [None req-9f7f3755-81f8-4349-aa7b-1037ac03323e tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Waiting for the task: (returnval){ [ 1249.706283] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]525f398f-5d70-4863-705f-66322c3c9eb9" [ 1249.706283] env[62070]: _type = "Task" [ 1249.706283] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1249.713473] env[62070]: DEBUG oslo_vmware.api [None req-9f7f3755-81f8-4349-aa7b-1037ac03323e tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]525f398f-5d70-4863-705f-66322c3c9eb9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1250.216809] env[62070]: DEBUG oslo_vmware.api [None req-9f7f3755-81f8-4349-aa7b-1037ac03323e tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]525f398f-5d70-4863-705f-66322c3c9eb9, 'name': SearchDatastore_Task, 'duration_secs': 0.008951} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1250.217134] env[62070]: DEBUG oslo_concurrency.lockutils [None req-9f7f3755-81f8-4349-aa7b-1037ac03323e tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Releasing lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1250.217376] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-9f7f3755-81f8-4349-aa7b-1037ac03323e tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] [instance: db83dddb-25dd-44ba-9046-53ba09b4db92] Processing image 43ea607c-7ece-4601-9b11-75c6a16aa7dd {{(pid=62070) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1250.217618] env[62070]: DEBUG oslo_concurrency.lockutils [None req-9f7f3755-81f8-4349-aa7b-1037ac03323e tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1250.217785] env[62070]: DEBUG oslo_concurrency.lockutils [None req-9f7f3755-81f8-4349-aa7b-1037ac03323e tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Acquired lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1250.218013] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-9f7f3755-81f8-4349-aa7b-1037ac03323e tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1250.218290] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d4448e22-a35b-46bd-a2e8-18a56d058fc5 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.226025] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-9f7f3755-81f8-4349-aa7b-1037ac03323e tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1250.226212] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-9f7f3755-81f8-4349-aa7b-1037ac03323e tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62070) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1250.226875] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f0f0dd08-ab0a-4c3b-aaba-1736b20a25be {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.232094] env[62070]: DEBUG oslo_vmware.api [None req-9f7f3755-81f8-4349-aa7b-1037ac03323e tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Waiting for the task: (returnval){ [ 1250.232094] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]52b6de39-6df2-293c-7012-323f0dff3802" [ 1250.232094] env[62070]: _type = "Task" [ 1250.232094] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1250.239554] env[62070]: DEBUG oslo_vmware.api [None req-9f7f3755-81f8-4349-aa7b-1037ac03323e tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52b6de39-6df2-293c-7012-323f0dff3802, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1250.742955] env[62070]: DEBUG oslo_vmware.api [None req-9f7f3755-81f8-4349-aa7b-1037ac03323e tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52b6de39-6df2-293c-7012-323f0dff3802, 'name': SearchDatastore_Task, 'duration_secs': 0.008301} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1250.743338] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c546cce8-dbb5-4db2-bec0-c235a30b0c2b {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.747769] env[62070]: DEBUG oslo_vmware.api [None req-9f7f3755-81f8-4349-aa7b-1037ac03323e tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Waiting for the task: (returnval){ [ 1250.747769] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]52240761-ec20-4b8c-2ebd-de865bf35214" [ 1250.747769] env[62070]: _type = "Task" [ 1250.747769] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1250.754805] env[62070]: DEBUG oslo_vmware.api [None req-9f7f3755-81f8-4349-aa7b-1037ac03323e tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52240761-ec20-4b8c-2ebd-de865bf35214, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1251.258472] env[62070]: DEBUG oslo_vmware.api [None req-9f7f3755-81f8-4349-aa7b-1037ac03323e tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52240761-ec20-4b8c-2ebd-de865bf35214, 'name': SearchDatastore_Task, 'duration_secs': 0.008865} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1251.258691] env[62070]: DEBUG oslo_concurrency.lockutils [None req-9f7f3755-81f8-4349-aa7b-1037ac03323e tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Releasing lock "[datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1251.258933] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-9f7f3755-81f8-4349-aa7b-1037ac03323e tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore2] db83dddb-25dd-44ba-9046-53ba09b4db92/db83dddb-25dd-44ba-9046-53ba09b4db92.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1251.259206] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-dcb5060c-cb2a-4c2f-b3d5-b454dd876ca3 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1251.265815] env[62070]: DEBUG oslo_vmware.api [None req-9f7f3755-81f8-4349-aa7b-1037ac03323e tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Waiting for the task: (returnval){ [ 1251.265815] env[62070]: value = "task-1122636" [ 1251.265815] env[62070]: _type = "Task" [ 1251.265815] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1251.272575] env[62070]: DEBUG oslo_vmware.api [None req-9f7f3755-81f8-4349-aa7b-1037ac03323e tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Task: {'id': task-1122636, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1251.776243] env[62070]: DEBUG oslo_vmware.api [None req-9f7f3755-81f8-4349-aa7b-1037ac03323e tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Task: {'id': task-1122636, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.454533} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1251.776612] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-9f7f3755-81f8-4349-aa7b-1037ac03323e tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/43ea607c-7ece-4601-9b11-75c6a16aa7dd/43ea607c-7ece-4601-9b11-75c6a16aa7dd.vmdk to [datastore2] db83dddb-25dd-44ba-9046-53ba09b4db92/db83dddb-25dd-44ba-9046-53ba09b4db92.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 1251.776686] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-9f7f3755-81f8-4349-aa7b-1037ac03323e tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] [instance: db83dddb-25dd-44ba-9046-53ba09b4db92] Extending root virtual disk to 1048576 {{(pid=62070) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1251.777040] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-fbcd45ef-ea15-4a2e-bbbc-0d67d13bed0b {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1251.783302] env[62070]: DEBUG oslo_vmware.api [None req-9f7f3755-81f8-4349-aa7b-1037ac03323e tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Waiting for the task: (returnval){ [ 1251.783302] env[62070]: value = "task-1122637" [ 1251.783302] env[62070]: _type = "Task" [ 1251.783302] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1251.791869] env[62070]: DEBUG oslo_vmware.api [None req-9f7f3755-81f8-4349-aa7b-1037ac03323e tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Task: {'id': task-1122637, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1252.293948] env[62070]: DEBUG oslo_vmware.api [None req-9f7f3755-81f8-4349-aa7b-1037ac03323e tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Task: {'id': task-1122637, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.055726} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1252.294262] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-9f7f3755-81f8-4349-aa7b-1037ac03323e tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] [instance: db83dddb-25dd-44ba-9046-53ba09b4db92] Extended root virtual disk {{(pid=62070) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1252.295106] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9075654-3938-49ea-90a9-9c40db6754ea {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1252.313944] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-9f7f3755-81f8-4349-aa7b-1037ac03323e tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] [instance: db83dddb-25dd-44ba-9046-53ba09b4db92] Reconfiguring VM instance instance-00000073 to attach disk [datastore2] db83dddb-25dd-44ba-9046-53ba09b4db92/db83dddb-25dd-44ba-9046-53ba09b4db92.vmdk or device None with type sparse {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1252.314263] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-91ebbd8c-3e35-4b4a-be19-dd247f3bd5e4 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1252.332830] env[62070]: DEBUG oslo_vmware.api [None req-9f7f3755-81f8-4349-aa7b-1037ac03323e tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Waiting for the task: (returnval){ [ 1252.332830] env[62070]: value = "task-1122638" [ 1252.332830] env[62070]: _type = "Task" [ 1252.332830] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1252.340344] env[62070]: DEBUG oslo_vmware.api [None req-9f7f3755-81f8-4349-aa7b-1037ac03323e tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Task: {'id': task-1122638, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1252.844156] env[62070]: DEBUG oslo_vmware.api [None req-9f7f3755-81f8-4349-aa7b-1037ac03323e tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Task: {'id': task-1122638, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1253.343739] env[62070]: DEBUG oslo_vmware.api [None req-9f7f3755-81f8-4349-aa7b-1037ac03323e tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Task: {'id': task-1122638, 'name': ReconfigVM_Task, 'duration_secs': 0.822835} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1253.344061] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-9f7f3755-81f8-4349-aa7b-1037ac03323e tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] [instance: db83dddb-25dd-44ba-9046-53ba09b4db92] Reconfigured VM instance instance-00000073 to attach disk [datastore2] db83dddb-25dd-44ba-9046-53ba09b4db92/db83dddb-25dd-44ba-9046-53ba09b4db92.vmdk or device None with type sparse {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1253.344690] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-eaabbdf2-515d-4789-96c2-464c513df423 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1253.350965] env[62070]: DEBUG oslo_vmware.api [None req-9f7f3755-81f8-4349-aa7b-1037ac03323e tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Waiting for the task: (returnval){ [ 1253.350965] env[62070]: value = "task-1122640" [ 1253.350965] env[62070]: _type = "Task" [ 1253.350965] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1253.358371] env[62070]: DEBUG oslo_vmware.api [None req-9f7f3755-81f8-4349-aa7b-1037ac03323e tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Task: {'id': task-1122640, 'name': Rename_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1253.860946] env[62070]: DEBUG oslo_vmware.api [None req-9f7f3755-81f8-4349-aa7b-1037ac03323e tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Task: {'id': task-1122640, 'name': Rename_Task, 'duration_secs': 0.234556} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1253.861334] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-9f7f3755-81f8-4349-aa7b-1037ac03323e tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] [instance: db83dddb-25dd-44ba-9046-53ba09b4db92] Powering on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1253.861500] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1e796a67-5500-433e-a05a-7e8c62dcae20 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1253.867162] env[62070]: DEBUG oslo_vmware.api [None req-9f7f3755-81f8-4349-aa7b-1037ac03323e tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Waiting for the task: (returnval){ [ 1253.867162] env[62070]: value = "task-1122641" [ 1253.867162] env[62070]: _type = "Task" [ 1253.867162] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1253.874408] env[62070]: DEBUG oslo_vmware.api [None req-9f7f3755-81f8-4349-aa7b-1037ac03323e tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Task: {'id': task-1122641, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1254.377201] env[62070]: DEBUG oslo_vmware.api [None req-9f7f3755-81f8-4349-aa7b-1037ac03323e tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Task: {'id': task-1122641, 'name': PowerOnVM_Task, 'duration_secs': 0.472851} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1254.377495] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-9f7f3755-81f8-4349-aa7b-1037ac03323e tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] [instance: db83dddb-25dd-44ba-9046-53ba09b4db92] Powered on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1254.377700] env[62070]: DEBUG nova.compute.manager [None req-9f7f3755-81f8-4349-aa7b-1037ac03323e tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] [instance: db83dddb-25dd-44ba-9046-53ba09b4db92] Checking state {{(pid=62070) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1254.378525] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c9c7bd5-cd1d-49db-adcb-b9d928113193 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1254.894855] env[62070]: DEBUG oslo_concurrency.lockutils [None req-9f7f3755-81f8-4349-aa7b-1037ac03323e tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1254.895102] env[62070]: DEBUG oslo_concurrency.lockutils [None req-9f7f3755-81f8-4349-aa7b-1037ac03323e tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1254.895292] env[62070]: DEBUG nova.objects.instance [None req-9f7f3755-81f8-4349-aa7b-1037ac03323e tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] [instance: db83dddb-25dd-44ba-9046-53ba09b4db92] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62070) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1255.224885] env[62070]: DEBUG oslo_concurrency.lockutils [None req-d27a1d2b-989b-419a-a153-849043e170bf tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Acquiring lock "db83dddb-25dd-44ba-9046-53ba09b4db92" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1255.225121] env[62070]: DEBUG oslo_concurrency.lockutils [None req-d27a1d2b-989b-419a-a153-849043e170bf tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Lock "db83dddb-25dd-44ba-9046-53ba09b4db92" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62070) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1255.225335] env[62070]: DEBUG oslo_concurrency.lockutils [None req-d27a1d2b-989b-419a-a153-849043e170bf tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Acquiring lock "db83dddb-25dd-44ba-9046-53ba09b4db92-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1255.225526] env[62070]: DEBUG oslo_concurrency.lockutils [None req-d27a1d2b-989b-419a-a153-849043e170bf tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Lock "db83dddb-25dd-44ba-9046-53ba09b4db92-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1255.225702] env[62070]: DEBUG oslo_concurrency.lockutils [None req-d27a1d2b-989b-419a-a153-849043e170bf tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Lock "db83dddb-25dd-44ba-9046-53ba09b4db92-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1255.227727] env[62070]: INFO nova.compute.manager [None req-d27a1d2b-989b-419a-a153-849043e170bf tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] [instance: db83dddb-25dd-44ba-9046-53ba09b4db92] Terminating instance [ 1255.229395] env[62070]: DEBUG oslo_concurrency.lockutils [None req-d27a1d2b-989b-419a-a153-849043e170bf tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Acquiring lock "refresh_cache-db83dddb-25dd-44ba-9046-53ba09b4db92" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1255.229556] env[62070]: DEBUG oslo_concurrency.lockutils [None req-d27a1d2b-989b-419a-a153-849043e170bf tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Acquired lock "refresh_cache-db83dddb-25dd-44ba-9046-53ba09b4db92" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1255.229769] env[62070]: DEBUG nova.network.neutron [None req-d27a1d2b-989b-419a-a153-849043e170bf tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] [instance: db83dddb-25dd-44ba-9046-53ba09b4db92] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1255.747520] env[62070]: DEBUG nova.network.neutron [None req-d27a1d2b-989b-419a-a153-849043e170bf tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] [instance: db83dddb-25dd-44ba-9046-53ba09b4db92] Instance cache missing network info. 
{{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1255.810239] env[62070]: DEBUG nova.network.neutron [None req-d27a1d2b-989b-419a-a153-849043e170bf tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] [instance: db83dddb-25dd-44ba-9046-53ba09b4db92] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1255.903304] env[62070]: DEBUG oslo_concurrency.lockutils [None req-9f7f3755-81f8-4349-aa7b-1037ac03323e tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.008s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1256.313515] env[62070]: DEBUG oslo_concurrency.lockutils [None req-d27a1d2b-989b-419a-a153-849043e170bf tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Releasing lock "refresh_cache-db83dddb-25dd-44ba-9046-53ba09b4db92" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1256.313989] env[62070]: DEBUG nova.compute.manager [None req-d27a1d2b-989b-419a-a153-849043e170bf tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] [instance: db83dddb-25dd-44ba-9046-53ba09b4db92] Start destroying the instance on the hypervisor. {{(pid=62070) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1256.314236] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-d27a1d2b-989b-419a-a153-849043e170bf tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] [instance: db83dddb-25dd-44ba-9046-53ba09b4db92] Destroying instance {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1256.315209] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2ee2a6c-d3a9-4e58-95c3-939d13c36404 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1256.323483] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-d27a1d2b-989b-419a-a153-849043e170bf tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] [instance: db83dddb-25dd-44ba-9046-53ba09b4db92] Powering off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1256.323764] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8b3e1576-1966-4343-b111-65f05119bfdb {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1256.329844] env[62070]: DEBUG oslo_vmware.api [None req-d27a1d2b-989b-419a-a153-849043e170bf tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Waiting for the task: (returnval){ [ 1256.329844] env[62070]: value = "task-1122643" [ 1256.329844] env[62070]: _type = "Task" [ 1256.329844] env[62070]: } to complete. 
{{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1256.337305] env[62070]: DEBUG oslo_vmware.api [None req-d27a1d2b-989b-419a-a153-849043e170bf tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Task: {'id': task-1122643, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1256.839667] env[62070]: DEBUG oslo_vmware.api [None req-d27a1d2b-989b-419a-a153-849043e170bf tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Task: {'id': task-1122643, 'name': PowerOffVM_Task, 'duration_secs': 0.178328} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1256.840122] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-d27a1d2b-989b-419a-a153-849043e170bf tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] [instance: db83dddb-25dd-44ba-9046-53ba09b4db92] Powered off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1256.840311] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-d27a1d2b-989b-419a-a153-849043e170bf tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] [instance: db83dddb-25dd-44ba-9046-53ba09b4db92] Unregistering the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1256.840565] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c1c002c9-aba3-4a4f-9e3d-3d0729801bba {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1256.864397] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-d27a1d2b-989b-419a-a153-849043e170bf tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] [instance: db83dddb-25dd-44ba-9046-53ba09b4db92] Unregistered the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1256.864625] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-d27a1d2b-989b-419a-a153-849043e170bf tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] [instance: db83dddb-25dd-44ba-9046-53ba09b4db92] Deleting contents of the VM from datastore datastore2 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1256.864826] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-d27a1d2b-989b-419a-a153-849043e170bf tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Deleting the datastore file [datastore2] db83dddb-25dd-44ba-9046-53ba09b4db92 {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1256.865157] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c069e8f5-ad4b-4cb7-9ab1-37492ffe6799 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1256.871757] env[62070]: DEBUG oslo_vmware.api [None req-d27a1d2b-989b-419a-a153-849043e170bf tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Waiting for the task: (returnval){ [ 1256.871757] env[62070]: value = "task-1122645" [ 1256.871757] env[62070]: _type = "Task" [ 1256.871757] env[62070]: } to complete. 
{{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1256.879877] env[62070]: DEBUG oslo_vmware.api [None req-d27a1d2b-989b-419a-a153-849043e170bf tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Task: {'id': task-1122645, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1257.381505] env[62070]: DEBUG oslo_vmware.api [None req-d27a1d2b-989b-419a-a153-849043e170bf tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Task: {'id': task-1122645, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.085695} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1257.382888] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-d27a1d2b-989b-419a-a153-849043e170bf tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Deleted the datastore file {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1257.382888] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-d27a1d2b-989b-419a-a153-849043e170bf tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] [instance: db83dddb-25dd-44ba-9046-53ba09b4db92] Deleted contents of the VM from datastore datastore2 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1257.382888] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-d27a1d2b-989b-419a-a153-849043e170bf tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] [instance: db83dddb-25dd-44ba-9046-53ba09b4db92] Instance destroyed {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1257.382888] env[62070]: INFO nova.compute.manager [None req-d27a1d2b-989b-419a-a153-849043e170bf tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] [instance: db83dddb-25dd-44ba-9046-53ba09b4db92] Took 1.07 seconds to destroy the instance on the hypervisor. [ 1257.383122] env[62070]: DEBUG oslo.service.loopingcall [None req-d27a1d2b-989b-419a-a153-849043e170bf tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1257.383265] env[62070]: DEBUG nova.compute.manager [-] [instance: db83dddb-25dd-44ba-9046-53ba09b4db92] Deallocating network for instance {{(pid=62070) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1257.383364] env[62070]: DEBUG nova.network.neutron [-] [instance: db83dddb-25dd-44ba-9046-53ba09b4db92] deallocate_for_instance() {{(pid=62070) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1257.398045] env[62070]: DEBUG nova.network.neutron [-] [instance: db83dddb-25dd-44ba-9046-53ba09b4db92] Instance cache missing network info. 
{{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1257.900955] env[62070]: DEBUG nova.network.neutron [-] [instance: db83dddb-25dd-44ba-9046-53ba09b4db92] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1258.403817] env[62070]: INFO nova.compute.manager [-] [instance: db83dddb-25dd-44ba-9046-53ba09b4db92] Took 1.02 seconds to deallocate network for instance. [ 1258.913120] env[62070]: DEBUG oslo_concurrency.lockutils [None req-d27a1d2b-989b-419a-a153-849043e170bf tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1258.913361] env[62070]: DEBUG oslo_concurrency.lockutils [None req-d27a1d2b-989b-419a-a153-849043e170bf tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1258.913603] env[62070]: DEBUG nova.objects.instance [None req-d27a1d2b-989b-419a-a153-849043e170bf tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Lazy-loading 'resources' on Instance uuid db83dddb-25dd-44ba-9046-53ba09b4db92 {{(pid=62070) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1259.469126] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35af36ab-3bb5-48c6-ac0a-598d8e42ed6d {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1259.477065] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d715637-787c-46f3-978a-2876b2d9ff5c {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1259.506884] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e0d9404-1946-423a-9b55-f0b0ae38a37d {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1259.514078] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44e99c07-338a-4f17-94d4-c649a40bf23e {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1259.527079] env[62070]: DEBUG nova.compute.provider_tree [None req-d27a1d2b-989b-419a-a153-849043e170bf tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1260.030475] env[62070]: DEBUG nova.scheduler.client.report [None req-d27a1d2b-989b-419a-a153-849043e170bf tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 
'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1260.536013] env[62070]: DEBUG oslo_concurrency.lockutils [None req-d27a1d2b-989b-419a-a153-849043e170bf tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.623s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1260.556743] env[62070]: INFO nova.scheduler.client.report [None req-d27a1d2b-989b-419a-a153-849043e170bf tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Deleted allocations for instance db83dddb-25dd-44ba-9046-53ba09b4db92 [ 1261.065017] env[62070]: DEBUG oslo_concurrency.lockutils [None req-d27a1d2b-989b-419a-a153-849043e170bf tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Lock "db83dddb-25dd-44ba-9046-53ba09b4db92" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.840s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1261.172335] env[62070]: DEBUG oslo_concurrency.lockutils [None req-4e6a0c86-29ca-4d35-91e0-39160272a541 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Acquiring lock "229bb9ef-b8d8-40cb-a589-3aa280b904d7" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1261.172765] env[62070]: DEBUG oslo_concurrency.lockutils [None req-4e6a0c86-29ca-4d35-91e0-39160272a541 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Lock "229bb9ef-b8d8-40cb-a589-3aa280b904d7" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.001s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1261.173011] env[62070]: INFO nova.compute.manager [None req-4e6a0c86-29ca-4d35-91e0-39160272a541 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] [instance: 229bb9ef-b8d8-40cb-a589-3aa280b904d7] Shelving [ 1261.680871] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-4e6a0c86-29ca-4d35-91e0-39160272a541 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] [instance: 229bb9ef-b8d8-40cb-a589-3aa280b904d7] Powering off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1261.681266] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f92e32a1-b442-41b5-b100-79a3d0ebfd67 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1261.688590] env[62070]: DEBUG oslo_vmware.api [None req-4e6a0c86-29ca-4d35-91e0-39160272a541 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Waiting for the task: (returnval){ [ 1261.688590] 
env[62070]: value = "task-1122646" [ 1261.688590] env[62070]: _type = "Task" [ 1261.688590] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1261.697414] env[62070]: DEBUG oslo_vmware.api [None req-4e6a0c86-29ca-4d35-91e0-39160272a541 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Task: {'id': task-1122646, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1261.730158] env[62070]: DEBUG oslo_concurrency.lockutils [None req-0255ebd7-509d-452a-b950-f578b449e1fd tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Acquiring lock "2502f29e-be96-497a-b98c-61e4d0a4e817" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1261.730431] env[62070]: DEBUG oslo_concurrency.lockutils [None req-0255ebd7-509d-452a-b950-f578b449e1fd tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Lock "2502f29e-be96-497a-b98c-61e4d0a4e817" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1261.730653] env[62070]: DEBUG oslo_concurrency.lockutils [None req-0255ebd7-509d-452a-b950-f578b449e1fd tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Acquiring lock "2502f29e-be96-497a-b98c-61e4d0a4e817-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1261.730845] env[62070]: DEBUG oslo_concurrency.lockutils [None req-0255ebd7-509d-452a-b950-f578b449e1fd tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Lock "2502f29e-be96-497a-b98c-61e4d0a4e817-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1261.731055] env[62070]: DEBUG oslo_concurrency.lockutils [None req-0255ebd7-509d-452a-b950-f578b449e1fd tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Lock "2502f29e-be96-497a-b98c-61e4d0a4e817-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1261.733148] env[62070]: INFO nova.compute.manager [None req-0255ebd7-509d-452a-b950-f578b449e1fd tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] [instance: 2502f29e-be96-497a-b98c-61e4d0a4e817] Terminating instance [ 1261.734742] env[62070]: DEBUG oslo_concurrency.lockutils [None req-0255ebd7-509d-452a-b950-f578b449e1fd tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Acquiring lock "refresh_cache-2502f29e-be96-497a-b98c-61e4d0a4e817" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1261.734924] env[62070]: DEBUG oslo_concurrency.lockutils 
[None req-0255ebd7-509d-452a-b950-f578b449e1fd tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Acquired lock "refresh_cache-2502f29e-be96-497a-b98c-61e4d0a4e817" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1261.735110] env[62070]: DEBUG nova.network.neutron [None req-0255ebd7-509d-452a-b950-f578b449e1fd tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] [instance: 2502f29e-be96-497a-b98c-61e4d0a4e817] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1262.198647] env[62070]: DEBUG oslo_vmware.api [None req-4e6a0c86-29ca-4d35-91e0-39160272a541 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Task: {'id': task-1122646, 'name': PowerOffVM_Task, 'duration_secs': 0.195368} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1262.198875] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-4e6a0c86-29ca-4d35-91e0-39160272a541 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] [instance: 229bb9ef-b8d8-40cb-a589-3aa280b904d7] Powered off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1262.199653] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87115a07-680f-42cd-94e4-1f297b0c0fd7 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1262.216944] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b30e64b-706b-4ff3-a9b6-b837ef157f44 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1262.251550] env[62070]: DEBUG nova.network.neutron [None req-0255ebd7-509d-452a-b950-f578b449e1fd tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] [instance: 2502f29e-be96-497a-b98c-61e4d0a4e817] Instance cache missing network info. 
{{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1262.295963] env[62070]: DEBUG nova.network.neutron [None req-0255ebd7-509d-452a-b950-f578b449e1fd tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] [instance: 2502f29e-be96-497a-b98c-61e4d0a4e817] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1262.726733] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-4e6a0c86-29ca-4d35-91e0-39160272a541 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] [instance: 229bb9ef-b8d8-40cb-a589-3aa280b904d7] Creating Snapshot of the VM instance {{(pid=62070) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1262.727148] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-1a12859d-3693-4982-8879-0aa32209a38c {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1262.735519] env[62070]: DEBUG oslo_vmware.api [None req-4e6a0c86-29ca-4d35-91e0-39160272a541 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Waiting for the task: (returnval){ [ 1262.735519] env[62070]: value = "task-1122647" [ 1262.735519] env[62070]: _type = "Task" [ 1262.735519] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1262.743546] env[62070]: DEBUG oslo_vmware.api [None req-4e6a0c86-29ca-4d35-91e0-39160272a541 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Task: {'id': task-1122647, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1262.798403] env[62070]: DEBUG oslo_concurrency.lockutils [None req-0255ebd7-509d-452a-b950-f578b449e1fd tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Releasing lock "refresh_cache-2502f29e-be96-497a-b98c-61e4d0a4e817" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1262.798829] env[62070]: DEBUG nova.compute.manager [None req-0255ebd7-509d-452a-b950-f578b449e1fd tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] [instance: 2502f29e-be96-497a-b98c-61e4d0a4e817] Start destroying the instance on the hypervisor. 
{{(pid=62070) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1262.799040] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-0255ebd7-509d-452a-b950-f578b449e1fd tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] [instance: 2502f29e-be96-497a-b98c-61e4d0a4e817] Destroying instance {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1262.799926] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0aed66e0-d5ad-4fc4-9070-d11cd80b0c69 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1262.807063] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-0255ebd7-509d-452a-b950-f578b449e1fd tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] [instance: 2502f29e-be96-497a-b98c-61e4d0a4e817] Powering off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1262.807306] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-49d35239-b999-4a6e-b516-3815fbb04369 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1262.812512] env[62070]: DEBUG oslo_vmware.api [None req-0255ebd7-509d-452a-b950-f578b449e1fd tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Waiting for the task: (returnval){ [ 1262.812512] env[62070]: value = "task-1122648" [ 1262.812512] env[62070]: _type = "Task" [ 1262.812512] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1262.819858] env[62070]: DEBUG oslo_vmware.api [None req-0255ebd7-509d-452a-b950-f578b449e1fd tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Task: {'id': task-1122648, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1263.245836] env[62070]: DEBUG oslo_vmware.api [None req-4e6a0c86-29ca-4d35-91e0-39160272a541 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Task: {'id': task-1122647, 'name': CreateSnapshot_Task, 'duration_secs': 0.388946} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1263.246136] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-4e6a0c86-29ca-4d35-91e0-39160272a541 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] [instance: 229bb9ef-b8d8-40cb-a589-3aa280b904d7] Created Snapshot of the VM instance {{(pid=62070) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1263.246866] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-460e7125-ec9b-4849-b10c-bec069827403 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1263.321530] env[62070]: DEBUG oslo_vmware.api [None req-0255ebd7-509d-452a-b950-f578b449e1fd tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Task: {'id': task-1122648, 'name': PowerOffVM_Task, 'duration_secs': 0.190056} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1263.321787] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-0255ebd7-509d-452a-b950-f578b449e1fd tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] [instance: 2502f29e-be96-497a-b98c-61e4d0a4e817] Powered off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1263.321973] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-0255ebd7-509d-452a-b950-f578b449e1fd tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] [instance: 2502f29e-be96-497a-b98c-61e4d0a4e817] Unregistering the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1263.322245] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f362fa83-742e-4d64-9274-306a52f337dd {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1263.343954] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-0255ebd7-509d-452a-b950-f578b449e1fd tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] [instance: 2502f29e-be96-497a-b98c-61e4d0a4e817] Unregistered the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1263.344204] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-0255ebd7-509d-452a-b950-f578b449e1fd tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] [instance: 2502f29e-be96-497a-b98c-61e4d0a4e817] Deleting contents of the VM from datastore datastore2 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1263.344364] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-0255ebd7-509d-452a-b950-f578b449e1fd tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Deleting the datastore file [datastore2] 2502f29e-be96-497a-b98c-61e4d0a4e817 {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1263.344609] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a4c88e64-a0eb-449d-9c2f-edb1d3ba12dd {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1263.350733] env[62070]: DEBUG oslo_vmware.api [None req-0255ebd7-509d-452a-b950-f578b449e1fd tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Waiting for the task: (returnval){ [ 1263.350733] env[62070]: value = "task-1122650" [ 1263.350733] env[62070]: _type = "Task" [ 1263.350733] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1263.359401] env[62070]: DEBUG oslo_vmware.api [None req-0255ebd7-509d-452a-b950-f578b449e1fd tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Task: {'id': task-1122650, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1263.763919] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-4e6a0c86-29ca-4d35-91e0-39160272a541 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] [instance: 229bb9ef-b8d8-40cb-a589-3aa280b904d7] Creating linked-clone VM from snapshot {{(pid=62070) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1263.764319] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-4df05a48-cc7e-4347-8db9-6213fe593f96 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1263.771995] env[62070]: DEBUG oslo_vmware.api [None req-4e6a0c86-29ca-4d35-91e0-39160272a541 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Waiting for the task: (returnval){ [ 1263.771995] env[62070]: value = "task-1122651" [ 1263.771995] env[62070]: _type = "Task" [ 1263.771995] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1263.779671] env[62070]: DEBUG oslo_vmware.api [None req-4e6a0c86-29ca-4d35-91e0-39160272a541 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Task: {'id': task-1122651, 'name': CloneVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1263.864796] env[62070]: DEBUG oslo_vmware.api [None req-0255ebd7-509d-452a-b950-f578b449e1fd tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Task: {'id': task-1122650, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.091833} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1263.865309] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-0255ebd7-509d-452a-b950-f578b449e1fd tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Deleted the datastore file {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1263.865624] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-0255ebd7-509d-452a-b950-f578b449e1fd tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] [instance: 2502f29e-be96-497a-b98c-61e4d0a4e817] Deleted contents of the VM from datastore datastore2 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1263.866013] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-0255ebd7-509d-452a-b950-f578b449e1fd tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] [instance: 2502f29e-be96-497a-b98c-61e4d0a4e817] Instance destroyed {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1263.866371] env[62070]: INFO nova.compute.manager [None req-0255ebd7-509d-452a-b950-f578b449e1fd tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] [instance: 2502f29e-be96-497a-b98c-61e4d0a4e817] Took 1.07 seconds to destroy the instance on the hypervisor. 
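The records above show Nova driving vCenter through an oslo.vmware session: CreateSnapshot_Task, PowerOffVM_Task, UnregisterVM, DeleteDatastoreFile_Task and CloneVM_Task are each invoked and then polled until completion by wait_for_task. Below is a minimal sketch of that call pattern, assuming a reachable vCenter; VC_HOST, VC_USER, VC_PASS and the snapshot keyword values are placeholders, not values taken from this log, and the moref value only mirrors the "vm-245544"-style identifiers seen above.

# Sketch only: drive a vCenter task the way the entries above do, via
# oslo.vmware's session helpers. Credentials and the moref value are
# placeholders, not taken from this log.
from oslo_vmware import api, vim_util

session = api.VMwareAPISession(
    'VC_HOST', 'VC_USER', 'VC_PASS',
    api_retry_count=10,        # retry transient vCenter faults
    task_poll_interval=0.5)    # roughly the ~0.5s polling cadence seen above

# Build a managed-object reference for an existing VM.
vm_ref = vim_util.get_moref('vm-245544', 'VirtualMachine')

# Invoke the SOAP method, then block until the task finishes, as the
# CreateSnapshot_Task / _poll_task entries above do.
task = session.invoke_api(
    session.vim, 'CreateSnapshot_Task', vm_ref,
    name='example-snapshot',
    description='illustrative snapshot',
    memory=False, quiesce=False)

# Raises on task failure; returns the completed TaskInfo on success.
task_info = session.wait_for_task(task)

The same invoke-then-wait shape applies to the power-off, unregister, datastore-delete and clone tasks logged above; only the method name and arguments change.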
[ 1263.866803] env[62070]: DEBUG oslo.service.loopingcall [None req-0255ebd7-509d-452a-b950-f578b449e1fd tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1263.867160] env[62070]: DEBUG nova.compute.manager [-] [instance: 2502f29e-be96-497a-b98c-61e4d0a4e817] Deallocating network for instance {{(pid=62070) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1263.867343] env[62070]: DEBUG nova.network.neutron [-] [instance: 2502f29e-be96-497a-b98c-61e4d0a4e817] deallocate_for_instance() {{(pid=62070) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1263.884766] env[62070]: DEBUG nova.network.neutron [-] [instance: 2502f29e-be96-497a-b98c-61e4d0a4e817] Instance cache missing network info. {{(pid=62070) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1264.281594] env[62070]: DEBUG oslo_vmware.api [None req-4e6a0c86-29ca-4d35-91e0-39160272a541 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Task: {'id': task-1122651, 'name': CloneVM_Task} progress is 94%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1264.387427] env[62070]: DEBUG nova.network.neutron [-] [instance: 2502f29e-be96-497a-b98c-61e4d0a4e817] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1264.782515] env[62070]: DEBUG oslo_vmware.api [None req-4e6a0c86-29ca-4d35-91e0-39160272a541 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Task: {'id': task-1122651, 'name': CloneVM_Task} progress is 100%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1264.890054] env[62070]: INFO nova.compute.manager [-] [instance: 2502f29e-be96-497a-b98c-61e4d0a4e817] Took 1.02 seconds to deallocate network for instance. [ 1265.283997] env[62070]: DEBUG oslo_vmware.api [None req-4e6a0c86-29ca-4d35-91e0-39160272a541 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Task: {'id': task-1122651, 'name': CloneVM_Task, 'duration_secs': 1.043355} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1265.284324] env[62070]: INFO nova.virt.vmwareapi.vmops [None req-4e6a0c86-29ca-4d35-91e0-39160272a541 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] [instance: 229bb9ef-b8d8-40cb-a589-3aa280b904d7] Created linked-clone VM from snapshot [ 1265.285043] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3df4580d-751d-43db-80a3-5fda571a2e0a {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1265.294208] env[62070]: DEBUG nova.virt.vmwareapi.images [None req-4e6a0c86-29ca-4d35-91e0-39160272a541 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] [instance: 229bb9ef-b8d8-40cb-a589-3aa280b904d7] Uploading image 5c23c587-97d3-45f2-a734-71a0876d72cb {{(pid=62070) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:442}} [ 1265.346436] env[62070]: DEBUG oslo_vmware.rw_handles [None req-4e6a0c86-29ca-4d35-91e0-39160272a541 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1265.346436] env[62070]: value = "vm-245544" [ 1265.346436] env[62070]: _type = "VirtualMachine" [ 1265.346436] env[62070]: }. {{(pid=62070) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1265.346793] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-32928c8c-a427-48e8-a69d-dcc53025ddbf {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1265.353961] env[62070]: DEBUG oslo_vmware.rw_handles [None req-4e6a0c86-29ca-4d35-91e0-39160272a541 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Lease: (returnval){ [ 1265.353961] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]52d9b87a-238a-c59c-990f-6aca27bc631e" [ 1265.353961] env[62070]: _type = "HttpNfcLease" [ 1265.353961] env[62070]: } obtained for exporting VM: (result){ [ 1265.353961] env[62070]: value = "vm-245544" [ 1265.353961] env[62070]: _type = "VirtualMachine" [ 1265.353961] env[62070]: }. {{(pid=62070) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1265.354272] env[62070]: DEBUG oslo_vmware.api [None req-4e6a0c86-29ca-4d35-91e0-39160272a541 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Waiting for the lease: (returnval){ [ 1265.354272] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]52d9b87a-238a-c59c-990f-6aca27bc631e" [ 1265.354272] env[62070]: _type = "HttpNfcLease" [ 1265.354272] env[62070]: } to be ready. {{(pid=62070) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1265.360086] env[62070]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1265.360086] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]52d9b87a-238a-c59c-990f-6aca27bc631e" [ 1265.360086] env[62070]: _type = "HttpNfcLease" [ 1265.360086] env[62070]: } is initializing. 
{{(pid=62070) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1265.396089] env[62070]: DEBUG oslo_concurrency.lockutils [None req-0255ebd7-509d-452a-b950-f578b449e1fd tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1265.396324] env[62070]: DEBUG oslo_concurrency.lockutils [None req-0255ebd7-509d-452a-b950-f578b449e1fd tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1265.396544] env[62070]: DEBUG nova.objects.instance [None req-0255ebd7-509d-452a-b950-f578b449e1fd tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Lazy-loading 'resources' on Instance uuid 2502f29e-be96-497a-b98c-61e4d0a4e817 {{(pid=62070) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1265.862140] env[62070]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1265.862140] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]52d9b87a-238a-c59c-990f-6aca27bc631e" [ 1265.862140] env[62070]: _type = "HttpNfcLease" [ 1265.862140] env[62070]: } is ready. {{(pid=62070) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1265.862591] env[62070]: DEBUG oslo_vmware.rw_handles [None req-4e6a0c86-29ca-4d35-91e0-39160272a541 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1265.862591] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]52d9b87a-238a-c59c-990f-6aca27bc631e" [ 1265.862591] env[62070]: _type = "HttpNfcLease" [ 1265.862591] env[62070]: }. {{(pid=62070) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1265.863160] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a5382e3-465d-4634-aab3-82b16cdaf560 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1265.869755] env[62070]: DEBUG oslo_vmware.rw_handles [None req-4e6a0c86-29ca-4d35-91e0-39160272a541 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/522399ec-ef9c-bd97-965e-bf88028f4b40/disk-0.vmdk from lease info. {{(pid=62070) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1265.869942] env[62070]: DEBUG oslo_vmware.rw_handles [None req-4e6a0c86-29ca-4d35-91e0-39160272a541 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/522399ec-ef9c-bd97-965e-bf88028f4b40/disk-0.vmdk for reading. 
{{(pid=62070) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1265.958033] env[62070]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-ef953457-9670-4d04-81af-95bee974e929 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1265.971760] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7cf97d84-d74b-427a-8c40-6ed1cf087e70 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1265.984871] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-942ff145-5add-479b-93ce-87acfcdb4f4d {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1266.014585] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0a104f0-a22f-47b8-a529-ecbef2184fdf {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1266.021619] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0fa9c4a-5051-4a75-ae93-d8f3d32ac61e {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1266.034024] env[62070]: DEBUG nova.compute.provider_tree [None req-0255ebd7-509d-452a-b950-f578b449e1fd tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1266.537186] env[62070]: DEBUG nova.scheduler.client.report [None req-0255ebd7-509d-452a-b950-f578b449e1fd tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1267.043242] env[62070]: DEBUG oslo_concurrency.lockutils [None req-0255ebd7-509d-452a-b950-f578b449e1fd tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.647s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1267.061353] env[62070]: INFO nova.scheduler.client.report [None req-0255ebd7-509d-452a-b950-f578b449e1fd tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Deleted allocations for instance 2502f29e-be96-497a-b98c-61e4d0a4e817 [ 1267.570527] env[62070]: DEBUG oslo_concurrency.lockutils [None req-0255ebd7-509d-452a-b950-f578b449e1fd tempest-ServerShowV247Test-709344000 tempest-ServerShowV247Test-709344000-project-member] Lock "2502f29e-be96-497a-b98c-61e4d0a4e817" "released" by 
"nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.840s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1274.145485] env[62070]: DEBUG oslo_vmware.rw_handles [None req-4e6a0c86-29ca-4d35-91e0-39160272a541 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/522399ec-ef9c-bd97-965e-bf88028f4b40/disk-0.vmdk. {{(pid=62070) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1274.146516] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-091ece83-6ffb-40b7-89e5-9b3497416233 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1274.152744] env[62070]: DEBUG oslo_vmware.rw_handles [None req-4e6a0c86-29ca-4d35-91e0-39160272a541 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/522399ec-ef9c-bd97-965e-bf88028f4b40/disk-0.vmdk is in state: ready. {{(pid=62070) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1274.152913] env[62070]: ERROR oslo_vmware.rw_handles [None req-4e6a0c86-29ca-4d35-91e0-39160272a541 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/522399ec-ef9c-bd97-965e-bf88028f4b40/disk-0.vmdk due to incomplete transfer. [ 1274.153155] env[62070]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-373f98af-0343-4b6e-ad85-7bb45d75a1e5 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1274.159430] env[62070]: DEBUG oslo_vmware.rw_handles [None req-4e6a0c86-29ca-4d35-91e0-39160272a541 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/522399ec-ef9c-bd97-965e-bf88028f4b40/disk-0.vmdk. 
{{(pid=62070) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1274.159625] env[62070]: DEBUG nova.virt.vmwareapi.images [None req-4e6a0c86-29ca-4d35-91e0-39160272a541 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] [instance: 229bb9ef-b8d8-40cb-a589-3aa280b904d7] Uploaded image 5c23c587-97d3-45f2-a734-71a0876d72cb to the Glance image server {{(pid=62070) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:473}} [ 1274.162038] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-4e6a0c86-29ca-4d35-91e0-39160272a541 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] [instance: 229bb9ef-b8d8-40cb-a589-3aa280b904d7] Destroying the VM {{(pid=62070) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1358}} [ 1274.162270] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-6b19dab0-c2cb-420d-bf3d-e40f50f4ce59 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1274.168529] env[62070]: DEBUG oslo_vmware.api [None req-4e6a0c86-29ca-4d35-91e0-39160272a541 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Waiting for the task: (returnval){ [ 1274.168529] env[62070]: value = "task-1122653" [ 1274.168529] env[62070]: _type = "Task" [ 1274.168529] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1274.176596] env[62070]: DEBUG oslo_vmware.api [None req-4e6a0c86-29ca-4d35-91e0-39160272a541 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Task: {'id': task-1122653, 'name': Destroy_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1274.678843] env[62070]: DEBUG oslo_vmware.api [None req-4e6a0c86-29ca-4d35-91e0-39160272a541 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Task: {'id': task-1122653, 'name': Destroy_Task} progress is 100%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1275.178614] env[62070]: DEBUG oslo_vmware.api [None req-4e6a0c86-29ca-4d35-91e0-39160272a541 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Task: {'id': task-1122653, 'name': Destroy_Task, 'duration_secs': 0.510827} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1275.179057] env[62070]: INFO nova.virt.vmwareapi.vm_util [None req-4e6a0c86-29ca-4d35-91e0-39160272a541 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] [instance: 229bb9ef-b8d8-40cb-a589-3aa280b904d7] Destroyed the VM [ 1275.179134] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-4e6a0c86-29ca-4d35-91e0-39160272a541 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] [instance: 229bb9ef-b8d8-40cb-a589-3aa280b904d7] Deleting Snapshot of the VM instance {{(pid=62070) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1275.179388] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-505fbd1c-882d-42be-8992-fd67719b53d3 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1275.186390] env[62070]: DEBUG oslo_vmware.api [None req-4e6a0c86-29ca-4d35-91e0-39160272a541 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Waiting for the task: (returnval){ [ 1275.186390] env[62070]: value = "task-1122654" [ 1275.186390] env[62070]: _type = "Task" [ 1275.186390] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1275.193794] env[62070]: DEBUG oslo_vmware.api [None req-4e6a0c86-29ca-4d35-91e0-39160272a541 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Task: {'id': task-1122654, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1275.696429] env[62070]: DEBUG oslo_vmware.api [None req-4e6a0c86-29ca-4d35-91e0-39160272a541 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Task: {'id': task-1122654, 'name': RemoveSnapshot_Task, 'duration_secs': 0.375394} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1275.696687] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-4e6a0c86-29ca-4d35-91e0-39160272a541 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] [instance: 229bb9ef-b8d8-40cb-a589-3aa280b904d7] Deleted Snapshot of the VM instance {{(pid=62070) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1275.696978] env[62070]: DEBUG nova.compute.manager [None req-4e6a0c86-29ca-4d35-91e0-39160272a541 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] [instance: 229bb9ef-b8d8-40cb-a589-3aa280b904d7] Checking state {{(pid=62070) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1275.697769] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a83cbae5-5dc6-4ae7-866a-59f7075991af {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.209825] env[62070]: INFO nova.compute.manager [None req-4e6a0c86-29ca-4d35-91e0-39160272a541 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] [instance: 229bb9ef-b8d8-40cb-a589-3aa280b904d7] Shelve offloading [ 1276.211898] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-4e6a0c86-29ca-4d35-91e0-39160272a541 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] [instance: 229bb9ef-b8d8-40cb-a589-3aa280b904d7] Powering off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1276.212153] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-97e04d43-94c1-4e61-92dd-5a3f052787f9 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.220566] env[62070]: DEBUG oslo_vmware.api [None req-4e6a0c86-29ca-4d35-91e0-39160272a541 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Waiting for the task: (returnval){ [ 1276.220566] env[62070]: value = "task-1122655" [ 1276.220566] env[62070]: _type = "Task" [ 1276.220566] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1276.228650] env[62070]: DEBUG oslo_vmware.api [None req-4e6a0c86-29ca-4d35-91e0-39160272a541 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Task: {'id': task-1122655, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1276.730554] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-4e6a0c86-29ca-4d35-91e0-39160272a541 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] [instance: 229bb9ef-b8d8-40cb-a589-3aa280b904d7] VM already powered off {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1509}} [ 1276.730786] env[62070]: DEBUG nova.compute.manager [None req-4e6a0c86-29ca-4d35-91e0-39160272a541 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] [instance: 229bb9ef-b8d8-40cb-a589-3aa280b904d7] Checking state {{(pid=62070) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1276.731611] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0343087-49f5-4421-8de2-2cae4deaa1cb {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.737431] env[62070]: DEBUG oslo_concurrency.lockutils [None req-4e6a0c86-29ca-4d35-91e0-39160272a541 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Acquiring lock "refresh_cache-229bb9ef-b8d8-40cb-a589-3aa280b904d7" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1276.737600] env[62070]: DEBUG oslo_concurrency.lockutils [None req-4e6a0c86-29ca-4d35-91e0-39160272a541 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Acquired lock "refresh_cache-229bb9ef-b8d8-40cb-a589-3aa280b904d7" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1276.737773] env[62070]: DEBUG nova.network.neutron [None req-4e6a0c86-29ca-4d35-91e0-39160272a541 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] [instance: 229bb9ef-b8d8-40cb-a589-3aa280b904d7] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1277.438399] env[62070]: DEBUG nova.network.neutron [None req-4e6a0c86-29ca-4d35-91e0-39160272a541 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] [instance: 229bb9ef-b8d8-40cb-a589-3aa280b904d7] Updating instance_info_cache with network_info: [{"id": "d5bcacdd-b774-4af1-aa33-c5f4ec9198f2", "address": "fa:16:3e:38:d5:88", "network": {"id": "b9ef8f6c-bbd6-409d-a591-ad584e5e028f", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-599171324-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.228", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ca25fba006b740f2a86fe10e4abe9400", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa8c2f93-f287-41b3-adb6-4942a7ea2a0b", "external-id": "nsx-vlan-transportzone-363", "segmentation_id": 363, 
"bound_drivers": {"0": "nsxv3"}}, "devname": "tapd5bcacdd-b7", "ovs_interfaceid": "d5bcacdd-b774-4af1-aa33-c5f4ec9198f2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1277.941509] env[62070]: DEBUG oslo_concurrency.lockutils [None req-4e6a0c86-29ca-4d35-91e0-39160272a541 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Releasing lock "refresh_cache-229bb9ef-b8d8-40cb-a589-3aa280b904d7" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1278.148939] env[62070]: DEBUG nova.compute.manager [req-b496df37-5095-46de-99de-ab13b936962d req-8ec2d687-dcb8-4264-a6e9-375ea89b7ee7 service nova] [instance: 229bb9ef-b8d8-40cb-a589-3aa280b904d7] Received event network-vif-unplugged-d5bcacdd-b774-4af1-aa33-c5f4ec9198f2 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1278.149188] env[62070]: DEBUG oslo_concurrency.lockutils [req-b496df37-5095-46de-99de-ab13b936962d req-8ec2d687-dcb8-4264-a6e9-375ea89b7ee7 service nova] Acquiring lock "229bb9ef-b8d8-40cb-a589-3aa280b904d7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1278.149427] env[62070]: DEBUG oslo_concurrency.lockutils [req-b496df37-5095-46de-99de-ab13b936962d req-8ec2d687-dcb8-4264-a6e9-375ea89b7ee7 service nova] Lock "229bb9ef-b8d8-40cb-a589-3aa280b904d7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1278.149601] env[62070]: DEBUG oslo_concurrency.lockutils [req-b496df37-5095-46de-99de-ab13b936962d req-8ec2d687-dcb8-4264-a6e9-375ea89b7ee7 service nova] Lock "229bb9ef-b8d8-40cb-a589-3aa280b904d7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1278.149772] env[62070]: DEBUG nova.compute.manager [req-b496df37-5095-46de-99de-ab13b936962d req-8ec2d687-dcb8-4264-a6e9-375ea89b7ee7 service nova] [instance: 229bb9ef-b8d8-40cb-a589-3aa280b904d7] No waiting events found dispatching network-vif-unplugged-d5bcacdd-b774-4af1-aa33-c5f4ec9198f2 {{(pid=62070) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1278.149947] env[62070]: WARNING nova.compute.manager [req-b496df37-5095-46de-99de-ab13b936962d req-8ec2d687-dcb8-4264-a6e9-375ea89b7ee7 service nova] [instance: 229bb9ef-b8d8-40cb-a589-3aa280b904d7] Received unexpected event network-vif-unplugged-d5bcacdd-b774-4af1-aa33-c5f4ec9198f2 for instance with vm_state shelved and task_state shelving_offloading. 
[ 1278.233258] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-4e6a0c86-29ca-4d35-91e0-39160272a541 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] [instance: 229bb9ef-b8d8-40cb-a589-3aa280b904d7] Destroying instance {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1278.234227] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f6c26ee-da7f-4877-b673-f7b13a421e11 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1278.241688] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-4e6a0c86-29ca-4d35-91e0-39160272a541 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] [instance: 229bb9ef-b8d8-40cb-a589-3aa280b904d7] Unregistering the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1278.241921] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-460b1c2f-56ba-49a1-b9f8-bf17aec6a8a0 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1278.450160] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-4e6a0c86-29ca-4d35-91e0-39160272a541 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] [instance: 229bb9ef-b8d8-40cb-a589-3aa280b904d7] Unregistered the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1278.450617] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-4e6a0c86-29ca-4d35-91e0-39160272a541 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] [instance: 229bb9ef-b8d8-40cb-a589-3aa280b904d7] Deleting contents of the VM from datastore datastore1 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1278.450617] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-4e6a0c86-29ca-4d35-91e0-39160272a541 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Deleting the datastore file [datastore1] 229bb9ef-b8d8-40cb-a589-3aa280b904d7 {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1278.450934] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ba87dd97-98d7-4b78-ad65-21ad891212de {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1278.457617] env[62070]: DEBUG oslo_vmware.api [None req-4e6a0c86-29ca-4d35-91e0-39160272a541 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Waiting for the task: (returnval){ [ 1278.457617] env[62070]: value = "task-1122657" [ 1278.457617] env[62070]: _type = "Task" [ 1278.457617] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1278.465217] env[62070]: DEBUG oslo_vmware.api [None req-4e6a0c86-29ca-4d35-91e0-39160272a541 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Task: {'id': task-1122657, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1278.971152] env[62070]: DEBUG oslo_vmware.api [None req-4e6a0c86-29ca-4d35-91e0-39160272a541 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Task: {'id': task-1122657, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.148423} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1278.971515] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-4e6a0c86-29ca-4d35-91e0-39160272a541 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Deleted the datastore file {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1278.971773] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-4e6a0c86-29ca-4d35-91e0-39160272a541 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] [instance: 229bb9ef-b8d8-40cb-a589-3aa280b904d7] Deleted contents of the VM from datastore datastore1 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1278.972024] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-4e6a0c86-29ca-4d35-91e0-39160272a541 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] [instance: 229bb9ef-b8d8-40cb-a589-3aa280b904d7] Instance destroyed {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1278.997636] env[62070]: INFO nova.scheduler.client.report [None req-4e6a0c86-29ca-4d35-91e0-39160272a541 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Deleted allocations for instance 229bb9ef-b8d8-40cb-a589-3aa280b904d7 [ 1279.502321] env[62070]: DEBUG oslo_concurrency.lockutils [None req-4e6a0c86-29ca-4d35-91e0-39160272a541 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1279.502778] env[62070]: DEBUG oslo_concurrency.lockutils [None req-4e6a0c86-29ca-4d35-91e0-39160272a541 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1279.502822] env[62070]: DEBUG nova.objects.instance [None req-4e6a0c86-29ca-4d35-91e0-39160272a541 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Lazy-loading 'resources' on Instance uuid 229bb9ef-b8d8-40cb-a589-3aa280b904d7 {{(pid=62070) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1280.006153] env[62070]: DEBUG nova.objects.instance [None req-4e6a0c86-29ca-4d35-91e0-39160272a541 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Lazy-loading 'numa_topology' on Instance uuid 229bb9ef-b8d8-40cb-a589-3aa280b904d7 {{(pid=62070) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1280.173558] env[62070]: DEBUG nova.compute.manager 
[req-c03c5e77-bfb0-4027-9e0e-b137080a9301 req-dc27a1fb-4372-47df-93a4-d99501710f51 service nova] [instance: 229bb9ef-b8d8-40cb-a589-3aa280b904d7] Received event network-changed-d5bcacdd-b774-4af1-aa33-c5f4ec9198f2 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1280.173770] env[62070]: DEBUG nova.compute.manager [req-c03c5e77-bfb0-4027-9e0e-b137080a9301 req-dc27a1fb-4372-47df-93a4-d99501710f51 service nova] [instance: 229bb9ef-b8d8-40cb-a589-3aa280b904d7] Refreshing instance network info cache due to event network-changed-d5bcacdd-b774-4af1-aa33-c5f4ec9198f2. {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1280.173983] env[62070]: DEBUG oslo_concurrency.lockutils [req-c03c5e77-bfb0-4027-9e0e-b137080a9301 req-dc27a1fb-4372-47df-93a4-d99501710f51 service nova] Acquiring lock "refresh_cache-229bb9ef-b8d8-40cb-a589-3aa280b904d7" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1280.174148] env[62070]: DEBUG oslo_concurrency.lockutils [req-c03c5e77-bfb0-4027-9e0e-b137080a9301 req-dc27a1fb-4372-47df-93a4-d99501710f51 service nova] Acquired lock "refresh_cache-229bb9ef-b8d8-40cb-a589-3aa280b904d7" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1280.174313] env[62070]: DEBUG nova.network.neutron [req-c03c5e77-bfb0-4027-9e0e-b137080a9301 req-dc27a1fb-4372-47df-93a4-d99501710f51 service nova] [instance: 229bb9ef-b8d8-40cb-a589-3aa280b904d7] Refreshing network info cache for port d5bcacdd-b774-4af1-aa33-c5f4ec9198f2 {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1280.508998] env[62070]: DEBUG nova.objects.base [None req-4e6a0c86-29ca-4d35-91e0-39160272a541 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Object Instance<229bb9ef-b8d8-40cb-a589-3aa280b904d7> lazy-loaded attributes: resources,numa_topology {{(pid=62070) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 1280.523593] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16401432-bc57-449a-8015-cd70f0b92c03 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1280.531687] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f6ca593-a57b-4c9a-af9a-1f4c8df1de13 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1280.560527] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7f26559-2ad3-4e2f-9799-2c8fd6697f5c {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1280.567541] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba99d674-2be7-4412-a017-bf8a9fbca42f {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1280.580203] env[62070]: DEBUG nova.compute.provider_tree [None req-4e6a0c86-29ca-4d35-91e0-39160272a541 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1280.891741] env[62070]: DEBUG nova.network.neutron [req-c03c5e77-bfb0-4027-9e0e-b137080a9301 req-dc27a1fb-4372-47df-93a4-d99501710f51 service nova] [instance: 229bb9ef-b8d8-40cb-a589-3aa280b904d7] Updated VIF entry in instance network info cache for port d5bcacdd-b774-4af1-aa33-c5f4ec9198f2. {{(pid=62070) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1280.892114] env[62070]: DEBUG nova.network.neutron [req-c03c5e77-bfb0-4027-9e0e-b137080a9301 req-dc27a1fb-4372-47df-93a4-d99501710f51 service nova] [instance: 229bb9ef-b8d8-40cb-a589-3aa280b904d7] Updating instance_info_cache with network_info: [{"id": "d5bcacdd-b774-4af1-aa33-c5f4ec9198f2", "address": "fa:16:3e:38:d5:88", "network": {"id": "b9ef8f6c-bbd6-409d-a591-ad584e5e028f", "bridge": null, "label": "tempest-AttachVolumeShelveTestJSON-599171324-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.228", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ca25fba006b740f2a86fe10e4abe9400", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tapd5bcacdd-b7", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1281.083824] env[62070]: DEBUG nova.scheduler.client.report [None req-4e6a0c86-29ca-4d35-91e0-39160272a541 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1281.394899] env[62070]: DEBUG oslo_concurrency.lockutils [req-c03c5e77-bfb0-4027-9e0e-b137080a9301 req-dc27a1fb-4372-47df-93a4-d99501710f51 service nova] Releasing lock "refresh_cache-229bb9ef-b8d8-40cb-a589-3aa280b904d7" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1281.592113] env[62070]: DEBUG oslo_concurrency.lockutils [None req-4e6a0c86-29ca-4d35-91e0-39160272a541 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.088s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1282.098993] env[62070]: DEBUG oslo_concurrency.lockutils [None req-4e6a0c86-29ca-4d35-91e0-39160272a541 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Lock 
"229bb9ef-b8d8-40cb-a589-3aa280b904d7" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 20.926s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1282.282736] env[62070]: DEBUG oslo_concurrency.lockutils [None req-57775761-0de8-48be-8b8b-255fd17db7d5 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Acquiring lock "229bb9ef-b8d8-40cb-a589-3aa280b904d7" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1282.283021] env[62070]: DEBUG oslo_concurrency.lockutils [None req-57775761-0de8-48be-8b8b-255fd17db7d5 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Lock "229bb9ef-b8d8-40cb-a589-3aa280b904d7" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1282.283227] env[62070]: INFO nova.compute.manager [None req-57775761-0de8-48be-8b8b-255fd17db7d5 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] [instance: 229bb9ef-b8d8-40cb-a589-3aa280b904d7] Unshelving [ 1283.306023] env[62070]: DEBUG oslo_concurrency.lockutils [None req-57775761-0de8-48be-8b8b-255fd17db7d5 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1283.306290] env[62070]: DEBUG oslo_concurrency.lockutils [None req-57775761-0de8-48be-8b8b-255fd17db7d5 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1283.306331] env[62070]: DEBUG nova.objects.instance [None req-57775761-0de8-48be-8b8b-255fd17db7d5 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Lazy-loading 'pci_requests' on Instance uuid 229bb9ef-b8d8-40cb-a589-3aa280b904d7 {{(pid=62070) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1283.809625] env[62070]: DEBUG nova.objects.instance [None req-57775761-0de8-48be-8b8b-255fd17db7d5 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Lazy-loading 'numa_topology' on Instance uuid 229bb9ef-b8d8-40cb-a589-3aa280b904d7 {{(pid=62070) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1284.312284] env[62070]: INFO nova.compute.claims [None req-57775761-0de8-48be-8b8b-255fd17db7d5 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] [instance: 229bb9ef-b8d8-40cb-a589-3aa280b904d7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1285.346887] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c289e0e0-f1f1-4b34-b237-1195bf900fa4 {{(pid=62070) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1285.354394] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f715e450-ff2a-4683-ae52-c25daae4d876 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1285.383914] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e71de279-8d63-4afb-a69b-3f553516fee1 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1285.390863] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fcc5e561-39bf-45f4-aa12-146952bada56 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1285.403353] env[62070]: DEBUG nova.compute.provider_tree [None req-57775761-0de8-48be-8b8b-255fd17db7d5 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1285.905987] env[62070]: DEBUG nova.scheduler.client.report [None req-57775761-0de8-48be-8b8b-255fd17db7d5 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1286.411477] env[62070]: DEBUG oslo_concurrency.lockutils [None req-57775761-0de8-48be-8b8b-255fd17db7d5 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.105s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1286.439453] env[62070]: INFO nova.network.neutron [None req-57775761-0de8-48be-8b8b-255fd17db7d5 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] [instance: 229bb9ef-b8d8-40cb-a589-3aa280b904d7] Updating port d5bcacdd-b774-4af1-aa33-c5f4ec9198f2 with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 1287.836362] env[62070]: DEBUG nova.compute.manager [req-821b8ce2-72d9-4166-b457-81e5ebfe1b14 req-dc378132-c68d-441a-8ff4-5db58a5e92e0 service nova] [instance: 229bb9ef-b8d8-40cb-a589-3aa280b904d7] Received event network-vif-plugged-d5bcacdd-b774-4af1-aa33-c5f4ec9198f2 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1287.836649] env[62070]: DEBUG oslo_concurrency.lockutils [req-821b8ce2-72d9-4166-b457-81e5ebfe1b14 req-dc378132-c68d-441a-8ff4-5db58a5e92e0 service nova] Acquiring lock "229bb9ef-b8d8-40cb-a589-3aa280b904d7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62070) 
inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1287.836921] env[62070]: DEBUG oslo_concurrency.lockutils [req-821b8ce2-72d9-4166-b457-81e5ebfe1b14 req-dc378132-c68d-441a-8ff4-5db58a5e92e0 service nova] Lock "229bb9ef-b8d8-40cb-a589-3aa280b904d7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1287.836962] env[62070]: DEBUG oslo_concurrency.lockutils [req-821b8ce2-72d9-4166-b457-81e5ebfe1b14 req-dc378132-c68d-441a-8ff4-5db58a5e92e0 service nova] Lock "229bb9ef-b8d8-40cb-a589-3aa280b904d7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1287.837127] env[62070]: DEBUG nova.compute.manager [req-821b8ce2-72d9-4166-b457-81e5ebfe1b14 req-dc378132-c68d-441a-8ff4-5db58a5e92e0 service nova] [instance: 229bb9ef-b8d8-40cb-a589-3aa280b904d7] No waiting events found dispatching network-vif-plugged-d5bcacdd-b774-4af1-aa33-c5f4ec9198f2 {{(pid=62070) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1287.837303] env[62070]: WARNING nova.compute.manager [req-821b8ce2-72d9-4166-b457-81e5ebfe1b14 req-dc378132-c68d-441a-8ff4-5db58a5e92e0 service nova] [instance: 229bb9ef-b8d8-40cb-a589-3aa280b904d7] Received unexpected event network-vif-plugged-d5bcacdd-b774-4af1-aa33-c5f4ec9198f2 for instance with vm_state shelved_offloaded and task_state spawning. [ 1287.921542] env[62070]: DEBUG oslo_concurrency.lockutils [None req-57775761-0de8-48be-8b8b-255fd17db7d5 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Acquiring lock "refresh_cache-229bb9ef-b8d8-40cb-a589-3aa280b904d7" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1287.921733] env[62070]: DEBUG oslo_concurrency.lockutils [None req-57775761-0de8-48be-8b8b-255fd17db7d5 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Acquired lock "refresh_cache-229bb9ef-b8d8-40cb-a589-3aa280b904d7" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1287.921915] env[62070]: DEBUG nova.network.neutron [None req-57775761-0de8-48be-8b8b-255fd17db7d5 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] [instance: 229bb9ef-b8d8-40cb-a589-3aa280b904d7] Building network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1288.622644] env[62070]: DEBUG nova.network.neutron [None req-57775761-0de8-48be-8b8b-255fd17db7d5 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] [instance: 229bb9ef-b8d8-40cb-a589-3aa280b904d7] Updating instance_info_cache with network_info: [{"id": "d5bcacdd-b774-4af1-aa33-c5f4ec9198f2", "address": "fa:16:3e:38:d5:88", "network": {"id": "b9ef8f6c-bbd6-409d-a591-ad584e5e028f", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-599171324-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, 
"floating_ips": [{"address": "10.180.180.228", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ca25fba006b740f2a86fe10e4abe9400", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa8c2f93-f287-41b3-adb6-4942a7ea2a0b", "external-id": "nsx-vlan-transportzone-363", "segmentation_id": 363, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd5bcacdd-b7", "ovs_interfaceid": "d5bcacdd-b774-4af1-aa33-c5f4ec9198f2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1289.124982] env[62070]: DEBUG oslo_concurrency.lockutils [None req-57775761-0de8-48be-8b8b-255fd17db7d5 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Releasing lock "refresh_cache-229bb9ef-b8d8-40cb-a589-3aa280b904d7" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1289.152009] env[62070]: DEBUG nova.virt.hardware [None req-57775761-0de8-48be-8b8b-255fd17db7d5 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-03T09:21:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='adea3382cb179bee9b9ed289c88f2b71',container_format='bare',created_at=2024-10-03T09:35:08Z,direct_url=,disk_format='vmdk',id=5c23c587-97d3-45f2-a734-71a0876d72cb,min_disk=1,min_ram=0,name='tempest-AttachVolumeShelveTestJSON-server-25230177-shelved',owner='ca25fba006b740f2a86fe10e4abe9400',properties=ImageMetaProps,protected=,size=31669760,status='active',tags=,updated_at=2024-10-03T09:35:22Z,virtual_size=,visibility=), allow threads: False {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1289.152271] env[62070]: DEBUG nova.virt.hardware [None req-57775761-0de8-48be-8b8b-255fd17db7d5 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Flavor limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1289.152436] env[62070]: DEBUG nova.virt.hardware [None req-57775761-0de8-48be-8b8b-255fd17db7d5 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Image limits 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1289.152624] env[62070]: DEBUG nova.virt.hardware [None req-57775761-0de8-48be-8b8b-255fd17db7d5 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Flavor pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1289.152773] env[62070]: DEBUG nova.virt.hardware [None req-57775761-0de8-48be-8b8b-255fd17db7d5 tempest-AttachVolumeShelveTestJSON-1287321211 
tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Image pref 0:0:0 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1289.152922] env[62070]: DEBUG nova.virt.hardware [None req-57775761-0de8-48be-8b8b-255fd17db7d5 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62070) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1289.153148] env[62070]: DEBUG nova.virt.hardware [None req-57775761-0de8-48be-8b8b-255fd17db7d5 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1289.153314] env[62070]: DEBUG nova.virt.hardware [None req-57775761-0de8-48be-8b8b-255fd17db7d5 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1289.153491] env[62070]: DEBUG nova.virt.hardware [None req-57775761-0de8-48be-8b8b-255fd17db7d5 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Got 1 possible topologies {{(pid=62070) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1289.153657] env[62070]: DEBUG nova.virt.hardware [None req-57775761-0de8-48be-8b8b-255fd17db7d5 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1289.153835] env[62070]: DEBUG nova.virt.hardware [None req-57775761-0de8-48be-8b8b-255fd17db7d5 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62070) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1289.154692] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f042eff6-a9ab-4c01-8267-5125cd6e8c16 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1289.162701] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7955e05-0acf-4c95-b31e-03c6eee6174e {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1289.176178] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-57775761-0de8-48be-8b8b-255fd17db7d5 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] [instance: 229bb9ef-b8d8-40cb-a589-3aa280b904d7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:38:d5:88', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'aa8c2f93-f287-41b3-adb6-4942a7ea2a0b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd5bcacdd-b774-4af1-aa33-c5f4ec9198f2', 'vif_model': 'vmxnet3'}] {{(pid=62070) build_virtual_machine 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1289.183301] env[62070]: DEBUG oslo.service.loopingcall [None req-57775761-0de8-48be-8b8b-255fd17db7d5 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1289.183521] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 229bb9ef-b8d8-40cb-a589-3aa280b904d7] Creating VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1289.183712] env[62070]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-640a6df5-2908-4c09-84ab-58f3acd35f62 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1289.201562] env[62070]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1289.201562] env[62070]: value = "task-1122658" [ 1289.201562] env[62070]: _type = "Task" [ 1289.201562] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1289.209256] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1122658, 'name': CreateVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1289.711694] env[62070]: DEBUG oslo_vmware.api [-] Task: {'id': task-1122658, 'name': CreateVM_Task, 'duration_secs': 0.298193} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1289.711871] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 229bb9ef-b8d8-40cb-a589-3aa280b904d7] Created VM on the ESX host {{(pid=62070) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1289.712510] env[62070]: DEBUG oslo_concurrency.lockutils [None req-57775761-0de8-48be-8b8b-255fd17db7d5 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/5c23c587-97d3-45f2-a734-71a0876d72cb" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1289.712687] env[62070]: DEBUG oslo_concurrency.lockutils [None req-57775761-0de8-48be-8b8b-255fd17db7d5 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Acquired lock "[datastore2] devstack-image-cache_base/5c23c587-97d3-45f2-a734-71a0876d72cb" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1289.713085] env[62070]: DEBUG oslo_concurrency.lockutils [None req-57775761-0de8-48be-8b8b-255fd17db7d5 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/5c23c587-97d3-45f2-a734-71a0876d72cb" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1289.713340] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7d99194b-32e4-4d31-bb21-e15246246664 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1289.717463] env[62070]: DEBUG oslo_vmware.api [None req-57775761-0de8-48be-8b8b-255fd17db7d5 
tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Waiting for the task: (returnval){ [ 1289.717463] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]528333d6-989a-5b9e-5094-100eeb2b029f" [ 1289.717463] env[62070]: _type = "Task" [ 1289.717463] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1289.724790] env[62070]: DEBUG oslo_vmware.api [None req-57775761-0de8-48be-8b8b-255fd17db7d5 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]528333d6-989a-5b9e-5094-100eeb2b029f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1289.864920] env[62070]: DEBUG nova.compute.manager [req-0fcb5a99-1bb9-4d03-9d09-9e7b5c2a3719 req-620a1c6b-d359-4ef0-ad01-ff4bfcc73623 service nova] [instance: 229bb9ef-b8d8-40cb-a589-3aa280b904d7] Received event network-changed-d5bcacdd-b774-4af1-aa33-c5f4ec9198f2 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1289.864920] env[62070]: DEBUG nova.compute.manager [req-0fcb5a99-1bb9-4d03-9d09-9e7b5c2a3719 req-620a1c6b-d359-4ef0-ad01-ff4bfcc73623 service nova] [instance: 229bb9ef-b8d8-40cb-a589-3aa280b904d7] Refreshing instance network info cache due to event network-changed-d5bcacdd-b774-4af1-aa33-c5f4ec9198f2. {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1289.864920] env[62070]: DEBUG oslo_concurrency.lockutils [req-0fcb5a99-1bb9-4d03-9d09-9e7b5c2a3719 req-620a1c6b-d359-4ef0-ad01-ff4bfcc73623 service nova] Acquiring lock "refresh_cache-229bb9ef-b8d8-40cb-a589-3aa280b904d7" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1289.865095] env[62070]: DEBUG oslo_concurrency.lockutils [req-0fcb5a99-1bb9-4d03-9d09-9e7b5c2a3719 req-620a1c6b-d359-4ef0-ad01-ff4bfcc73623 service nova] Acquired lock "refresh_cache-229bb9ef-b8d8-40cb-a589-3aa280b904d7" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1289.865266] env[62070]: DEBUG nova.network.neutron [req-0fcb5a99-1bb9-4d03-9d09-9e7b5c2a3719 req-620a1c6b-d359-4ef0-ad01-ff4bfcc73623 service nova] [instance: 229bb9ef-b8d8-40cb-a589-3aa280b904d7] Refreshing network info cache for port d5bcacdd-b774-4af1-aa33-c5f4ec9198f2 {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1290.227743] env[62070]: DEBUG oslo_concurrency.lockutils [None req-57775761-0de8-48be-8b8b-255fd17db7d5 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Releasing lock "[datastore2] devstack-image-cache_base/5c23c587-97d3-45f2-a734-71a0876d72cb" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1290.228093] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-57775761-0de8-48be-8b8b-255fd17db7d5 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] [instance: 229bb9ef-b8d8-40cb-a589-3aa280b904d7] Processing image 5c23c587-97d3-45f2-a734-71a0876d72cb {{(pid=62070) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1290.228215] env[62070]: DEBUG oslo_concurrency.lockutils [None 
req-57775761-0de8-48be-8b8b-255fd17db7d5 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/5c23c587-97d3-45f2-a734-71a0876d72cb/5c23c587-97d3-45f2-a734-71a0876d72cb.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1290.228373] env[62070]: DEBUG oslo_concurrency.lockutils [None req-57775761-0de8-48be-8b8b-255fd17db7d5 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Acquired lock "[datastore2] devstack-image-cache_base/5c23c587-97d3-45f2-a734-71a0876d72cb/5c23c587-97d3-45f2-a734-71a0876d72cb.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1290.228563] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-57775761-0de8-48be-8b8b-255fd17db7d5 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1290.228805] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-868c6bc7-c9a1-40c8-ae34-f7e30c0fe4da {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1290.246055] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-57775761-0de8-48be-8b8b-255fd17db7d5 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1290.246239] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-57775761-0de8-48be-8b8b-255fd17db7d5 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62070) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1290.246906] env[62070]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-005e5f15-51a9-4eff-a390-a093c40d3486 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1290.251632] env[62070]: DEBUG oslo_vmware.api [None req-57775761-0de8-48be-8b8b-255fd17db7d5 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Waiting for the task: (returnval){ [ 1290.251632] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]52bfe367-b058-aa60-b36d-964b87f5b628" [ 1290.251632] env[62070]: _type = "Task" [ 1290.251632] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1290.258713] env[62070]: DEBUG oslo_vmware.api [None req-57775761-0de8-48be-8b8b-255fd17db7d5 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Task: {'id': session[52f37560-87ef-95a6-a0aa-393127576bf7]52bfe367-b058-aa60-b36d-964b87f5b628, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1290.555096] env[62070]: DEBUG nova.network.neutron [req-0fcb5a99-1bb9-4d03-9d09-9e7b5c2a3719 req-620a1c6b-d359-4ef0-ad01-ff4bfcc73623 service nova] [instance: 229bb9ef-b8d8-40cb-a589-3aa280b904d7] Updated VIF entry in instance network info cache for port d5bcacdd-b774-4af1-aa33-c5f4ec9198f2. {{(pid=62070) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1290.555476] env[62070]: DEBUG nova.network.neutron [req-0fcb5a99-1bb9-4d03-9d09-9e7b5c2a3719 req-620a1c6b-d359-4ef0-ad01-ff4bfcc73623 service nova] [instance: 229bb9ef-b8d8-40cb-a589-3aa280b904d7] Updating instance_info_cache with network_info: [{"id": "d5bcacdd-b774-4af1-aa33-c5f4ec9198f2", "address": "fa:16:3e:38:d5:88", "network": {"id": "b9ef8f6c-bbd6-409d-a591-ad584e5e028f", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-599171324-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.228", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ca25fba006b740f2a86fe10e4abe9400", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa8c2f93-f287-41b3-adb6-4942a7ea2a0b", "external-id": "nsx-vlan-transportzone-363", "segmentation_id": 363, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd5bcacdd-b7", "ovs_interfaceid": "d5bcacdd-b774-4af1-aa33-c5f4ec9198f2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1290.761490] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-57775761-0de8-48be-8b8b-255fd17db7d5 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] [instance: 229bb9ef-b8d8-40cb-a589-3aa280b904d7] Preparing fetch location {{(pid=62070) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1290.761726] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-57775761-0de8-48be-8b8b-255fd17db7d5 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] [instance: 229bb9ef-b8d8-40cb-a589-3aa280b904d7] Fetch image to [datastore2] OSTACK_IMG_b4db687a-5966-4d69-9e7d-dc1523681312/OSTACK_IMG_b4db687a-5966-4d69-9e7d-dc1523681312.vmdk {{(pid=62070) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1290.761916] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-57775761-0de8-48be-8b8b-255fd17db7d5 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] [instance: 229bb9ef-b8d8-40cb-a589-3aa280b904d7] Downloading stream optimized image 5c23c587-97d3-45f2-a734-71a0876d72cb to [datastore2] OSTACK_IMG_b4db687a-5966-4d69-9e7d-dc1523681312/OSTACK_IMG_b4db687a-5966-4d69-9e7d-dc1523681312.vmdk on the data store datastore2 as vApp {{(pid=62070) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 1290.762111] env[62070]: DEBUG 
nova.virt.vmwareapi.images [None req-57775761-0de8-48be-8b8b-255fd17db7d5 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] [instance: 229bb9ef-b8d8-40cb-a589-3aa280b904d7] Downloading image file data 5c23c587-97d3-45f2-a734-71a0876d72cb to the ESX as VM named 'OSTACK_IMG_b4db687a-5966-4d69-9e7d-dc1523681312' {{(pid=62070) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 1290.827080] env[62070]: DEBUG oslo_vmware.rw_handles [None req-57775761-0de8-48be-8b8b-255fd17db7d5 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 1290.827080] env[62070]: value = "resgroup-9" [ 1290.827080] env[62070]: _type = "ResourcePool" [ 1290.827080] env[62070]: }. {{(pid=62070) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 1290.827398] env[62070]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-a40058b8-ecc3-4c35-beac-983979eb82e8 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1290.847786] env[62070]: DEBUG oslo_vmware.rw_handles [None req-57775761-0de8-48be-8b8b-255fd17db7d5 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Lease: (returnval){ [ 1290.847786] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]528e26a0-0964-c70f-e4f6-0de1ce1394ab" [ 1290.847786] env[62070]: _type = "HttpNfcLease" [ 1290.847786] env[62070]: } obtained for vApp import into resource pool (val){ [ 1290.847786] env[62070]: value = "resgroup-9" [ 1290.847786] env[62070]: _type = "ResourcePool" [ 1290.847786] env[62070]: }. {{(pid=62070) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 1290.848102] env[62070]: DEBUG oslo_vmware.api [None req-57775761-0de8-48be-8b8b-255fd17db7d5 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Waiting for the lease: (returnval){ [ 1290.848102] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]528e26a0-0964-c70f-e4f6-0de1ce1394ab" [ 1290.848102] env[62070]: _type = "HttpNfcLease" [ 1290.848102] env[62070]: } to be ready. {{(pid=62070) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1290.854186] env[62070]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1290.854186] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]528e26a0-0964-c70f-e4f6-0de1ce1394ab" [ 1290.854186] env[62070]: _type = "HttpNfcLease" [ 1290.854186] env[62070]: } is initializing. {{(pid=62070) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1291.058513] env[62070]: DEBUG oslo_concurrency.lockutils [req-0fcb5a99-1bb9-4d03-9d09-9e7b5c2a3719 req-620a1c6b-d359-4ef0-ad01-ff4bfcc73623 service nova] Releasing lock "refresh_cache-229bb9ef-b8d8-40cb-a589-3aa280b904d7" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1291.356245] env[62070]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1291.356245] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]528e26a0-0964-c70f-e4f6-0de1ce1394ab" [ 1291.356245] env[62070]: _type = "HttpNfcLease" [ 1291.356245] env[62070]: } is initializing. 
{{(pid=62070) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1291.858454] env[62070]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1291.858454] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]528e26a0-0964-c70f-e4f6-0de1ce1394ab" [ 1291.858454] env[62070]: _type = "HttpNfcLease" [ 1291.858454] env[62070]: } is initializing. {{(pid=62070) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1292.357844] env[62070]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1292.357844] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]528e26a0-0964-c70f-e4f6-0de1ce1394ab" [ 1292.357844] env[62070]: _type = "HttpNfcLease" [ 1292.357844] env[62070]: } is ready. {{(pid=62070) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1292.358248] env[62070]: DEBUG oslo_vmware.rw_handles [None req-57775761-0de8-48be-8b8b-255fd17db7d5 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1292.358248] env[62070]: value = "session[52f37560-87ef-95a6-a0aa-393127576bf7]528e26a0-0964-c70f-e4f6-0de1ce1394ab" [ 1292.358248] env[62070]: _type = "HttpNfcLease" [ 1292.358248] env[62070]: }. {{(pid=62070) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 1292.358910] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba8daf39-5a86-4e39-a002-9dbd1bfa3655 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1292.365791] env[62070]: DEBUG oslo_vmware.rw_handles [None req-57775761-0de8-48be-8b8b-255fd17db7d5 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/527095e0-bd71-4f95-df06-ccc0da46922d/disk-0.vmdk from lease info. {{(pid=62070) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1292.365971] env[62070]: DEBUG oslo_vmware.rw_handles [None req-57775761-0de8-48be-8b8b-255fd17db7d5 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Creating HTTP connection to write to file with size = 31669760 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/527095e0-bd71-4f95-df06-ccc0da46922d/disk-0.vmdk. {{(pid=62070) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1292.428773] env[62070]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-a63d7b70-f047-4f37-8b07-2a215137ba96 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1293.569727] env[62070]: DEBUG oslo_vmware.rw_handles [None req-57775761-0de8-48be-8b8b-255fd17db7d5 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Completed reading data from the image iterator. 
{{(pid=62070) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1293.570129] env[62070]: DEBUG oslo_vmware.rw_handles [None req-57775761-0de8-48be-8b8b-255fd17db7d5 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/527095e0-bd71-4f95-df06-ccc0da46922d/disk-0.vmdk. {{(pid=62070) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1293.570962] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e51aff39-0288-4572-be72-9009755d2fc3 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1293.577510] env[62070]: DEBUG oslo_vmware.rw_handles [None req-57775761-0de8-48be-8b8b-255fd17db7d5 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/527095e0-bd71-4f95-df06-ccc0da46922d/disk-0.vmdk is in state: ready. {{(pid=62070) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1293.577681] env[62070]: DEBUG oslo_vmware.rw_handles [None req-57775761-0de8-48be-8b8b-255fd17db7d5 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Releasing lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/527095e0-bd71-4f95-df06-ccc0da46922d/disk-0.vmdk. {{(pid=62070) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 1293.577910] env[62070]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-b99455e9-f62a-4fc4-810a-8751b4635fb3 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1293.748533] env[62070]: DEBUG oslo_vmware.rw_handles [None req-57775761-0de8-48be-8b8b-255fd17db7d5 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Closed VMDK write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/527095e0-bd71-4f95-df06-ccc0da46922d/disk-0.vmdk. 
{{(pid=62070) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 1293.748813] env[62070]: INFO nova.virt.vmwareapi.images [None req-57775761-0de8-48be-8b8b-255fd17db7d5 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] [instance: 229bb9ef-b8d8-40cb-a589-3aa280b904d7] Downloaded image file data 5c23c587-97d3-45f2-a734-71a0876d72cb [ 1293.749675] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03a7f7bc-6296-4a19-b7c0-ba3eec4bea4d {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1293.764989] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b0055117-59f6-4f36-bba6-e92447278106 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1293.828022] env[62070]: INFO nova.virt.vmwareapi.images [None req-57775761-0de8-48be-8b8b-255fd17db7d5 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] [instance: 229bb9ef-b8d8-40cb-a589-3aa280b904d7] The imported VM was unregistered [ 1293.830466] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-57775761-0de8-48be-8b8b-255fd17db7d5 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] [instance: 229bb9ef-b8d8-40cb-a589-3aa280b904d7] Caching image {{(pid=62070) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1293.830718] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-57775761-0de8-48be-8b8b-255fd17db7d5 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Creating directory with path [datastore2] devstack-image-cache_base/5c23c587-97d3-45f2-a734-71a0876d72cb {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1293.830978] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a2c0468d-d095-461d-a60f-deb3e55a19bc {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1293.841676] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-57775761-0de8-48be-8b8b-255fd17db7d5 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Created directory with path [datastore2] devstack-image-cache_base/5c23c587-97d3-45f2-a734-71a0876d72cb {{(pid=62070) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1293.841866] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-57775761-0de8-48be-8b8b-255fd17db7d5 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Moving virtual disk from [datastore2] OSTACK_IMG_b4db687a-5966-4d69-9e7d-dc1523681312/OSTACK_IMG_b4db687a-5966-4d69-9e7d-dc1523681312.vmdk to [datastore2] devstack-image-cache_base/5c23c587-97d3-45f2-a734-71a0876d72cb/5c23c587-97d3-45f2-a734-71a0876d72cb.vmdk. 
{{(pid=62070) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 1293.842126] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-39bf35d6-b874-47c3-96db-e291c5060474 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1293.848308] env[62070]: DEBUG oslo_vmware.api [None req-57775761-0de8-48be-8b8b-255fd17db7d5 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Waiting for the task: (returnval){ [ 1293.848308] env[62070]: value = "task-1122661" [ 1293.848308] env[62070]: _type = "Task" [ 1293.848308] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1293.855413] env[62070]: DEBUG oslo_vmware.api [None req-57775761-0de8-48be-8b8b-255fd17db7d5 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Task: {'id': task-1122661, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1294.357851] env[62070]: DEBUG oslo_vmware.api [None req-57775761-0de8-48be-8b8b-255fd17db7d5 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Task: {'id': task-1122661, 'name': MoveVirtualDisk_Task} progress is 24%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1294.858459] env[62070]: DEBUG oslo_vmware.api [None req-57775761-0de8-48be-8b8b-255fd17db7d5 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Task: {'id': task-1122661, 'name': MoveVirtualDisk_Task} progress is 46%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1295.359634] env[62070]: DEBUG oslo_vmware.api [None req-57775761-0de8-48be-8b8b-255fd17db7d5 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Task: {'id': task-1122661, 'name': MoveVirtualDisk_Task} progress is 71%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1295.860205] env[62070]: DEBUG oslo_vmware.api [None req-57775761-0de8-48be-8b8b-255fd17db7d5 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Task: {'id': task-1122661, 'name': MoveVirtualDisk_Task} progress is 97%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1296.361189] env[62070]: DEBUG oslo_vmware.api [None req-57775761-0de8-48be-8b8b-255fd17db7d5 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Task: {'id': task-1122661, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.11545} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1296.361474] env[62070]: INFO nova.virt.vmwareapi.ds_util [None req-57775761-0de8-48be-8b8b-255fd17db7d5 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Moved virtual disk from [datastore2] OSTACK_IMG_b4db687a-5966-4d69-9e7d-dc1523681312/OSTACK_IMG_b4db687a-5966-4d69-9e7d-dc1523681312.vmdk to [datastore2] devstack-image-cache_base/5c23c587-97d3-45f2-a734-71a0876d72cb/5c23c587-97d3-45f2-a734-71a0876d72cb.vmdk. [ 1296.361669] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-57775761-0de8-48be-8b8b-255fd17db7d5 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] [instance: 229bb9ef-b8d8-40cb-a589-3aa280b904d7] Cleaning up location [datastore2] OSTACK_IMG_b4db687a-5966-4d69-9e7d-dc1523681312 {{(pid=62070) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1296.361840] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-57775761-0de8-48be-8b8b-255fd17db7d5 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Deleting the datastore file [datastore2] OSTACK_IMG_b4db687a-5966-4d69-9e7d-dc1523681312 {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1296.362104] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6f7d8c26-8ea7-40a5-b35c-2119f6616e9c {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1296.368322] env[62070]: DEBUG oslo_vmware.api [None req-57775761-0de8-48be-8b8b-255fd17db7d5 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Waiting for the task: (returnval){ [ 1296.368322] env[62070]: value = "task-1122662" [ 1296.368322] env[62070]: _type = "Task" [ 1296.368322] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1296.375592] env[62070]: DEBUG oslo_vmware.api [None req-57775761-0de8-48be-8b8b-255fd17db7d5 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Task: {'id': task-1122662, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1296.878529] env[62070]: DEBUG oslo_vmware.api [None req-57775761-0de8-48be-8b8b-255fd17db7d5 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Task: {'id': task-1122662, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.041615} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1296.878939] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-57775761-0de8-48be-8b8b-255fd17db7d5 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Deleted the datastore file {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1296.878939] env[62070]: DEBUG oslo_concurrency.lockutils [None req-57775761-0de8-48be-8b8b-255fd17db7d5 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Releasing lock "[datastore2] devstack-image-cache_base/5c23c587-97d3-45f2-a734-71a0876d72cb/5c23c587-97d3-45f2-a734-71a0876d72cb.vmdk" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1296.879212] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-57775761-0de8-48be-8b8b-255fd17db7d5 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/5c23c587-97d3-45f2-a734-71a0876d72cb/5c23c587-97d3-45f2-a734-71a0876d72cb.vmdk to [datastore2] 229bb9ef-b8d8-40cb-a589-3aa280b904d7/229bb9ef-b8d8-40cb-a589-3aa280b904d7.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1296.879464] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-060f36d9-e6e0-4761-bd41-936be5628810 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1296.885822] env[62070]: DEBUG oslo_vmware.api [None req-57775761-0de8-48be-8b8b-255fd17db7d5 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Waiting for the task: (returnval){ [ 1296.885822] env[62070]: value = "task-1122663" [ 1296.885822] env[62070]: _type = "Task" [ 1296.885822] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1296.894075] env[62070]: DEBUG oslo_vmware.api [None req-57775761-0de8-48be-8b8b-255fd17db7d5 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Task: {'id': task-1122663, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1297.397028] env[62070]: DEBUG oslo_vmware.api [None req-57775761-0de8-48be-8b8b-255fd17db7d5 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Task: {'id': task-1122663, 'name': CopyVirtualDisk_Task} progress is 18%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1297.896521] env[62070]: DEBUG oslo_vmware.api [None req-57775761-0de8-48be-8b8b-255fd17db7d5 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Task: {'id': task-1122663, 'name': CopyVirtualDisk_Task} progress is 40%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1298.397851] env[62070]: DEBUG oslo_vmware.api [None req-57775761-0de8-48be-8b8b-255fd17db7d5 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Task: {'id': task-1122663, 'name': CopyVirtualDisk_Task} progress is 66%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1298.900535] env[62070]: DEBUG oslo_vmware.api [None req-57775761-0de8-48be-8b8b-255fd17db7d5 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Task: {'id': task-1122663, 'name': CopyVirtualDisk_Task} progress is 88%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1299.399017] env[62070]: DEBUG oslo_vmware.api [None req-57775761-0de8-48be-8b8b-255fd17db7d5 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Task: {'id': task-1122663, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.264307} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1299.399307] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-57775761-0de8-48be-8b8b-255fd17db7d5 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/5c23c587-97d3-45f2-a734-71a0876d72cb/5c23c587-97d3-45f2-a734-71a0876d72cb.vmdk to [datastore2] 229bb9ef-b8d8-40cb-a589-3aa280b904d7/229bb9ef-b8d8-40cb-a589-3aa280b904d7.vmdk {{(pid=62070) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 1299.400086] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b3ae906-0006-404b-8144-9a19f3858348 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1299.420846] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-57775761-0de8-48be-8b8b-255fd17db7d5 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] [instance: 229bb9ef-b8d8-40cb-a589-3aa280b904d7] Reconfiguring VM instance instance-00000070 to attach disk [datastore2] 229bb9ef-b8d8-40cb-a589-3aa280b904d7/229bb9ef-b8d8-40cb-a589-3aa280b904d7.vmdk or device None with type streamOptimized {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1299.421098] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a1708dd7-574f-429b-b067-52afdbc11b43 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1299.439543] env[62070]: DEBUG oslo_vmware.api [None req-57775761-0de8-48be-8b8b-255fd17db7d5 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Waiting for the task: (returnval){ [ 1299.439543] env[62070]: value = "task-1122664" [ 1299.439543] env[62070]: _type = "Task" [ 1299.439543] env[62070]: } to complete. 
{{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1299.446698] env[62070]: DEBUG oslo_vmware.api [None req-57775761-0de8-48be-8b8b-255fd17db7d5 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Task: {'id': task-1122664, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1299.948727] env[62070]: DEBUG oslo_vmware.api [None req-57775761-0de8-48be-8b8b-255fd17db7d5 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Task: {'id': task-1122664, 'name': ReconfigVM_Task, 'duration_secs': 0.301097} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1299.949088] env[62070]: DEBUG nova.virt.vmwareapi.volumeops [None req-57775761-0de8-48be-8b8b-255fd17db7d5 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] [instance: 229bb9ef-b8d8-40cb-a589-3aa280b904d7] Reconfigured VM instance instance-00000070 to attach disk [datastore2] 229bb9ef-b8d8-40cb-a589-3aa280b904d7/229bb9ef-b8d8-40cb-a589-3aa280b904d7.vmdk or device None with type streamOptimized {{(pid=62070) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1299.949737] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-950ef206-b7f7-4b70-a19b-3c87abe2dd6f {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1299.955475] env[62070]: DEBUG oslo_vmware.api [None req-57775761-0de8-48be-8b8b-255fd17db7d5 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Waiting for the task: (returnval){ [ 1299.955475] env[62070]: value = "task-1122665" [ 1299.955475] env[62070]: _type = "Task" [ 1299.955475] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1299.962463] env[62070]: DEBUG oslo_vmware.api [None req-57775761-0de8-48be-8b8b-255fd17db7d5 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Task: {'id': task-1122665, 'name': Rename_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1300.465126] env[62070]: DEBUG oslo_vmware.api [None req-57775761-0de8-48be-8b8b-255fd17db7d5 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Task: {'id': task-1122665, 'name': Rename_Task, 'duration_secs': 0.138483} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1300.465385] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-57775761-0de8-48be-8b8b-255fd17db7d5 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] [instance: 229bb9ef-b8d8-40cb-a589-3aa280b904d7] Powering on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1300.465636] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a7e01df2-f04e-45c6-b884-5581eb5e9020 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1300.471634] env[62070]: DEBUG oslo_vmware.api [None req-57775761-0de8-48be-8b8b-255fd17db7d5 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Waiting for the task: (returnval){ [ 1300.471634] env[62070]: value = "task-1122666" [ 1300.471634] env[62070]: _type = "Task" [ 1300.471634] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1300.479149] env[62070]: DEBUG oslo_vmware.api [None req-57775761-0de8-48be-8b8b-255fd17db7d5 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Task: {'id': task-1122666, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1300.980727] env[62070]: DEBUG oslo_vmware.api [None req-57775761-0de8-48be-8b8b-255fd17db7d5 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Task: {'id': task-1122666, 'name': PowerOnVM_Task, 'duration_secs': 0.457461} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1300.981143] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-57775761-0de8-48be-8b8b-255fd17db7d5 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] [instance: 229bb9ef-b8d8-40cb-a589-3aa280b904d7] Powered on the VM {{(pid=62070) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1301.070520] env[62070]: DEBUG nova.compute.manager [None req-57775761-0de8-48be-8b8b-255fd17db7d5 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] [instance: 229bb9ef-b8d8-40cb-a589-3aa280b904d7] Checking state {{(pid=62070) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1301.071545] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2643470b-3617-4fd3-8bc0-b20c154b1cdd {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1301.588309] env[62070]: DEBUG oslo_concurrency.lockutils [None req-57775761-0de8-48be-8b8b-255fd17db7d5 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Lock "229bb9ef-b8d8-40cb-a589-3aa280b904d7" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 19.305s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1304.603562] env[62070]: DEBUG oslo_service.periodic_task [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62070) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1304.603945] env[62070]: DEBUG oslo_service.periodic_task [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62070) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1304.604173] env[62070]: DEBUG nova.compute.manager [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Starting heal instance info cache {{(pid=62070) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}} [ 1305.134483] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Acquiring lock "refresh_cache-229bb9ef-b8d8-40cb-a589-3aa280b904d7" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1305.134632] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Acquired lock "refresh_cache-229bb9ef-b8d8-40cb-a589-3aa280b904d7" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1305.134781] env[62070]: DEBUG nova.network.neutron [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] [instance: 229bb9ef-b8d8-40cb-a589-3aa280b904d7] Forcefully refreshing network info cache for instance {{(pid=62070) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2004}} [ 1306.368648] env[62070]: DEBUG nova.network.neutron [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] [instance: 229bb9ef-b8d8-40cb-a589-3aa280b904d7] Updating instance_info_cache with network_info: [{"id": "d5bcacdd-b774-4af1-aa33-c5f4ec9198f2", "address": 
"fa:16:3e:38:d5:88", "network": {"id": "b9ef8f6c-bbd6-409d-a591-ad584e5e028f", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-599171324-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.228", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ca25fba006b740f2a86fe10e4abe9400", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa8c2f93-f287-41b3-adb6-4942a7ea2a0b", "external-id": "nsx-vlan-transportzone-363", "segmentation_id": 363, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd5bcacdd-b7", "ovs_interfaceid": "d5bcacdd-b774-4af1-aa33-c5f4ec9198f2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1306.871219] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Releasing lock "refresh_cache-229bb9ef-b8d8-40cb-a589-3aa280b904d7" {{(pid=62070) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1306.871470] env[62070]: DEBUG nova.compute.manager [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] [instance: 229bb9ef-b8d8-40cb-a589-3aa280b904d7] Updated the network info_cache for instance {{(pid=62070) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9999}} [ 1306.871689] env[62070]: DEBUG oslo_service.periodic_task [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62070) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1306.871851] env[62070]: DEBUG oslo_service.periodic_task [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62070) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1306.871997] env[62070]: DEBUG oslo_service.periodic_task [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62070) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1306.872162] env[62070]: DEBUG oslo_service.periodic_task [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62070) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1306.872301] env[62070]: DEBUG oslo_service.periodic_task [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62070) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1306.872439] env[62070]: DEBUG oslo_service.periodic_task [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Running periodic task ComputeManager._reclaim_queued_deletes 
{{(pid=62070) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1306.872567] env[62070]: DEBUG nova.compute.manager [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62070) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}} [ 1306.872707] env[62070]: DEBUG oslo_service.periodic_task [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62070) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1307.376550] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1307.376940] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1307.376940] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1307.377129] env[62070]: DEBUG nova.compute.resource_tracker [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62070) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1307.378070] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97642445-ee45-46e6-9f6d-887aada847ac {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1307.386742] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a5bd1ca-eb3f-49da-9229-bb3604fb201a {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1307.401108] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6794f5f-0092-4fbc-9a32-7b4eafd3cc67 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1307.407267] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea536e80-3d19-45c2-bef3-800160e376f4 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1307.435551] env[62070]: DEBUG nova.compute.resource_tracker [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181253MB free_disk=169GB free_vcpus=48 pci_devices=None {{(pid=62070) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1307.435700] env[62070]: DEBUG oslo_concurrency.lockutils 
[None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1307.435874] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1308.460505] env[62070]: DEBUG nova.compute.resource_tracker [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Instance 229bb9ef-b8d8-40cb-a589-3aa280b904d7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62070) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1308.460862] env[62070]: DEBUG nova.compute.resource_tracker [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Total usable vcpus: 48, total allocated vcpus: 1 {{(pid=62070) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1308.460862] env[62070]: DEBUG nova.compute.resource_tracker [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=704MB phys_disk=200GB used_disk=1GB total_vcpus=48 used_vcpus=1 pci_stats=[] {{(pid=62070) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1308.476570] env[62070]: DEBUG nova.scheduler.client.report [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Refreshing inventories for resource provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1308.488290] env[62070]: DEBUG nova.scheduler.client.report [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Updating ProviderTree inventory for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1308.488471] env[62070]: DEBUG nova.compute.provider_tree [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Updating inventory in ProviderTree for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1308.498620] env[62070]: DEBUG nova.scheduler.client.report [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Refreshing aggregate associations for resource provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6, 
aggregates: None {{(pid=62070) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1308.516678] env[62070]: DEBUG nova.scheduler.client.report [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Refreshing trait associations for resource provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6, traits: COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO {{(pid=62070) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1308.540859] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d7b85d7-a929-4659-8fff-7047b8cd5a6d {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1308.548030] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-998d8689-e263-4c04-9bb7-a947f999676c {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1308.577445] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d5de479-65c9-4c2b-a723-6c7e02cfe6e9 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1308.584468] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b4c25cf-75fa-49cd-8d74-b05524d283c8 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1308.596719] env[62070]: DEBUG nova.compute.provider_tree [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1309.099245] env[62070]: DEBUG nova.scheduler.client.report [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1309.604449] env[62070]: DEBUG nova.compute.resource_tracker [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62070) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1309.604827] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.169s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1338.905259] env[62070]: DEBUG oslo_concurrency.lockutils [None req-7fcaaa11-ffc8-4adb-9f4d-f95a589172d7 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Acquiring lock "229bb9ef-b8d8-40cb-a589-3aa280b904d7" by 
"nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1338.905696] env[62070]: DEBUG oslo_concurrency.lockutils [None req-7fcaaa11-ffc8-4adb-9f4d-f95a589172d7 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Lock "229bb9ef-b8d8-40cb-a589-3aa280b904d7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1338.905807] env[62070]: DEBUG oslo_concurrency.lockutils [None req-7fcaaa11-ffc8-4adb-9f4d-f95a589172d7 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Acquiring lock "229bb9ef-b8d8-40cb-a589-3aa280b904d7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1338.905992] env[62070]: DEBUG oslo_concurrency.lockutils [None req-7fcaaa11-ffc8-4adb-9f4d-f95a589172d7 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Lock "229bb9ef-b8d8-40cb-a589-3aa280b904d7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1338.906193] env[62070]: DEBUG oslo_concurrency.lockutils [None req-7fcaaa11-ffc8-4adb-9f4d-f95a589172d7 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Lock "229bb9ef-b8d8-40cb-a589-3aa280b904d7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1338.908627] env[62070]: INFO nova.compute.manager [None req-7fcaaa11-ffc8-4adb-9f4d-f95a589172d7 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] [instance: 229bb9ef-b8d8-40cb-a589-3aa280b904d7] Terminating instance [ 1338.910480] env[62070]: DEBUG nova.compute.manager [None req-7fcaaa11-ffc8-4adb-9f4d-f95a589172d7 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] [instance: 229bb9ef-b8d8-40cb-a589-3aa280b904d7] Start destroying the instance on the hypervisor. 
{{(pid=62070) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1338.910738] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-7fcaaa11-ffc8-4adb-9f4d-f95a589172d7 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] [instance: 229bb9ef-b8d8-40cb-a589-3aa280b904d7] Destroying instance {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1338.911722] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10ce5768-75cf-485c-8882-af7898b01b07 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1338.921519] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-7fcaaa11-ffc8-4adb-9f4d-f95a589172d7 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] [instance: 229bb9ef-b8d8-40cb-a589-3aa280b904d7] Powering off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1338.921781] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-47fded70-6d6a-47c8-8aad-af2b7e3c94a9 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1338.928480] env[62070]: DEBUG oslo_vmware.api [None req-7fcaaa11-ffc8-4adb-9f4d-f95a589172d7 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Waiting for the task: (returnval){ [ 1338.928480] env[62070]: value = "task-1122667" [ 1338.928480] env[62070]: _type = "Task" [ 1338.928480] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1338.937220] env[62070]: DEBUG oslo_vmware.api [None req-7fcaaa11-ffc8-4adb-9f4d-f95a589172d7 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Task: {'id': task-1122667, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1339.438406] env[62070]: DEBUG oslo_vmware.api [None req-7fcaaa11-ffc8-4adb-9f4d-f95a589172d7 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Task: {'id': task-1122667, 'name': PowerOffVM_Task, 'duration_secs': 0.17657} completed successfully. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1339.439033] env[62070]: DEBUG nova.virt.vmwareapi.vm_util [None req-7fcaaa11-ffc8-4adb-9f4d-f95a589172d7 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] [instance: 229bb9ef-b8d8-40cb-a589-3aa280b904d7] Powered off the VM {{(pid=62070) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1339.439898] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-7fcaaa11-ffc8-4adb-9f4d-f95a589172d7 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] [instance: 229bb9ef-b8d8-40cb-a589-3aa280b904d7] Unregistering the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1339.439898] env[62070]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-48352101-a9bb-45d3-858c-4f3a78d2e83e {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1339.506613] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-7fcaaa11-ffc8-4adb-9f4d-f95a589172d7 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] [instance: 229bb9ef-b8d8-40cb-a589-3aa280b904d7] Unregistered the VM {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1339.506845] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-7fcaaa11-ffc8-4adb-9f4d-f95a589172d7 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] [instance: 229bb9ef-b8d8-40cb-a589-3aa280b904d7] Deleting contents of the VM from datastore datastore2 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1339.506941] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-7fcaaa11-ffc8-4adb-9f4d-f95a589172d7 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Deleting the datastore file [datastore2] 229bb9ef-b8d8-40cb-a589-3aa280b904d7 {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1339.507168] env[62070]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-68b547aa-d225-48c0-9ada-26191bd83eb4 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1339.513608] env[62070]: DEBUG oslo_vmware.api [None req-7fcaaa11-ffc8-4adb-9f4d-f95a589172d7 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Waiting for the task: (returnval){ [ 1339.513608] env[62070]: value = "task-1122669" [ 1339.513608] env[62070]: _type = "Task" [ 1339.513608] env[62070]: } to complete. {{(pid=62070) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1339.521244] env[62070]: DEBUG oslo_vmware.api [None req-7fcaaa11-ffc8-4adb-9f4d-f95a589172d7 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Task: {'id': task-1122669, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1340.022969] env[62070]: DEBUG oslo_vmware.api [None req-7fcaaa11-ffc8-4adb-9f4d-f95a589172d7 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Task: {'id': task-1122669, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.139923} completed successfully. {{(pid=62070) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1340.023367] env[62070]: DEBUG nova.virt.vmwareapi.ds_util [None req-7fcaaa11-ffc8-4adb-9f4d-f95a589172d7 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Deleted the datastore file {{(pid=62070) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1340.023434] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-7fcaaa11-ffc8-4adb-9f4d-f95a589172d7 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] [instance: 229bb9ef-b8d8-40cb-a589-3aa280b904d7] Deleted contents of the VM from datastore datastore2 {{(pid=62070) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1340.023610] env[62070]: DEBUG nova.virt.vmwareapi.vmops [None req-7fcaaa11-ffc8-4adb-9f4d-f95a589172d7 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] [instance: 229bb9ef-b8d8-40cb-a589-3aa280b904d7] Instance destroyed {{(pid=62070) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1340.023804] env[62070]: INFO nova.compute.manager [None req-7fcaaa11-ffc8-4adb-9f4d-f95a589172d7 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] [instance: 229bb9ef-b8d8-40cb-a589-3aa280b904d7] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1340.024073] env[62070]: DEBUG oslo.service.loopingcall [None req-7fcaaa11-ffc8-4adb-9f4d-f95a589172d7 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62070) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1340.024278] env[62070]: DEBUG nova.compute.manager [-] [instance: 229bb9ef-b8d8-40cb-a589-3aa280b904d7] Deallocating network for instance {{(pid=62070) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1340.024372] env[62070]: DEBUG nova.network.neutron [-] [instance: 229bb9ef-b8d8-40cb-a589-3aa280b904d7] deallocate_for_instance() {{(pid=62070) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1340.453285] env[62070]: DEBUG nova.compute.manager [req-23faa897-b547-45d5-9d95-da95218515d1 req-09575997-625d-4d15-903a-2109b3cc1f0e service nova] [instance: 229bb9ef-b8d8-40cb-a589-3aa280b904d7] Received event network-vif-deleted-d5bcacdd-b774-4af1-aa33-c5f4ec9198f2 {{(pid=62070) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1340.453495] env[62070]: INFO nova.compute.manager [req-23faa897-b547-45d5-9d95-da95218515d1 req-09575997-625d-4d15-903a-2109b3cc1f0e service nova] [instance: 229bb9ef-b8d8-40cb-a589-3aa280b904d7] Neutron deleted interface d5bcacdd-b774-4af1-aa33-c5f4ec9198f2; detaching it from the instance and deleting it from the info cache [ 1340.453680] env[62070]: DEBUG nova.network.neutron [req-23faa897-b547-45d5-9d95-da95218515d1 req-09575997-625d-4d15-903a-2109b3cc1f0e service nova] [instance: 229bb9ef-b8d8-40cb-a589-3aa280b904d7] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1340.930942] env[62070]: DEBUG nova.network.neutron [-] [instance: 229bb9ef-b8d8-40cb-a589-3aa280b904d7] Updating instance_info_cache with network_info: [] {{(pid=62070) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1340.956707] env[62070]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e37825e8-5c5d-4965-9ce3-8199c5fc24d1 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1340.970715] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-749be71e-f465-423c-b839-bfe39b01100c {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1340.992755] env[62070]: DEBUG nova.compute.manager [req-23faa897-b547-45d5-9d95-da95218515d1 req-09575997-625d-4d15-903a-2109b3cc1f0e service nova] [instance: 229bb9ef-b8d8-40cb-a589-3aa280b904d7] Detach interface failed, port_id=d5bcacdd-b774-4af1-aa33-c5f4ec9198f2, reason: Instance 229bb9ef-b8d8-40cb-a589-3aa280b904d7 could not be found. {{(pid=62070) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1341.433718] env[62070]: INFO nova.compute.manager [-] [instance: 229bb9ef-b8d8-40cb-a589-3aa280b904d7] Took 1.41 seconds to deallocate network for instance. 
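The PowerOffVM_Task, UnregisterVM, and DeleteDatastoreFile_Task records above all come from the same oslo.vmware pattern that produces the "Waiting for the task" and "completed successfully" lines: invoke the asynchronous SOAP method through the session, then block on wait_for_task. A minimal sketch of that pattern outside Nova, assuming a reachable vCenter; the hostname, credentials, poll interval, and the vm_ref argument below are placeholders for illustration, not values taken from this log or from Nova's source.

from oslo_vmware import api as vmware_api

# Placeholder endpoint and credentials; a real deployment reads these from nova.conf.
# Constructing the session logs in to vCenter, as in the "_create_session" records.
session = vmware_api.VMwareAPISession(
    'vc.example.test', 'administrator@vsphere.local', 'secret',
    api_retry_count=10, task_poll_interval=0.5)

def power_off(vm_ref):
    """Invoke the async SOAP call for a VM reference, then wait for the task.

    wait_for_task() is what emits the 'Waiting for the task' and
    'progress is N%' debug lines seen in the log, and raises if the task errors.
    """
    task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
    return session.wait_for_task(task)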
[ 1341.940606] env[62070]: DEBUG oslo_concurrency.lockutils [None req-7fcaaa11-ffc8-4adb-9f4d-f95a589172d7 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1341.940906] env[62070]: DEBUG oslo_concurrency.lockutils [None req-7fcaaa11-ffc8-4adb-9f4d-f95a589172d7 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1341.941162] env[62070]: DEBUG nova.objects.instance [None req-7fcaaa11-ffc8-4adb-9f4d-f95a589172d7 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Lazy-loading 'resources' on Instance uuid 229bb9ef-b8d8-40cb-a589-3aa280b904d7 {{(pid=62070) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1342.474546] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55527730-dbf6-497d-9c20-6f169383c61b {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1342.481792] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1966cffc-eb37-49be-96a8-a5b4e2a9c788 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1342.511532] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-575a3760-8298-43f2-add5-34bb7ecbc0ce {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1342.518324] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1e2d9b6-4145-4d33-b99d-a5cc3df7e116 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1342.531686] env[62070]: DEBUG nova.compute.provider_tree [None req-7fcaaa11-ffc8-4adb-9f4d-f95a589172d7 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1343.034614] env[62070]: DEBUG nova.scheduler.client.report [None req-7fcaaa11-ffc8-4adb-9f4d-f95a589172d7 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1343.539887] env[62070]: DEBUG oslo_concurrency.lockutils [None 
req-7fcaaa11-ffc8-4adb-9f4d-f95a589172d7 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.599s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1343.560828] env[62070]: INFO nova.scheduler.client.report [None req-7fcaaa11-ffc8-4adb-9f4d-f95a589172d7 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Deleted allocations for instance 229bb9ef-b8d8-40cb-a589-3aa280b904d7 [ 1344.068101] env[62070]: DEBUG oslo_concurrency.lockutils [None req-7fcaaa11-ffc8-4adb-9f4d-f95a589172d7 tempest-AttachVolumeShelveTestJSON-1287321211 tempest-AttachVolumeShelveTestJSON-1287321211-project-member] Lock "229bb9ef-b8d8-40cb-a589-3aa280b904d7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.162s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1357.850091] env[62070]: DEBUG oslo_service.periodic_task [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62070) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1357.850534] env[62070]: DEBUG oslo_service.periodic_task [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62070) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1358.354736] env[62070]: DEBUG oslo_service.periodic_task [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62070) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1358.354930] env[62070]: DEBUG nova.compute.manager [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Starting heal instance info cache {{(pid=62070) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}} [ 1358.354930] env[62070]: DEBUG nova.compute.manager [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Rebuilding the list of instances to heal {{(pid=62070) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1358.857552] env[62070]: DEBUG nova.compute.manager [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Didn't find any instances for network info cache update. 
{{(pid=62070) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10014}} [ 1358.858047] env[62070]: DEBUG oslo_service.periodic_task [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62070) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1358.858047] env[62070]: DEBUG oslo_service.periodic_task [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62070) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1358.858183] env[62070]: DEBUG oslo_service.periodic_task [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62070) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1358.858265] env[62070]: DEBUG oslo_service.periodic_task [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62070) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1358.858404] env[62070]: DEBUG oslo_service.periodic_task [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62070) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1358.858545] env[62070]: DEBUG oslo_service.periodic_task [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62070) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1358.858696] env[62070]: DEBUG nova.compute.manager [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62070) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}} [ 1358.858863] env[62070]: DEBUG oslo_service.periodic_task [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62070) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1359.361766] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1359.362071] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1359.362188] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1359.362347] env[62070]: DEBUG nova.compute.resource_tracker [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62070) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1359.363269] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a4ca214-ffe4-4fd5-844a-510cd5d7492c {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1359.371217] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2e41161-9a68-4d43-863d-2d8a5c366434 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1359.384667] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c6c7fcd-f81a-4034-9afd-38c98e7bcab9 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1359.390408] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee29631b-615e-4c80-8872-838110f560dd {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1359.417732] env[62070]: DEBUG nova.compute.resource_tracker [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181152MB free_disk=169GB free_vcpus=48 pci_devices=None {{(pid=62070) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1359.417862] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 
1359.418054] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1360.436771] env[62070]: DEBUG nova.compute.resource_tracker [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=62070) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1360.437059] env[62070]: DEBUG nova.compute.resource_tracker [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=62070) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1360.449554] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef274765-17fb-4136-ba3e-4a7baa61af01 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1360.457226] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-620a63a2-3b28-44f7-8108-048d7a48f4e2 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1360.487246] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d35a2168-4a03-4df3-9cda-aafc966d097e {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1360.493682] env[62070]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c19d8bb-cbb2-446f-874d-13f438c9a955 {{(pid=62070) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1360.505995] env[62070]: DEBUG nova.compute.provider_tree [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Inventory has not changed in ProviderTree for provider: 21c7c111-1b69-4468-b2c4-5dd96014fbd6 {{(pid=62070) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1361.008872] env[62070]: DEBUG nova.scheduler.client.report [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Inventory has not changed for provider 21c7c111-1b69-4468-b2c4-5dd96014fbd6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62070) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1361.514827] env[62070]: DEBUG nova.compute.resource_tracker [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62070) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1361.515245] env[62070]: DEBUG oslo_concurrency.lockutils [None req-6bbbc55d-8386-4096-99f4-2c2835bc0d97 None None] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.097s {{(pid=62070) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}